release orchestrator v1 draft and build fixes

This commit is contained in:
master
2026-01-12 12:24:17 +02:00
parent f3de858c59
commit 9873f80830
1598 changed files with 240385 additions and 5944 deletions

View File

@@ -0,0 +1,202 @@
namespace StellaOps.Scanner.Analyzers.Plugin.Unified;
using StellaOps.Plugin.Abstractions;
using StellaOps.Plugin.Abstractions.Capabilities;
using StellaOps.Plugin.Abstractions.Context;
using StellaOps.Plugin.Abstractions.Health;
using StellaOps.Plugin.Abstractions.Lifecycle;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
/// <summary>
/// Adapts an existing ILanguageAnalyzer to the unified IPlugin and IAnalysisCapability interfaces.
/// This enables gradual migration of Scanner language analyzers to the unified plugin architecture.
/// </summary>
/// <remarks>
/// The adapter bridges the Scanner-specific ILanguageAnalyzer interface to the Plugin.Abstractions
/// IAnalysisCapability interface. The underlying analysis is delegated to the wrapped analyzer.
/// </remarks>
public sealed class AnalyzerPluginAdapter : IPlugin, IAnalysisCapability
{
    private readonly ILanguageAnalyzer _inner;
    private readonly ILanguageAnalyzerPlugin _plugin;
    private readonly IServiceProvider _serviceProvider;
    private IPluginContext? _context; // stored for future lifecycle hooks; not read yet
    private PluginLifecycleState _state = PluginLifecycleState.Discovered;
    private readonly string[] _filePatterns;
    private readonly string[] _ecosystems;

    /// <summary>
    /// Creates a new adapter for an existing language analyzer.
    /// </summary>
    /// <param name="inner">The existing language analyzer to wrap.</param>
    /// <param name="plugin">The plugin metadata for this analyzer.</param>
    /// <param name="serviceProvider">Service provider for DI.</param>
    /// <param name="filePatterns">File patterns this analyzer handles; null is treated as empty.</param>
    /// <param name="ecosystems">Supported ecosystems; null falls back to the analyzer id.</param>
    public AnalyzerPluginAdapter(
        ILanguageAnalyzer inner,
        ILanguageAnalyzerPlugin plugin,
        IServiceProvider serviceProvider,
        string[] filePatterns,
        string[] ecosystems)
    {
        _inner = inner ?? throw new ArgumentNullException(nameof(inner));
        _plugin = plugin ?? throw new ArgumentNullException(nameof(plugin));
        _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
        _filePatterns = filePatterns ?? Array.Empty<string>();
        _ecosystems = ecosystems ?? new[] { inner.Id };
    }

    /// <inheritdoc />
    public PluginInfo Info => new(
        Id: $"com.stellaops.analyzer.{_inner.Id}",
        Name: _inner.DisplayName,
        Version: "1.0.0",
        Vendor: "Stella Ops",
        Description: $"{_inner.DisplayName} language analyzer for dependency scanning");

    /// <inheritdoc />
    public PluginTrustLevel TrustLevel => PluginTrustLevel.BuiltIn;

    /// <inheritdoc />
    public PluginCapabilities Capabilities => PluginCapabilities.Analysis;

    /// <inheritdoc />
    public PluginLifecycleState State => _state;

    #region IAnalysisCapability

    /// <inheritdoc />
    public string AnalysisType => _inner.Id;

    /// <inheritdoc />
    public IReadOnlyList<string> FilePatterns => _filePatterns;

    /// <inheritdoc />
    public IReadOnlyList<string> SupportedEcosystems => _ecosystems;

    /// <inheritdoc />
    public bool CanAnalyze(string filePath, ReadOnlySpan<byte> fileHeader)
    {
        // Match on file name only; header sniffing is not needed for
        // manifest-based language analyzers, so fileHeader is ignored.
        var fileName = Path.GetFileName(filePath);
        foreach (var pattern in _filePatterns)
        {
            if (MatchesPattern(fileName, pattern))
            {
                return true;
            }
        }

        return false;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Deliberately non-async: the method performs no awaits, and an async
    /// method without await raises CS1998, which fails the build under
    /// TreatWarningsAsErrors. Failures are surfaced through the returned task.
    /// </remarks>
    public Task<AnalysisResult> AnalyzeAsync(IAnalysisContext context, CancellationToken ct)
    {
        if (_state != PluginLifecycleState.Active)
        {
            return Task.FromResult(new AnalysisResult(
                Success: false,
                Components: Array.Empty<DiscoveredComponent>(),
                Diagnostics: new[]
                {
                    new AnalysisDiagnostic(
                        DiagnosticSeverity.Error,
                        "ANALYZER_NOT_ACTIVE",
                        $"Analyzer {_inner.Id} is not in active state (current: {_state})")
                },
                Metadata: new AnalysisMetadata(
                    AnalyzerType: _inner.Id,
                    AnalyzerVersion: Info.Version,
                    Duration: TimeSpan.Zero,
                    FilesProcessed: 0)));
        }

        // Note: The ILanguageAnalyzer interface uses a different analysis model
        // (LanguageAnalyzerContext + LanguageComponentWriter) than IAnalysisCapability.
        // Full integration would require creating adapter context/writer classes.
        // For now, we document this limitation and provide basic bridging.
        return Task.FromException<AnalysisResult>(new NotSupportedException(
            $"Direct analysis via IAnalysisCapability is not yet supported for {_inner.Id}. " +
            "Use the Scanner service with the existing ILanguageAnalyzer interface. " +
            "Full IAnalysisCapability integration requires LanguageAnalyzerContext adapter."));
    }

    #endregion

    #region IPlugin

    /// <inheritdoc />
    public Task InitializeAsync(IPluginContext context, CancellationToken ct)
    {
        _context = context;
        _state = PluginLifecycleState.Initializing;

        // Verify the plugin is available before declaring the adapter active.
        if (!_plugin.IsAvailable(_serviceProvider))
        {
            _state = PluginLifecycleState.Failed;
            throw new InvalidOperationException(
                $"Language analyzer plugin '{_plugin.Name}' is not available.");
        }

        _state = PluginLifecycleState.Active;
        context.Logger.Info("Analyzer plugin adapter initialized for {AnalyzerId}", _inner.Id);
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<HealthCheckResult> HealthCheckAsync(CancellationToken ct)
    {
        try
        {
            var isAvailable = _plugin.IsAvailable(_serviceProvider);
            if (isAvailable)
            {
                return Task.FromResult(HealthCheckResult.Healthy()
                    .WithDetails(new Dictionary<string, object>
                    {
                        ["analyzerId"] = _inner.Id,
                        ["displayName"] = _inner.DisplayName,
                        ["filePatterns"] = string.Join(", ", _filePatterns),
                        ["ecosystems"] = string.Join(", ", _ecosystems)
                    }));
            }

            return Task.FromResult(HealthCheckResult.Unhealthy($"Analyzer '{_inner.Id}' is not available"));
        }
        catch (Exception ex)
        {
            return Task.FromResult(HealthCheckResult.Unhealthy(ex));
        }
    }

    /// <inheritdoc />
    public ValueTask DisposeAsync()
    {
        _state = PluginLifecycleState.Stopped;
        return ValueTask.CompletedTask;
    }

    #endregion

    #region Helpers

    /// <summary>
    /// Matches a file name against a simple glob supporting a single leading
    /// or trailing '*' wildcard; comparison is case-insensitive ordinal.
    /// </summary>
    private static bool MatchesPattern(string fileName, string pattern)
    {
        if (pattern.StartsWith("*"))
        {
            return fileName.EndsWith(pattern[1..], StringComparison.OrdinalIgnoreCase);
        }

        if (pattern.EndsWith("*"))
        {
            return fileName.StartsWith(pattern[..^1], StringComparison.OrdinalIgnoreCase);
        }

        return fileName.Equals(pattern, StringComparison.OrdinalIgnoreCase);
    }

    #endregion
}

View File

@@ -0,0 +1,174 @@
namespace StellaOps.Scanner.Analyzers.Plugin.Unified;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin.Abstractions;
using StellaOps.Plugin.Abstractions.Capabilities;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
/// <summary>
/// Factory for creating unified analyzer plugin adapters from existing analyzers.
/// </summary>
public sealed class AnalyzerPluginAdapterFactory
{
    private readonly ILanguageAnalyzerPluginCatalog _catalog;
    private readonly IServiceProvider _serviceProvider;
    private readonly Dictionary<string, AnalyzerPluginAdapter> _adapterCache = new(StringComparer.OrdinalIgnoreCase);
    private readonly object _gate = new();

    // File patterns recognized per analyzer id; used when wrapping an analyzer.
    private static readonly Dictionary<string, string[]> FilePatternTable = new(StringComparer.OrdinalIgnoreCase)
    {
        ["dotnet"] = ["*.csproj", "*.fsproj", "*.vbproj", "*.sln", "packages.config", "*.deps.json", "Directory.Packages.props"],
        ["go"] = ["go.mod", "go.sum", "Gopkg.lock", "Gopkg.toml"],
        ["java"] = ["pom.xml", "build.gradle", "build.gradle.kts", "*.jar"],
        ["node"] = ["package.json", "package-lock.json", "yarn.lock", "pnpm-lock.yaml"],
        ["python"] = ["requirements.txt", "Pipfile", "Pipfile.lock", "pyproject.toml", "poetry.lock", "setup.py"],
        ["ruby"] = ["Gemfile", "Gemfile.lock", "*.gemspec"],
        ["rust"] = ["Cargo.toml", "Cargo.lock"],
        ["php"] = ["composer.json", "composer.lock"],
        ["deno"] = ["deno.json", "deno.jsonc", "import_map.json"],
        ["bun"] = ["bun.lockb", "bunfig.toml"],
    };

    // Ecosystem identifiers advertised per analyzer id.
    private static readonly Dictionary<string, string[]> EcosystemTable = new(StringComparer.OrdinalIgnoreCase)
    {
        ["dotnet"] = ["nuget", "dotnet"],
        ["go"] = ["go", "golang"],
        ["java"] = ["maven", "gradle", "java"],
        ["node"] = ["npm", "yarn", "pnpm", "nodejs"],
        ["python"] = ["pypi", "pip", "poetry", "python"],
        ["ruby"] = ["rubygems", "bundler", "ruby"],
        ["rust"] = ["cargo", "crates.io", "rust"],
        ["php"] = ["composer", "packagist", "php"],
        ["deno"] = ["deno", "jsr"],
        ["bun"] = ["bun", "npm"],
    };

    /// <summary>
    /// Creates a new factory instance.
    /// </summary>
    /// <param name="catalog">The language analyzer plugin catalog.</param>
    /// <param name="serviceProvider">Service provider for DI.</param>
    public AnalyzerPluginAdapterFactory(
        ILanguageAnalyzerPluginCatalog catalog,
        IServiceProvider serviceProvider)
    {
        _catalog = catalog ?? throw new ArgumentNullException(nameof(catalog));
        _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
    }

    /// <summary>
    /// Gets all available unified analyzer plugins.
    /// </summary>
    /// <returns>List of unified analyzer plugins.</returns>
    public IReadOnlyList<IPlugin> GetAllPlugins()
    {
        var plugins = new List<IPlugin>();
        foreach (var analyzer in _catalog.CreateAnalyzers(_serviceProvider))
        {
            // Analyzers with no owning catalog plugin are silently skipped.
            if (GetOrCreateAdapter(analyzer) is { } adapter)
            {
                plugins.Add(adapter);
            }
        }

        return plugins;
    }

    /// <summary>
    /// Gets a unified analyzer plugin by analyzer ID.
    /// </summary>
    /// <param name="analyzerId">Analyzer identifier.</param>
    /// <returns>Unified analyzer plugin, or null if not found.</returns>
    public IPlugin? GetPlugin(string analyzerId)
    {
        foreach (var candidate in _catalog.CreateAnalyzers(_serviceProvider))
        {
            if (candidate.Id.Equals(analyzerId, StringComparison.OrdinalIgnoreCase))
            {
                return GetOrCreateAdapter(candidate);
            }
        }

        return null;
    }

    /// <summary>
    /// Gets the analysis capability for an analyzer.
    /// </summary>
    /// <param name="analyzerId">Analyzer identifier.</param>
    /// <returns>Analysis capability, or null if not found.</returns>
    public IAnalysisCapability? GetCapability(string analyzerId)
        => GetPlugin(analyzerId) as IAnalysisCapability;

    /// <summary>
    /// Returns the cached adapter for the analyzer, creating (and caching) one
    /// if its owning catalog plugin can be located; null otherwise.
    /// </summary>
    private AnalyzerPluginAdapter? GetOrCreateAdapter(ILanguageAnalyzer analyzer)
    {
        lock (_gate)
        {
            if (_adapterCache.TryGetValue(analyzer.Id, out var cached))
            {
                return cached;
            }

            // Locate the catalog plugin that produces an analyzer with this id.
            ILanguageAnalyzerPlugin? owner = null;
            foreach (var candidate in _catalog.Plugins)
            {
                try
                {
                    var produced = candidate.CreateAnalyzer(_serviceProvider);
                    if (produced?.Id.Equals(analyzer.Id, StringComparison.OrdinalIgnoreCase) == true)
                    {
                        owner = candidate;
                        break;
                    }
                }
                catch
                {
                    // A plugin that fails to create its analyzer simply does not match.
                }
            }

            if (owner is null)
            {
                return null;
            }

            var patterns = FilePatternTable.TryGetValue(analyzer.Id, out var knownPatterns)
                ? knownPatterns
                : Array.Empty<string>();
            var ecosystems = EcosystemTable.TryGetValue(analyzer.Id, out var knownEcosystems)
                ? knownEcosystems
                : new[] { analyzer.Id };

            var adapter = new AnalyzerPluginAdapter(
                analyzer,
                owner,
                _serviceProvider,
                patterns,
                ecosystems);
            _adapterCache[analyzer.Id] = adapter;
            return adapter;
        }
    }
}
/// <summary>
/// Extension methods for registering unified analyzer plugin services.
/// </summary>
public static class AnalyzerPluginAdapterExtensions
{
    /// <summary>
    /// Adds unified analyzer plugin adapter services to the service collection.
    /// </summary>
    /// <param name="services">Service collection.</param>
    /// <returns>Service collection for chaining.</returns>
    public static IServiceCollection AddUnifiedAnalyzerPlugins(this IServiceCollection services)
        => services.AddSingleton<AnalyzerPluginAdapterFactory>();
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- net10.0 + preview language version matches the rest of the StellaOps solution. -->
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<!-- Warnings (including CS1998 async-without-await) fail the build. -->
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Description>Unified plugin adapter for Scanner language analyzers</Description>
</PropertyGroup>
<ItemGroup>
<!-- Bridges the legacy Lang analyzers to the unified Plugin.Abstractions contracts. -->
<ProjectReference Include="..\__Libraries\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" />
<ProjectReference Include="..\..\Plugin\StellaOps.Plugin.Abstractions\StellaOps.Plugin.Abstractions.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,328 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-002 - Reachability Evidence Endpoints
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.Reachability.Jobs;
using StellaOps.Scanner.Reachability.Services;
using StellaOps.Scanner.Reachability.Vex;
namespace StellaOps.Scanner.WebService.Endpoints;
/// <summary>
/// Minimal API endpoints for reachability evidence operations.
/// </summary>
public static class ReachabilityEvidenceEndpoints
{
/// <summary>
/// Maps reachability evidence endpoints.
/// </summary>
public static IEndpointRouteBuilder MapReachabilityEvidenceEndpoints(
this IEndpointRouteBuilder routes)
{
var group = routes.MapGroup("/api/reachability")
.WithTags("Reachability Evidence");
// Analyze reachability for a CVE
group.MapPost("/analyze", AnalyzeAsync)
.WithName("AnalyzeReachability")
.WithSummary("Analyze reachability of a CVE in an image")
.Produces<ReachabilityAnalyzeResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound);
// Get job result
group.MapGet("/result/{jobId}", GetResultAsync)
.WithName("GetReachabilityResult")
.WithSummary("Get result of a reachability analysis job")
.Produces<ReachabilityResultResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound);
// Check if CVE has sink mappings
group.MapGet("/mapping/{cveId}", GetCveMappingAsync)
.WithName("GetCveMapping")
.WithSummary("Get CVE-to-symbol mappings for a vulnerability")
.Produces<CveMappingResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound);
// Get VEX statement from reachability
group.MapPost("/vex", GenerateVexAsync)
.WithName("GenerateVexFromReachability")
.WithSummary("Generate VEX statement from reachability analysis")
.Produces<VexStatementResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest);
return routes;
}
/// <summary>
/// Handles POST /api/reachability/analyze: validates the request, checks the CVE
/// has sink mappings, then executes a reachability evidence job synchronously.
/// Returns 400 on missing fields and 404 when no mappings exist for the CVE.
/// </summary>
private static async Task<IResult> AnalyzeAsync(
[FromBody] ReachabilityAnalyzeRequest request,
[FromServices] IReachabilityEvidenceJobExecutor executor,
[FromServices] ICveSymbolMappingService mappingService,
[FromServices] TimeProvider timeProvider,
CancellationToken ct)
{
if (string.IsNullOrWhiteSpace(request.ImageDigest) ||
string.IsNullOrWhiteSpace(request.CveId) ||
string.IsNullOrWhiteSpace(request.Purl))
{
return Results.Problem(
detail: "imageDigest, cveId, and purl are required",
statusCode: StatusCodes.Status400BadRequest);
}
// Check if we have mappings for this CVE
var hasMappings = await mappingService.HasMappingAsync(request.CveId, ct);
if (!hasMappings)
{
return Results.Problem(
detail: $"No sink mappings found for CVE {request.CveId}",
statusCode: StatusCodes.Status404NotFound);
}
// Create and execute job. Job id is deterministic over (digest, cve, purl).
var jobId = ReachabilityEvidenceJob.ComputeJobId(
request.ImageDigest, request.CveId, request.Purl);
var job = new ReachabilityEvidenceJob
{
JobId = jobId,
ImageDigest = request.ImageDigest,
CveId = request.CveId,
Purl = request.Purl,
SourceCommit = request.SourceCommit,
Options = new ReachabilityJobOptions
{
IncludeL2 = request.IncludeBinaryAnalysis,
IncludeL3 = request.IncludeRuntimeAnalysis,
MaxPathsPerSink = request.MaxPaths ?? 5,
MaxDepth = request.MaxDepth ?? 256
},
QueuedAt = timeProvider.GetUtcNow()
};
var result = await executor.ExecuteAsync(job, ct);
return Results.Ok(new ReachabilityAnalyzeResponse
{
JobId = result.JobId,
Status = result.Status.ToString(),
Verdict = result.Stack?.Verdict.ToString(),
EvidenceUri = result.EvidenceUri,
DurationMs = result.DurationMs,
Error = result.Error
});
}
/// <summary>
/// Handles GET /api/reachability/result/{jobId}: fetches a stored job result
/// and projects it to the response DTO. 404 when the job id is unknown.
/// </summary>
private static async Task<IResult> GetResultAsync(
[FromRoute] string jobId,
[FromServices] IEvidenceResultStore resultStore,
CancellationToken ct)
{
var result = await resultStore.GetResultAsync(jobId, ct);
if (result is null)
{
return Results.Problem(
detail: $"No result found for job {jobId}",
statusCode: StatusCodes.Status404NotFound);
}
return Results.Ok(new ReachabilityResultResponse
{
JobId = result.JobId,
Status = result.Status.ToString(),
Verdict = result.Stack?.Verdict.ToString(),
VerdictExplanation = result.Stack?.Explanation,
IsReachable = result.Stack?.StaticCallGraph.IsReachable,
PathCount = result.Stack?.StaticCallGraph.Paths.Length ?? 0,
EntrypointCount = result.Stack?.StaticCallGraph.ReachingEntrypoints.Length ?? 0,
EvidenceBundleId = result.EvidenceBundleId,
EvidenceUri = result.EvidenceUri,
CompletedAt = result.CompletedAt,
DurationMs = result.DurationMs,
Error = result.Error
});
}
/// <summary>
/// Handles GET /api/reachability/mapping/{cveId}: lists CVE-to-symbol mappings,
/// optionally filtered to a single package via the ?purl query parameter.
/// 404 when the CVE has no mappings.
/// </summary>
private static async Task<IResult> GetCveMappingAsync(
[FromRoute] string cveId,
[FromQuery] string? purl,
[FromServices] ICveSymbolMappingService mappingService,
CancellationToken ct)
{
var mappings = string.IsNullOrWhiteSpace(purl)
? await mappingService.GetAllMappingsForCveAsync(cveId, ct)
: await mappingService.GetSinksForCveAsync(cveId, purl, ct);
if (mappings.Count == 0)
{
return Results.Problem(
detail: $"No mappings found for CVE {cveId}",
statusCode: StatusCodes.Status404NotFound);
}
return Results.Ok(new CveMappingResponse
{
CveId = cveId,
MappingCount = mappings.Count,
Mappings = mappings.Select(m => new CveMappingItem
{
SymbolName = m.SymbolName,
CanonicalId = m.CanonicalId,
Purl = m.Purl,
FilePath = m.FilePath,
VulnType = m.VulnType.ToString(),
Confidence = m.Confidence,
Source = m.Source.ToString()
}).ToList()
});
}
/// <summary>
/// Handles POST /api/reachability/vex: derives a VEX statement from a completed
/// reachability job's evidence stack. 400 on missing fields, 404 when the job
/// has no stored result (or the result carries no verdict stack).
/// </summary>
private static async Task<IResult> GenerateVexAsync(
[FromBody] GenerateVexRequest request,
[FromServices] IEvidenceResultStore resultStore,
[FromServices] IVexStatusDeterminer vexDeterminer,
CancellationToken ct)
{
if (string.IsNullOrWhiteSpace(request.JobId) ||
string.IsNullOrWhiteSpace(request.ProductId))
{
return Results.Problem(
detail: "jobId and productId are required",
statusCode: StatusCodes.Status400BadRequest);
}
var result = await resultStore.GetResultAsync(request.JobId, ct);
if (result?.Stack is null)
{
return Results.Problem(
detail: $"No reachability result found for job {request.JobId}",
statusCode: StatusCodes.Status404NotFound);
}
var evidenceUris = result.EvidenceUri is not null
? new[] { result.EvidenceUri }
: Array.Empty<string>();
var statement = vexDeterminer.CreateStatement(
result.Stack,
request.ProductId,
evidenceUris);
return Results.Ok(new VexStatementResponse
{
StatementId = statement.StatementId,
VulnerabilityId = statement.VulnerabilityId,
ProductId = statement.ProductId,
Status = statement.Status.ToString(),
Justification = statement.Justification is not null ? new VexJustificationResponse
{
Category = statement.Justification.Category.ToString(),
Detail = statement.Justification.Detail,
Confidence = statement.Justification.Confidence,
EvidenceReferences = statement.Justification.EvidenceReferences.ToList()
} : null,
ActionStatement = statement.ActionStatement,
ImpactStatement = statement.ImpactStatement,
Timestamp = statement.Timestamp
});
}
}
// Request/Response DTOs
/// <summary>
/// Request body for POST /api/reachability/analyze. imageDigest, cveId and purl
/// are required; the remaining fields tune the analysis.
/// </summary>
public sealed record ReachabilityAnalyzeRequest
{
public string ImageDigest { get; init; } = string.Empty;
public string CveId { get; init; } = string.Empty;
public string Purl { get; init; } = string.Empty;
public string? SourceCommit { get; init; }
// Maps to ReachabilityJobOptions.IncludeL2 (binary layer).
public bool IncludeBinaryAnalysis { get; init; } = false;
// Maps to ReachabilityJobOptions.IncludeL3 (runtime layer).
public bool IncludeRuntimeAnalysis { get; init; } = false;
// Defaults applied by the endpoint when null: MaxPaths=5, MaxDepth=256.
public int? MaxPaths { get; init; }
public int? MaxDepth { get; init; }
}
/// <summary>
/// Response for POST /api/reachability/analyze: job identity, status, and the
/// verdict/evidence summary when the job completed.
/// </summary>
public sealed record ReachabilityAnalyzeResponse
{
public string JobId { get; init; } = string.Empty;
public string Status { get; init; } = string.Empty;
// Null when the job produced no evidence stack.
public string? Verdict { get; init; }
public string? EvidenceUri { get; init; }
public long? DurationMs { get; init; }
public string? Error { get; init; }
}
/// <summary>
/// Response for GET /api/reachability/result/{jobId}: the stored job outcome
/// including static call-graph reachability counts and evidence references.
/// </summary>
public sealed record ReachabilityResultResponse
{
public string JobId { get; init; } = string.Empty;
public string Status { get; init; } = string.Empty;
// Verdict fields are null when the job has no evidence stack.
public string? Verdict { get; init; }
public string? VerdictExplanation { get; init; }
public bool? IsReachable { get; init; }
// Counts default to 0 when no static call graph is present.
public int PathCount { get; init; }
public int EntrypointCount { get; init; }
public string? EvidenceBundleId { get; init; }
public string? EvidenceUri { get; init; }
public DateTimeOffset? CompletedAt { get; init; }
public long? DurationMs { get; init; }
public string? Error { get; init; }
}
/// <summary>
/// Response for GET /api/reachability/mapping/{cveId}: the CVE-to-symbol
/// mappings known for a vulnerability.
/// </summary>
public sealed record CveMappingResponse
{
public string CveId { get; init; } = string.Empty;
public int MappingCount { get; init; }
public List<CveMappingItem> Mappings { get; init; } = [];
}
/// <summary>
/// A single CVE-to-symbol mapping entry (projection of the service-side
/// mapping model into API-safe strings).
/// </summary>
public sealed record CveMappingItem
{
public string SymbolName { get; init; } = string.Empty;
public string? CanonicalId { get; init; }
public string Purl { get; init; } = string.Empty;
public string? FilePath { get; init; }
// Stringified enum values from the mapping service.
public string VulnType { get; init; } = string.Empty;
public decimal Confidence { get; init; }
public string Source { get; init; } = string.Empty;
}
/// <summary>
/// Request body for POST /api/reachability/vex. Both fields are required.
/// </summary>
public sealed record GenerateVexRequest
{
public string JobId { get; init; } = string.Empty;
public string ProductId { get; init; } = string.Empty;
}
/// <summary>
/// Response for POST /api/reachability/vex: the generated VEX statement.
/// </summary>
public sealed record VexStatementResponse
{
public string StatementId { get; init; } = string.Empty;
public string VulnerabilityId { get; init; } = string.Empty;
public string ProductId { get; init; } = string.Empty;
public string Status { get; init; } = string.Empty;
// Present only when the determiner produced a justification.
public VexJustificationResponse? Justification { get; init; }
public string? ActionStatement { get; init; }
public string? ImpactStatement { get; init; }
public DateTimeOffset Timestamp { get; init; }
}
/// <summary>
/// Justification attached to a VEX statement, with supporting evidence URIs.
/// </summary>
public sealed record VexJustificationResponse
{
public string Category { get; init; } = string.Empty;
public string Detail { get; init; } = string.Empty;
public decimal Confidence { get; init; }
public List<string> EvidenceReferences { get; init; } = [];
}
/// <summary>
/// Store for evidence job results.
/// </summary>
public interface IEvidenceResultStore
{
/// <summary>Returns the stored result for the job, or null if none exists.</summary>
Task<ReachabilityEvidenceJobResult?> GetResultAsync(string jobId, CancellationToken ct);
/// <summary>Persists a job result so it can later be fetched by job id.</summary>
Task StoreResultAsync(ReachabilityEvidenceJobResult result, CancellationToken ct);
}

View File

@@ -4,9 +4,9 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.Reachability;
using ScanAnalysisKeys = StellaOps.Scanner.Core.Contracts.ScanAnalysisKeys;
namespace StellaOps.Scanner.Worker.Processing.Reachability;

View File

@@ -0,0 +1,321 @@
// -----------------------------------------------------------------------------
// ReachabilityEvidenceStageExecutor.cs
// Sprint: EVID-001 - Reachability Evidence Pipeline
// Task: EVID-001-005
// Description: Scan stage executor that generates reachability evidence for CVEs.
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Jobs;
using StellaOps.Scanner.Reachability.Services;
using StellaOps.Scanner.Reachability.Vex;
using CoreScanAnalysisKeys = StellaOps.Scanner.Core.Contracts.ScanAnalysisKeys;
namespace StellaOps.Scanner.Worker.Processing.Reachability;
/// <summary>
/// Scan stage executor that generates reachability evidence for vulnerability findings.
/// Analyzes CVE reachability using the 3-layer model and emits VEX statements.
/// </summary>
public sealed class ReachabilityEvidenceStageExecutor : IScanStageExecutor
{
    private readonly IReachabilityEvidenceJobExecutor _jobExecutor;
    private readonly ICveSymbolMappingService _mappingService;
    private readonly IVexStatusDeterminer _vexDeterminer;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<ReachabilityEvidenceStageExecutor> _logger;

    public ReachabilityEvidenceStageExecutor(
        IReachabilityEvidenceJobExecutor jobExecutor,
        ICveSymbolMappingService mappingService,
        IVexStatusDeterminer vexDeterminer,
        TimeProvider timeProvider,
        ILogger<ReachabilityEvidenceStageExecutor> logger)
    {
        _jobExecutor = jobExecutor ?? throw new ArgumentNullException(nameof(jobExecutor));
        _mappingService = mappingService ?? throw new ArgumentNullException(nameof(mappingService));
        _vexDeterminer = vexDeterminer ?? throw new ArgumentNullException(nameof(vexDeterminer));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Stage name used by the scan pipeline to identify this executor.</summary>
    public string StageName => "reachability-evidence";

    /// <summary>
    /// Runs reachability analysis for each eligible CVE finding in the scan
    /// context, storing evidence results and derived VEX statements back into
    /// the analysis store. Per-CVE failures are logged and do not fail the stage.
    /// </summary>
    public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        // Extract CVE findings that have symbol mappings
        var cveFindings = await ExtractEligibleCveFindingsAsync(context, cancellationToken);
        if (cveFindings.Count == 0)
        {
            _logger.LogDebug(
                "No eligible CVE findings with symbol mappings for job {JobId}",
                context.JobId);
            return;
        }

        _logger.LogInformation(
            "Generating reachability evidence for {CveCount} CVEs in job {JobId}",
            cveFindings.Count,
            context.JobId);

        var results = new List<ReachabilityEvidenceJobResult>();
        var vexStatements = new List<VexStatement>();
        foreach (var (cveId, purl) in cveFindings)
        {
            try
            {
                var result = await ProcessCveAsync(context, cveId, purl, cancellationToken);
                if (result is not null)
                {
                    results.Add(result);

                    // Generate VEX statement if we have a verdict
                    if (result.Stack is not null)
                    {
                        var productId = $"{context.ImageDigest}:{purl}";
                        var evidenceUris = result.EvidenceUri is not null
                            ? new[] { result.EvidenceUri }
                            : Array.Empty<string>();
                        var vexStatement = _vexDeterminer.CreateStatement(
                            result.Stack,
                            productId,
                            evidenceUris);
                        vexStatements.Add(vexStatement);
                    }
                }
            }
            catch (Exception ex)
            {
                // Best-effort per CVE: one failure must not abort the whole stage.
                _logger.LogWarning(ex,
                    "Failed to process reachability for CVE {CveId} PURL {Purl} in job {JobId}",
                    cveId, purl, context.JobId);
            }
        }

        // Store results in analysis context
        if (results.Count > 0)
        {
            context.Analysis.Set(ReachabilityAnalysisKeys.ReachabilityEvidenceResults, results);
            _logger.LogInformation(
                "Reachability evidence generated for {Count} CVEs in job {JobId}",
                results.Count,
                context.JobId);
        }

        if (vexStatements.Count > 0)
        {
            context.Analysis.Set(ReachabilityAnalysisKeys.VexStatements, vexStatements);
            _logger.LogInformation(
                "Generated {Count} VEX statements for job {JobId}",
                vexStatements.Count,
                context.JobId);
        }
    }

    /// <summary>
    /// Filters the context's CVE+PURL pairs down to CVEs that have symbol
    /// mappings. Each CVE is processed at most once (first PURL wins).
    /// </summary>
    private async Task<List<(string CveId, string Purl)>> ExtractEligibleCveFindingsAsync(
        ScanJobContext context,
        CancellationToken cancellationToken)
    {
        var eligibleFindings = new List<(string CveId, string Purl)>();
        var seenCves = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var (cveId, purl) in ExtractCvePurlPairs(context))
        {
            // HasMappingAsync is keyed by CVE id only, so mark every CVE as seen
            // regardless of outcome; this avoids re-querying the mapping service
            // when the same CVE appears against multiple packages. Add returns
            // false for a CVE that was already checked.
            if (!seenCves.Add(cveId))
                continue;

            var hasMapping = await _mappingService.HasMappingAsync(cveId, cancellationToken);
            if (hasMapping)
            {
                eligibleFindings.Add((cveId, purl));
            }
        }

        return eligibleFindings;
    }

    /// <summary>
    /// Builds and executes a reachability evidence job for one CVE+PURL pair.
    /// Returns null when the context has no image digest to analyze against.
    /// </summary>
    private async Task<ReachabilityEvidenceJobResult?> ProcessCveAsync(
        ScanJobContext context,
        string cveId,
        string purl,
        CancellationToken cancellationToken)
    {
        var imageDigest = context.ImageDigest;
        if (string.IsNullOrEmpty(imageDigest))
        {
            _logger.LogWarning("Cannot process reachability for CVE {CveId}: no image digest available", cveId);
            return null;
        }

        var jobId = ReachabilityEvidenceJob.ComputeJobId(
            imageDigest,
            cveId,
            purl);
        var job = new ReachabilityEvidenceJob
        {
            JobId = jobId,
            ImageDigest = imageDigest,
            CveId = cveId,
            Purl = purl,
            SourceCommit = context.Analysis.TryGet<string>(ReachabilityAnalysisKeys.SourceCommit, out var commit)
                ? commit
                : null,
            Options = new ReachabilityJobOptions
            {
                IncludeL2 = false, // Requires binary paths not available in scan context
                IncludeL3 = false, // Requires container ID
                MaxPathsPerSink = 5,
                MaxDepth = 256
            },
            QueuedAt = _timeProvider.GetUtcNow()
        };

        _logger.LogDebug(
            "Executing reachability evidence job {JobId} for CVE {CveId}",
            jobId, cveId);
        var result = await _jobExecutor.ExecuteAsync(job, cancellationToken);
        if (result.Status == JobStatus.Completed && result.Stack is not null)
        {
            _logger.LogDebug(
                "Reachability verdict for CVE {CveId}: {Verdict}",
                cveId, result.Stack.Verdict);
        }

        return result;
    }

    /// <summary>
    /// Collects CVE+PURL pairs from the OS-package and language analyzer
    /// results stored in the analysis context. Results are loosely typed
    /// dictionaries, so values are inspected reflectively.
    /// </summary>
    private static List<(string CveId, string Purl)> ExtractCvePurlPairs(ScanJobContext context)
    {
        var pairs = new List<(string CveId, string Purl)>();

        // Extract from OS package analyzer results
        if (context.Analysis.TryGet<object>(CoreScanAnalysisKeys.OsPackageAnalyzers, out var osResults) &&
            osResults is System.Collections.IDictionary osDictionary)
        {
            foreach (var analyzerResult in osDictionary.Values)
            {
                if (analyzerResult is not null)
                {
                    ExtractPairsFromAnalyzerResult(analyzerResult, pairs);
                }
            }
        }

        // Extract from language analyzer results
        if (context.Analysis.TryGet<object>(CoreScanAnalysisKeys.LanguageAnalyzerResults, out var langResults) &&
            langResults is System.Collections.IDictionary langDictionary)
        {
            foreach (var analyzerResult in langDictionary.Values)
            {
                if (analyzerResult is not null)
                {
                    ExtractPairsFromAnalyzerResult(analyzerResult, pairs);
                }
            }
        }

        return pairs;
    }

    /// <summary>
    /// Reflectively reads "Vulnerabilities" and "Findings" collections from an
    /// analyzer result object (concrete result types vary per analyzer).
    /// </summary>
    private static void ExtractPairsFromAnalyzerResult(
        object analyzerResult,
        List<(string CveId, string Purl)> pairs)
    {
        var resultType = analyzerResult.GetType();

        // Try to get Vulnerabilities property
        var vulnsProperty = resultType.GetProperty("Vulnerabilities");
        if (vulnsProperty?.GetValue(analyzerResult) is IEnumerable<object> vulns)
        {
            foreach (var vuln in vulns)
            {
                ExtractPairFromFinding(vuln, pairs);
            }
        }

        // Try to get Findings property
        var findingsProperty = resultType.GetProperty("Findings");
        if (findingsProperty?.GetValue(analyzerResult) is IEnumerable<object> findingsList)
        {
            foreach (var finding in findingsList)
            {
                ExtractPairFromFinding(finding, pairs);
            }
        }
    }

    /// <summary>
    /// Reflectively extracts a CVE id (from "CveId" or a CVE-prefixed
    /// "VulnerabilityId") and a PURL (from "Purl" or "PackageUrl") from a
    /// finding object; appends the pair only when both are present.
    /// </summary>
    private static void ExtractPairFromFinding(
        object finding,
        List<(string CveId, string Purl)> pairs)
    {
        var findingType = finding.GetType();
        string? cveId = null;
        string? purl = null;

        // Try CveId property
        var cveIdProperty = findingType.GetProperty("CveId");
        if (cveIdProperty?.GetValue(finding) is string cve && !string.IsNullOrWhiteSpace(cve))
        {
            cveId = cve;
        }

        // Try VulnerabilityId property, accepting only CVE-prefixed ids
        if (cveId is null)
        {
            var vulnIdProperty = findingType.GetProperty("VulnerabilityId");
            if (vulnIdProperty?.GetValue(finding) is string vulnId &&
                !string.IsNullOrWhiteSpace(vulnId) &&
                vulnId.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
            {
                cveId = vulnId;
            }
        }

        // Try Purl property
        var purlProperty = findingType.GetProperty("Purl");
        if (purlProperty?.GetValue(finding) is string p && !string.IsNullOrWhiteSpace(p))
        {
            purl = p;
        }

        // Try PackageUrl property
        if (purl is null)
        {
            var packageUrlProperty = findingType.GetProperty("PackageUrl");
            if (packageUrlProperty?.GetValue(finding) is string pkg && !string.IsNullOrWhiteSpace(pkg))
            {
                purl = pkg;
            }
        }

        if (!string.IsNullOrWhiteSpace(cveId) && !string.IsNullOrWhiteSpace(purl))
        {
            pairs.Add((cveId, purl));
        }
    }
}
/// <summary>
/// Analysis keys for reachability evidence stage.
/// </summary>
public static class ReachabilityAnalysisKeys
{
/// <summary>Key for the list of per-CVE reachability evidence job results.</summary>
public const string ReachabilityEvidenceResults = "reachability.evidence.results";
/// <summary>Key for VEX statements derived from reachability verdicts.</summary>
public const string VexStatements = "reachability.vex.statements";
/// <summary>Key under which an upstream stage may publish the source commit.</summary>
public const string SourceCommit = "source.commit";
}

View File

@@ -2,8 +2,8 @@ using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Reachability;
using ScanAnalysisKeys = StellaOps.Scanner.Core.Contracts.ScanAnalysisKeys;
namespace StellaOps.Scanner.Worker.Processing.Reachability;

View File

@@ -6,6 +6,8 @@ namespace StellaOps.Scanner.Worker.Processing;
public sealed class ScanJobContext
{
private const string ImageDigestMetadataKey = "image.digest";
public ScanJobContext(IScanJobLease lease, TimeProvider timeProvider, DateTimeOffset startUtc, CancellationToken cancellationToken)
{
Lease = lease ?? throw new ArgumentNullException(nameof(lease));
@@ -27,6 +29,12 @@ public sealed class ScanJobContext
public string ScanId => Lease.ScanId;
/// <summary>
/// Gets the OCI image digest from job metadata, if available.
/// </summary>
public string? ImageDigest =>
Lease.Metadata.TryGetValue(ImageDigestMetadataKey, out var digest) ? digest : null;
public string? ReplayBundlePath { get; set; }
public ScanAnalysisStore Analysis { get; }

View File

@@ -35,6 +35,7 @@ using StellaOps.Scanner.Storage.Extensions;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Services;
using Reachability = StellaOps.Scanner.Worker.Processing.Reachability;
using ReachabilityEvidenceStageExecutor = StellaOps.Scanner.Worker.Processing.Reachability.ReachabilityEvidenceStageExecutor;
using GateDetectors = StellaOps.Scanner.Reachability.Gates.Detectors;
var builder = Host.CreateApplicationBuilder(args);
@@ -127,6 +128,10 @@ if (!string.IsNullOrWhiteSpace(connectionString))
builder.Services.AddSingleton<IScanStageExecutor, SurfaceManifestStageExecutor>();
builder.Services.AddSingleton<IDsseEnvelopeSigner, HmacDsseEnvelopeSigner>();
// Reachability Evidence Pipeline (Sprint: EVID-001)
builder.Services.AddReachabilityEvidence(connectionString);
builder.Services.AddSingleton<IScanStageExecutor, ReachabilityEvidenceStageExecutor>();
// EPSS ingestion job (Sprint: SPRINT_3410_0001_0001)
builder.Services.AddOptions<EpssIngestOptions>()
.BindConfiguration(EpssIngestOptions.SectionName)

View File

@@ -1,86 +1,4 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Tuning knobs for <see cref="ReachabilityAnalyzer"/>: traversal limits and
/// deterministic ordering rules for emitted paths.
/// </summary>
/// <remarks>
/// Determinism guarantees:
/// - Paths are ordered by (SinkId ASC, EntrypointId ASC, PathLength ASC)
/// - Node IDs within a path run from entrypoint to sink (caller to callee)
/// - Hard caps keep the output bounded
/// </remarks>
public sealed record ReachabilityAnalysisOptions
{
    /// <summary>Shared instance carrying the default limits.</summary>
    public static ReachabilityAnalysisOptions Default { get; } = new();

    /// <summary>BFS depth limit (0 = unlimited, default 256); guards against cycles.</summary>
    public int MaxDepth { get; init; } = 256;

    /// <summary>Per-sink path cap (default 10); curbs witness explosion when many entrypoints hit one sink.</summary>
    public int MaxPathsPerSink { get; init; } = 10;

    /// <summary>Global path cap (default 100); protects memory on highly connected graphs.</summary>
    public int MaxTotalPaths { get; init; } = 100;

    /// <summary>When true (default), reconstructed paths carry node metadata; otherwise bare node IDs only.</summary>
    public bool IncludeNodeMetadata { get; init; } = true;

    /// <summary>
    /// Optional explicit sink node IDs to target instead of snapshot.SinkIds,
    /// enabling targeted witness generation for specific trigger methods.
    /// </summary>
    public ImmutableArray<string>? ExplicitSinks { get; init; }

    /// <summary>
    /// Returns a copy with limits clamped into their supported ranges and
    /// explicit sinks trimmed, de-duplicated, and ordinally sorted.
    /// </summary>
    public ReachabilityAnalysisOptions Validated()
    {
        // Non-positive values fall back to the default; oversized values are capped.
        static int Clamp(int value, int fallback, int max)
            => value <= 0 ? fallback : Math.Min(value, max);

        ImmutableArray<string>? sinks = null;
        if (ExplicitSinks is { IsDefaultOrEmpty: false } raw)
        {
            sinks = raw
                .Where(static s => !string.IsNullOrWhiteSpace(s))
                .Select(static s => s.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(static s => s, StringComparer.Ordinal)
                .ToImmutableArray();
        }

        return new ReachabilityAnalysisOptions
        {
            MaxDepth = Clamp(MaxDepth, 256, 1024),
            MaxPathsPerSink = Clamp(MaxPathsPerSink, 10, 100),
            MaxTotalPaths = Clamp(MaxTotalPaths, 100, 1000),
            IncludeNodeMetadata = IncludeNodeMetadata,
            ExplicitSinks = sinks
        };
    }
}
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// ReachabilityAnalysisOptions is now defined in StellaOps.Scanner.Contracts.
// This file exists only for file system tracking - the type is imported via global using.

View File

@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph;

View File

@@ -8,7 +8,7 @@ using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.CallGraph.Binary.Analysis;
using StellaOps.Scanner.CallGraph.Binary.Disassembly;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Binary;

View File

@@ -4,7 +4,7 @@
// Description: Classifies binary symbols as entrypoints based on naming patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Binary;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Bun;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Bun functions as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Bun;

View File

@@ -4,7 +4,7 @@
// Description: Matches Bun/JS function calls to security sink categories.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Bun;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Deno;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Deno functions as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Deno;

View File

@@ -4,7 +4,7 @@
// Description: Matches Deno function calls to security sink categories.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Deno;

View File

@@ -4,7 +4,7 @@ using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.MSBuild;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.DotNet;

View File

@@ -8,7 +8,7 @@ using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Go;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Go functions as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Go;

View File

@@ -4,7 +4,7 @@
// Description: Matches Go function calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Go;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.IO.Compression;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Java;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Java methods as entrypoints based on framework annotations.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Java;

View File

@@ -4,7 +4,7 @@
// Description: Matches Java method calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Java;

View File

@@ -8,7 +8,7 @@ using System.Collections.Immutable;
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.JavaScript;

View File

@@ -4,7 +4,7 @@
// Description: Classifies JavaScript/TypeScript functions as entrypoints.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.JavaScript;

View File

@@ -4,7 +4,7 @@
// Description: Matches JavaScript/TypeScript function calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.JavaScript;

View File

@@ -2,7 +2,7 @@ using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Node;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Php;

View File

@@ -4,7 +4,7 @@
// Description: Classifies PHP functions as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Php;

View File

@@ -4,7 +4,7 @@
// Description: Matches PHP function calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Php;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Python;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Python functions as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Python;

View File

@@ -4,7 +4,7 @@
// Description: Matches Python function calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Python;

View File

@@ -7,7 +7,7 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Ruby;

View File

@@ -4,7 +4,7 @@
// Description: Classifies Ruby methods as entrypoints based on framework patterns.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Ruby;

View File

@@ -4,7 +4,7 @@
// Description: Matches Ruby method calls to known security sinks.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Contracts;
namespace StellaOps.Scanner.CallGraph.Ruby;

View File

@@ -1,516 +1,13 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.CallGraph.Serialization;
using StellaOps.Scanner.Reachability;
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Re-exports types from Contracts for backward compatibility.
namespace StellaOps.Scanner.CallGraph;
// All call graph models are now defined in StellaOps.Scanner.Contracts.
// This file provides type aliases for backward compatibility with code
// that uses the StellaOps.Scanner.CallGraph namespace.
/// <summary>
/// Immutable snapshot of an extracted call graph for one scan/language pair:
/// nodes, edges, and the entrypoint/sink node-id sets.
/// </summary>
public sealed record CallGraphSnapshot(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("extractedAt")] DateTimeOffset ExtractedAt,
    [property: JsonPropertyName("nodes")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<CallGraphNode>))]
    ImmutableArray<CallGraphNode> Nodes,
    [property: JsonPropertyName("edges")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<CallGraphEdge>))]
    ImmutableArray<CallGraphEdge> Edges,
    [property: JsonPropertyName("entrypointIds")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
    ImmutableArray<string> EntrypointIds,
    [property: JsonPropertyName("sinkIds")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
    ImmutableArray<string> SinkIds)
{
    /// <summary>
    /// Returns a canonical copy: strings trimmed, blank entries dropped,
    /// edges/ids ordinally sorted and id sets de-duplicated, so downstream
    /// digests are deterministic.
    /// </summary>
    public CallGraphSnapshot Trimmed()
    {
        // Default (uninitialized) arrays are treated as empty throughout.
        var nodes = (Nodes.IsDefault ? ImmutableArray<CallGraphNode>.Empty : Nodes)
            .Where(n => !string.IsNullOrWhiteSpace(n.NodeId))
            .Select(n => n.Trimmed())
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .ToImmutableArray();
        // Edges sort by (source, target, kind, call site) for a stable order.
        var edges = (Edges.IsDefault ? ImmutableArray<CallGraphEdge>.Empty : Edges)
            .Where(e => !string.IsNullOrWhiteSpace(e.SourceId) && !string.IsNullOrWhiteSpace(e.TargetId))
            .Select(e => e.Trimmed())
            .OrderBy(e => e.SourceId, StringComparer.Ordinal)
            .ThenBy(e => e.TargetId, StringComparer.Ordinal)
            .ThenBy(e => e.CallKind.ToString(), StringComparer.Ordinal)
            .ThenBy(e => e.CallSite ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();
        var entrypoints = (EntrypointIds.IsDefault ? ImmutableArray<string>.Empty : EntrypointIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        var sinks = (SinkIds.IsDefault ? ImmutableArray<string>.Empty : SinkIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            Nodes = nodes,
            Edges = edges,
            EntrypointIds = entrypoints,
            SinkIds = sinks
        };
    }
}
/// <summary>
/// A node in the call graph: one symbol with its location and its
/// entrypoint/sink classification.
/// </summary>
public sealed record CallGraphNode(
    [property: JsonPropertyName("nodeId")] string NodeId,
    [property: JsonPropertyName("symbol")] string Symbol,
    [property: JsonPropertyName("file")] string File,
    [property: JsonPropertyName("line")] int Line,
    [property: JsonPropertyName("package")] string Package,
    [property: JsonPropertyName("visibility")] Visibility Visibility,
    [property: JsonPropertyName("isEntrypoint")] bool IsEntrypoint,
    [property: JsonPropertyName("entrypointType")] EntrypointType? EntrypointType,
    [property: JsonPropertyName("isSink")] bool IsSink,
    [property: JsonPropertyName("sinkCategory")] SinkCategory? SinkCategory)
{
    /// <summary>Copy with string members trimmed; null collapses to the empty string.</summary>
    public CallGraphNode Trimmed()
    {
        return this with
        {
            NodeId = NodeId?.Trim() ?? string.Empty,
            Symbol = Symbol?.Trim() ?? string.Empty,
            File = File?.Trim() ?? string.Empty,
            Package = Package?.Trim() ?? string.Empty
        };
    }
}
/// <summary>
/// A directed call edge between two call graph nodes, with an optional call
/// site and an optional explanation of why the edge exists.
/// </summary>
public sealed record CallGraphEdge(
    [property: JsonPropertyName("sourceId")] string SourceId,
    [property: JsonPropertyName("targetId")] string TargetId,
    [property: JsonPropertyName("callKind")] CallKind CallKind,
    [property: JsonPropertyName("callSite")] string? CallSite = null,
    [property: JsonPropertyName("explanation")] CallEdgeExplanation? Explanation = null)
{
    /// <summary>
    /// Copy with endpoint ids trimmed, a blank call site collapsed to null,
    /// and the explanation (when present) trimmed as well.
    /// </summary>
    public CallGraphEdge Trimmed()
    {
        return this with
        {
            SourceId = SourceId?.Trim() ?? string.Empty,
            TargetId = TargetId?.Trim() ?? string.Empty,
            CallSite = string.IsNullOrWhiteSpace(CallSite) ? null : CallSite.Trim(),
            Explanation = Explanation?.Trimmed()
        };
    }
}
/// <summary>Declared visibility of a call graph node's symbol (serialized as a string).</summary>
[JsonConverter(typeof(JsonStringEnumConverter<Visibility>))]
public enum Visibility
{
    Public,
    Internal,
    Protected,
    Private
}
/// <summary>How a call edge is dispatched (serialized as a string).</summary>
[JsonConverter(typeof(JsonStringEnumConverter<CallKind>))]
public enum CallKind
{
    /// <summary>Statically bound direct call.</summary>
    Direct,
    /// <summary>Virtual/interface dispatch.</summary>
    Virtual,
    /// <summary>Invocation through a delegate or function value.</summary>
    Delegate,
    /// <summary>Reflective invocation.</summary>
    Reflection,
    /// <summary>Late-bound/dynamic call.</summary>
    Dynamic,
    /// <summary>Call through a PLT (procedure linkage table) entry.</summary>
    Plt,
    /// <summary>Call through an IAT (import address table) entry.</summary>
    Iat
}
/// <summary>
/// Kinds of evidence that justify a call graph edge's existence
/// (serialized as a string).
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<CallEdgeExplanationType>))]
public enum CallEdgeExplanationType
{
    /// <summary>Static import (ES6 import, Python import, using directive).</summary>
    Import,
    /// <summary>Dynamic load (require(), dlopen, LoadLibrary).</summary>
    DynamicLoad,
    /// <summary>Reflection invocation (Class.forName, Type.GetType).</summary>
    Reflection,
    /// <summary>Foreign function interface (JNI, P/Invoke, ctypes).</summary>
    Ffi,
    /// <summary>Environment variable guard (process.env.X, os.environ.get).</summary>
    EnvGuard,
    /// <summary>Feature flag check (LaunchDarkly, unleash, custom flags).</summary>
    FeatureFlag,
    /// <summary>Platform/architecture guard (process.platform, runtime.GOOS).</summary>
    PlatformArch,
    /// <summary>Taint gate (sanitization, validation).</summary>
    TaintGate,
    /// <summary>Loader rule (PLT/IAT/GOT entry).</summary>
    LoaderRule,
    /// <summary>Direct call (static, virtual, delegate).</summary>
    DirectCall,
    /// <summary>Cannot determine explanation type.</summary>
    Unknown
}
/// <summary>
/// Explanation for why an edge exists in the call graph: an evidence type,
/// a confidence score in [0, 1], an optional guard expression, and optional
/// extra metadata.
/// </summary>
public sealed record CallEdgeExplanation(
    [property: JsonPropertyName("type")] CallEdgeExplanationType Type,
    [property: JsonPropertyName("confidence")] double Confidence,
    [property: JsonPropertyName("guard")] string? Guard = null,
    [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string>? Metadata = null)
{
    /// <summary>
    /// Creates a simple direct call explanation with full confidence.
    /// </summary>
    public static CallEdgeExplanation DirectCall() =>
        new(CallEdgeExplanationType.DirectCall, 1.0);
    /// <summary>
    /// Creates an import explanation with full confidence.
    /// The optional <paramref name="location"/> (import site) is recorded as the
    /// guard; previously this parameter was accepted but silently ignored.
    /// </summary>
    public static CallEdgeExplanation Import(string? location = null) =>
        new(CallEdgeExplanationType.Import, 1.0, location);
    /// <summary>
    /// Creates a dynamic load explanation with medium confidence.
    /// </summary>
    public static CallEdgeExplanation DynamicLoad(double confidence = 0.5) =>
        new(CallEdgeExplanationType.DynamicLoad, confidence);
    /// <summary>
    /// Creates an environment guard explanation.
    /// </summary>
    public static CallEdgeExplanation EnvGuard(string guard, double confidence = 0.9) =>
        new(CallEdgeExplanationType.EnvGuard, confidence, guard);
    /// <summary>
    /// Creates a feature flag explanation.
    /// </summary>
    public static CallEdgeExplanation FeatureFlag(string flag, double confidence = 0.85) =>
        new(CallEdgeExplanationType.FeatureFlag, confidence, flag);
    /// <summary>
    /// Creates a platform/architecture guard explanation.
    /// </summary>
    public static CallEdgeExplanation PlatformArch(string platform, double confidence = 0.95) =>
        new(CallEdgeExplanationType.PlatformArch, confidence, $"platform={platform}");
    /// <summary>
    /// Creates a reflection explanation.
    /// </summary>
    public static CallEdgeExplanation ReflectionCall(double confidence = 0.5) =>
        new(CallEdgeExplanationType.Reflection, confidence);
    /// <summary>
    /// Creates a loader rule explanation (PLT/IAT/GOT) carrying the loader type
    /// in its metadata.
    /// </summary>
    public static CallEdgeExplanation LoaderRule(string loaderType, ImmutableDictionary<string, string>? metadata = null) =>
        new(CallEdgeExplanationType.LoaderRule, 0.8, null, metadata ?? ImmutableDictionary<string, string>.Empty.Add("loader", loaderType));
    /// <summary>Copy with a blank guard collapsed to null.</summary>
    public CallEdgeExplanation Trimmed() =>
        this with
        {
            Guard = string.IsNullOrWhiteSpace(Guard) ? null : Guard.Trim()
        };
}
/// <summary>
/// Category of entrypoint that roots reachability analysis
/// (serialized as a string).
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<EntrypointType>))]
public enum EntrypointType
{
    HttpHandler,
    GrpcMethod,
    CliCommand,
    BackgroundJob,
    ScheduledJob,
    MessageHandler,
    EventSubscriber,
    WebSocketHandler,
    EventHandler,
    Lambda,
    Unknown
}
/// <summary>
/// Computes deterministic SHA-256 digests over canonical JSON renderings of
/// call graph snapshots and reachability analysis results.
/// </summary>
public static class CallGraphDigests
{
    // Compact (non-indented) writer with relaxed escaping so the serialized
    // byte stream — and therefore the digest — is stable across runs.
    private static readonly JsonWriterOptions CanonicalJsonOptions = new()
    {
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Indented = false,
        SkipValidation = false
    };
    /// <summary>
    /// Digest of a snapshot's content ("stellaops.callgraph@v1" payload),
    /// canonicalized via <see cref="CallGraphSnapshot.Trimmed"/> first.
    /// </summary>
    /// <returns>Digest in the form "sha256:&lt;lowercase hex&gt;".</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="snapshot"/> is null.</exception>
    public static string ComputeGraphDigest(CallGraphSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        var trimmed = snapshot.Trimmed();
        using var buffer = new MemoryStream(capacity: 64 * 1024);
        using (var writer = new Utf8JsonWriter(buffer, CanonicalJsonOptions))
        {
            WriteDigestPayload(writer, trimmed);
            writer.Flush();
        }
        var hash = SHA256.HashData(buffer.ToArray());
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
    /// <summary>
    /// Digest of a reachability result ("stellaops.reachability@v1" payload),
    /// canonicalized via <see cref="ReachabilityAnalysisResult.Trimmed"/> first.
    /// </summary>
    /// <returns>Digest in the form "sha256:&lt;lowercase hex&gt;".</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="result"/> is null.</exception>
    public static string ComputeResultDigest(ReachabilityAnalysisResult result)
    {
        ArgumentNullException.ThrowIfNull(result);
        var trimmed = result.Trimmed();
        using var buffer = new MemoryStream(capacity: 64 * 1024);
        using (var writer = new Utf8JsonWriter(buffer, CanonicalJsonOptions))
        {
            WriteDigestPayload(writer, trimmed);
            writer.Flush();
        }
        var hash = SHA256.HashData(buffer.ToArray());
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
    // Writes the canonical snapshot payload. Note: scanId, extractedAt and
    // graphDigest are not written, so the digest depends only on the graph
    // content itself.
    private static void WriteDigestPayload(Utf8JsonWriter writer, CallGraphSnapshot snapshot)
    {
        writer.WriteStartObject();
        writer.WriteString("schema", "stellaops.callgraph@v1");
        writer.WriteString("language", snapshot.Language);
        writer.WritePropertyName("nodes");
        writer.WriteStartArray();
        foreach (var node in snapshot.Nodes)
        {
            writer.WriteStartObject();
            writer.WriteString("nodeId", node.NodeId);
            writer.WriteString("symbol", node.Symbol);
            writer.WriteString("file", node.File);
            writer.WriteNumber("line", node.Line);
            writer.WriteString("package", node.Package);
            writer.WriteString("visibility", node.Visibility.ToString());
            writer.WriteBoolean("isEntrypoint", node.IsEntrypoint);
            // Optional fields are omitted entirely (not written as null) so
            // absent values cannot perturb the canonical bytes.
            if (node.EntrypointType is not null)
            {
                writer.WriteString("entrypointType", node.EntrypointType.Value.ToString());
            }
            writer.WriteBoolean("isSink", node.IsSink);
            if (node.SinkCategory is not null)
            {
                writer.WriteString("sinkCategory", node.SinkCategory.Value.ToString());
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("edges");
        writer.WriteStartArray();
        foreach (var edge in snapshot.Edges)
        {
            writer.WriteStartObject();
            writer.WriteString("sourceId", edge.SourceId);
            writer.WriteString("targetId", edge.TargetId);
            writer.WriteString("callKind", edge.CallKind.ToString());
            if (!string.IsNullOrWhiteSpace(edge.CallSite))
            {
                writer.WriteString("callSite", edge.CallSite);
            }
            if (edge.Explanation is not null)
            {
                writer.WritePropertyName("explanation");
                writer.WriteStartObject();
                writer.WriteString("type", edge.Explanation.Type.ToString());
                writer.WriteNumber("confidence", edge.Explanation.Confidence);
                if (!string.IsNullOrWhiteSpace(edge.Explanation.Guard))
                {
                    writer.WriteString("guard", edge.Explanation.Guard);
                }
                if (edge.Explanation.Metadata is { Count: > 0 })
                {
                    writer.WritePropertyName("metadata");
                    writer.WriteStartObject();
                    // Metadata keys are ordinal-sorted for a stable byte stream.
                    foreach (var kv in edge.Explanation.Metadata.OrderBy(kv => kv.Key, StringComparer.Ordinal))
                    {
                        writer.WriteString(kv.Key, kv.Value);
                    }
                    writer.WriteEndObject();
                }
                writer.WriteEndObject();
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("entrypointIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.EntrypointIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("sinkIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.SinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }
    // Writes the canonical result payload. Note: scanId, computedAt and
    // resultDigest are not written, so the digest depends only on the
    // analysis outcome.
    private static void WriteDigestPayload(Utf8JsonWriter writer, ReachabilityAnalysisResult result)
    {
        writer.WriteStartObject();
        writer.WriteString("schema", "stellaops.reachability@v1");
        writer.WriteString("graphDigest", result.GraphDigest);
        writer.WriteString("language", result.Language);
        writer.WritePropertyName("reachableNodeIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableNodeIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("reachableSinkIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableSinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("paths");
        writer.WriteStartArray();
        foreach (var path in result.Paths)
        {
            writer.WriteStartObject();
            writer.WriteString("entrypointId", path.EntrypointId);
            writer.WriteString("sinkId", path.SinkId);
            writer.WritePropertyName("nodeIds");
            writer.WriteStartArray();
            foreach (var nodeId in path.NodeIds)
            {
                writer.WriteStringValue(nodeId);
            }
            writer.WriteEndArray();
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }
}
/// <summary>
/// One entrypoint-to-sink witness path through the call graph; node ids run
/// from entrypoint to sink.
/// </summary>
public sealed record ReachabilityPath(
    [property: JsonPropertyName("entrypointId")] string EntrypointId,
    [property: JsonPropertyName("sinkId")] string SinkId,
    [property: JsonPropertyName("nodeIds")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
    ImmutableArray<string> NodeIds)
{
    /// <summary>
    /// Copy with endpoint ids trimmed and blank node ids dropped; node order is
    /// preserved (it encodes the call chain).
    /// </summary>
    public ReachabilityPath Trimmed()
    {
        var cleaned = ImmutableArray.CreateBuilder<string>();
        if (!NodeIds.IsDefault)
        {
            foreach (var id in NodeIds)
            {
                if (!string.IsNullOrWhiteSpace(id))
                {
                    cleaned.Add(id.Trim());
                }
            }
        }

        return this with
        {
            EntrypointId = EntrypointId?.Trim() ?? string.Empty,
            SinkId = SinkId?.Trim() ?? string.Empty,
            NodeIds = cleaned.ToImmutable()
        };
    }
}
/// <summary>
/// Outcome of reachability analysis for one snapshot: reachable node/sink id
/// sets, witness paths, and a content digest over the result.
/// </summary>
public sealed record ReachabilityAnalysisResult(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("computedAt")] DateTimeOffset ComputedAt,
    [property: JsonPropertyName("reachableNodeIds")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
    ImmutableArray<string> ReachableNodeIds,
    [property: JsonPropertyName("reachableSinkIds")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
    ImmutableArray<string> ReachableSinkIds,
    [property: JsonPropertyName("paths")]
    [property: JsonConverter(typeof(ImmutableArrayJsonConverter<ReachabilityPath>))]
    ImmutableArray<ReachabilityPath> Paths,
    [property: JsonPropertyName("resultDigest")] string ResultDigest)
{
    /// <summary>
    /// Returns a canonical copy: strings trimmed, id sets de-duplicated and
    /// ordinally sorted, and paths ordered by (sink, entrypoint) so digests
    /// are deterministic.
    /// </summary>
    public ReachabilityAnalysisResult Trimmed()
    {
        // Shared normalizer for both id sets: drop blanks, trim, dedupe, sort.
        static ImmutableArray<string> NormalizeIds(ImmutableArray<string> ids)
            => (ids.IsDefault ? ImmutableArray<string>.Empty : ids)
                .Where(static id => !string.IsNullOrWhiteSpace(id))
                .Select(static id => id.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(static id => id, StringComparer.Ordinal)
                .ToImmutableArray();

        var orderedPaths = (Paths.IsDefault ? ImmutableArray<ReachabilityPath>.Empty : Paths)
            .Select(static p => p.Trimmed())
            .OrderBy(static p => p.SinkId, StringComparer.Ordinal)
            .ThenBy(static p => p.EntrypointId, StringComparer.Ordinal)
            .ToImmutableArray();

        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            ResultDigest = ResultDigest?.Trim() ?? string.Empty,
            ReachableNodeIds = NormalizeIds(ReachableNodeIds),
            ReachableSinkIds = NormalizeIds(ReachableSinkIds),
            Paths = orderedPaths
        };
    }
}
/// <summary>
/// Helpers producing stable, content-addressed identifiers for call graph nodes.
/// </summary>
public static class CallGraphNodeIds
{
    /// <summary>
    /// SHA-256 of the trimmed stable symbol id, rendered as
    /// "sha256:&lt;lowercase hex&gt;".
    /// </summary>
    /// <exception cref="ArgumentException">When the symbol id is null/blank.</exception>
    public static string Compute(string stableSymbolId)
    {
        if (string.IsNullOrWhiteSpace(stableSymbolId))
        {
            throw new ArgumentException("Symbol id must be provided.", nameof(stableSymbolId));
        }

        var payload = Encoding.UTF8.GetBytes(stableSymbolId.Trim());
        var digest = SHA256.HashData(payload);
        return string.Concat("sha256:", Convert.ToHexString(digest).ToLowerInvariant());
    }

    /// <summary>Builds "&lt;language&gt;:&lt;symbol&gt;" with the language lower-cased and both parts trimmed.</summary>
    public static string StableSymbolId(string language, string symbol)
        => $"{language.Trim().ToLowerInvariant()}:{symbol.Trim()}";
}
// Type aliases for backward compatibility.
// Code using StellaOps.Scanner.CallGraph.CallGraphSnapshot etc. will continue to work
// because the global using imports all types from Contracts into this namespace.

View File

@@ -27,8 +27,8 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Scanner.Evidence\\StellaOps.Scanner.Evidence.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.Reachability\\StellaOps.Scanner.Reachability.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Contracts\StellaOps.Scanner.Contracts.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Evidence\StellaOps.Scanner.Evidence.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,170 @@
using System.Collections.Immutable;
using System.Reflection;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.ChangeTrace.Models;
using StellaOps.Scanner.ChangeTrace.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Builder;
/// <summary>
/// Builder for constructing change traces from scan or binary comparisons.
/// </summary>
public sealed class ChangeTraceBuilder : IChangeTraceBuilder
{
    // Injected logger and clock; TimeProvider keeps timestamps deterministic in tests.
    private readonly ILogger<ChangeTraceBuilder> _logger;
    private readonly TimeProvider _timeProvider;
    /// <summary>
    /// Current engine version, read from the assembly's informational version
    /// attribute; falls back to "1.0.0" when the attribute is absent.
    /// </summary>
    public static string EngineVersion { get; } = Assembly.GetExecutingAssembly()
        .GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion
        ?? "1.0.0";
    /// <summary>
    /// Initializes a new instance of the ChangeTraceBuilder.
    /// </summary>
    /// <param name="logger">Logger instance.</param>
    /// <param name="timeProvider">Time provider for deterministic timestamps.</param>
    /// <exception cref="ArgumentNullException">When either dependency is null.</exception>
    public ChangeTraceBuilder(ILogger<ChangeTraceBuilder> logger, TimeProvider timeProvider)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }
    // NOTE(review): placeholder implementation — always produces an empty-delta
    // trace until scan repository integration lands; `ct` is accepted but not
    // yet observed because all work is synchronous.
    /// <inheritdoc />
    public Task<Models.ChangeTrace> FromScanComparisonAsync(
        string fromScanId,
        string toScanId,
        ChangeTraceBuilderOptions? options = null,
        CancellationToken ct = default)
    {
        // Reject blank scan ids up front (throws ArgumentException).
        ArgumentException.ThrowIfNullOrWhiteSpace(fromScanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(toScanId);
        options ??= ChangeTraceBuilderOptions.Default;
        _logger.LogInformation("Building change trace from scan comparison: {FromScanId} -> {ToScanId}",
            fromScanId, toScanId);
        // TODO: Integrate with actual scan repository to fetch scan data
        // For now, create a placeholder trace structure
        var trace = BuildPlaceholderTrace(fromScanId, toScanId, options);
        var finalTrace = FinalizeTrace(trace);
        return Task.FromResult(finalTrace);
    }
    // NOTE(review): placeholder implementation — validates inputs and derives
    // synthetic scan ids from the file names, but symbol-level diffing awaits
    // BinaryIndex integration; `ct` is accepted but not yet observed.
    /// <inheritdoc />
    public Task<Models.ChangeTrace> FromBinaryComparisonAsync(
        string fromBinaryPath,
        string toBinaryPath,
        ChangeTraceBuilderOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(fromBinaryPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(toBinaryPath);
        // Fail fast when either input file is missing.
        if (!File.Exists(fromBinaryPath))
            throw new FileNotFoundException("From binary not found", fromBinaryPath);
        if (!File.Exists(toBinaryPath))
            throw new FileNotFoundException("To binary not found", toBinaryPath);
        options ??= ChangeTraceBuilderOptions.Default;
        _logger.LogInformation("Building change trace from binary comparison: {FromPath} -> {ToPath}",
            fromBinaryPath, toBinaryPath);
        // Generate scan IDs from file paths
        var fromScanId = $"binary:{Path.GetFileName(fromBinaryPath)}";
        var toScanId = $"binary:{Path.GetFileName(toBinaryPath)}";
        // TODO: Integrate with BinaryIndex for symbol extraction
        // For now, create a placeholder trace structure
        var trace = BuildPlaceholderTrace(fromScanId, toScanId, options);
        var finalTrace = FinalizeTrace(trace);
        return Task.FromResult(finalTrace);
    }
private Models.ChangeTrace BuildPlaceholderTrace(
string fromScanId,
string toScanId,
ChangeTraceBuilderOptions options)
{
var now = _timeProvider.GetUtcNow();
var combinedScanId = $"{fromScanId}..{toScanId}";
return new Models.ChangeTrace
{
Subject = new ChangeTraceSubject
{
Type = "scan.comparison",
Digest = $"sha256:{Guid.Empty:N}",
Name = combinedScanId
},
Basis = new ChangeTraceBasis
{
ScanId = combinedScanId,
FromScanId = fromScanId,
ToScanId = toScanId,
Policies = options.Policies,
DiffMethod = options.GetDiffMethods(),
EngineVersion = EngineVersion,
AnalyzedAt = now
},
Deltas = [],
Summary = new ChangeTraceSummary
{
ChangedPackages = 0,
ChangedSymbols = 0,
ChangedBytes = 0,
RiskDelta = 0.0,
Verdict = ChangeTraceVerdict.Neutral
}
};
}
private Models.ChangeTrace FinalizeTrace(Models.ChangeTrace trace)
{
// Compute commitment hash
var hash = ChangeTraceSerializer.ComputeCommitmentHash(trace);
return trace with
{
Commitment = new ChangeTraceCommitment
{
Sha256 = hash,
Algorithm = "RFC8785+SHA256"
}
};
}
/// <summary>
/// Computes the verdict based on risk delta score.
/// </summary>
/// <param name="riskDelta">Risk delta score.</param>
/// <returns>Verdict classification.</returns>
public static ChangeTraceVerdict ComputeVerdict(double riskDelta)
{
return riskDelta switch
{
< -0.3 => ChangeTraceVerdict.RiskDown,
> 0.3 => ChangeTraceVerdict.RiskUp,
_ => ChangeTraceVerdict.Neutral
};
}
/// <summary>
/// Computes trust delta score from before/after scores.
/// Formula: (AfterTrust - BeforeTrust) / max(BeforeTrust, 0.01)
/// </summary>
/// <param name="beforeTrust">Trust score before change.</param>
/// <param name="afterTrust">Trust score after change.</param>
/// <returns>Trust delta score.</returns>
public static double ComputeTrustDelta(double beforeTrust, double afterTrust)
{
var denominator = Math.Max(beforeTrust, 0.01);
return (afterTrust - beforeTrust) / denominator;
}
}

View File

@@ -0,0 +1,65 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.ChangeTrace.Builder;
/// <summary>
/// Options controlling which diff layers a change-trace build performs.
/// </summary>
public sealed record ChangeTraceBuilderOptions
{
    /// <summary>
    /// Enables package-level diffing. Default: true.
    /// </summary>
    public bool IncludePackageDiff { get; init; } = true;
    /// <summary>
    /// Enables symbol-level diffing. Default: true.
    /// </summary>
    public bool IncludeSymbolDiff { get; init; } = true;
    /// <summary>
    /// Enables byte-level diffing. Default: false.
    /// </summary>
    public bool IncludeByteDiff { get; init; } = false;
    /// <summary>
    /// Minimum confidence threshold for symbol matches.
    /// Default: 0.75.
    /// </summary>
    public double MinSymbolConfidence { get; init; } = 0.75;
    /// <summary>
    /// Rolling hash window size for byte diffing.
    /// Default: 2048 bytes.
    /// </summary>
    public int ByteDiffWindowSize { get; init; } = 2048;
    /// <summary>
    /// Maximum binary size for byte-level analysis (bytes).
    /// Default: 10MB.
    /// </summary>
    public long MaxBinarySize { get; init; } = 10 * 1024 * 1024;
    /// <summary>
    /// Lattice policies to apply during trust delta computation.
    /// Default: ["lattice:default@v3"].
    /// </summary>
    public ImmutableArray<string> Policies { get; init; } = ["lattice:default@v3"];
    /// <summary>
    /// Returns the enabled diff method identifiers, in the fixed order
    /// "pkg", "symbol", "byte".
    /// </summary>
    public ImmutableArray<string> GetDiffMethods()
    {
        var enabled = ImmutableArray.CreateBuilder<string>(3);
        if (IncludePackageDiff)
        {
            enabled.Add("pkg");
        }
        if (IncludeSymbolDiff)
        {
            enabled.Add("symbol");
        }
        if (IncludeByteDiff)
        {
            enabled.Add("byte");
        }
        return enabled.ToImmutable();
    }
    /// <summary>
    /// Shared instance carrying all default values.
    /// </summary>
    public static ChangeTraceBuilderOptions Default { get; } = new();
}

View File

@@ -0,0 +1,35 @@
namespace StellaOps.Scanner.ChangeTrace.Builder;
/// <summary>
/// Builder interface for constructing change traces.
/// </summary>
public interface IChangeTraceBuilder
{
    /// <summary>
    /// Build change trace from two scan comparisons.
    /// </summary>
    /// <param name="fromScanId">Scan ID of the "before" state.</param>
    /// <param name="toScanId">Scan ID of the "after" state.</param>
    /// <param name="options">Builder options for configuring the trace. Null uses <see cref="ChangeTraceBuilderOptions.Default"/>.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Constructed change trace.</returns>
    /// <remarks>
    /// Implementations are expected to reject null/whitespace scan identifiers
    /// (see <see cref="ChangeTraceBuilder"/>).
    /// </remarks>
    Task<Models.ChangeTrace> FromScanComparisonAsync(
        string fromScanId,
        string toScanId,
        ChangeTraceBuilderOptions? options = null,
        CancellationToken ct = default);
    /// <summary>
    /// Build change trace from two binary files.
    /// </summary>
    /// <param name="fromBinaryPath">Path to the "before" binary.</param>
    /// <param name="toBinaryPath">Path to the "after" binary.</param>
    /// <param name="options">Builder options for configuring the trace. Null uses <see cref="ChangeTraceBuilderOptions.Default"/>.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Constructed change trace.</returns>
    /// <exception cref="FileNotFoundException">Either binary path does not exist (per <see cref="ChangeTraceBuilder"/>).</exception>
    Task<Models.ChangeTrace> FromBinaryComparisonAsync(
        string fromBinaryPath,
        string toBinaryPath,
        ChangeTraceBuilderOptions? options = null,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,57 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
using System.Collections.Immutable;
namespace StellaOps.Scanner.ChangeTrace.ByteDiff;
/// <summary>
/// Options for byte-level diffing.
/// </summary>
public sealed record ByteDiffOptions
{
    /// <summary>
    /// Rolling hash window size in bytes. Default: 2048.
    /// </summary>
    public int WindowSize { get; init; } = 2048;
    // Backing field so StepSize can default to WindowSize until explicitly set.
    private readonly int? _stepSize;
    /// <summary>
    /// Step size for window advancement. Default: <see cref="WindowSize"/> (non-overlapping).
    /// </summary>
    /// <remarks>
    /// Fix: previously this was a fixed 2048 regardless of <see cref="WindowSize"/>, so
    /// callers who set only <see cref="WindowSize"/> got overlapping or skipped windows
    /// contrary to the documented default. It now tracks <see cref="WindowSize"/> unless
    /// explicitly overridden; the out-of-the-box default (2048) is unchanged.
    /// </remarks>
    public int StepSize
    {
        get => _stepSize ?? WindowSize;
        init => _stepSize = value;
    }
    /// <summary>
    /// Maximum file size to analyze in bytes. Default: 10MB.
    /// </summary>
    public long MaxFileSize { get; init; } = 10 * 1024 * 1024;
    /// <summary>
    /// Whether to analyze by ELF/PE section. Default: true.
    /// </summary>
    public bool AnalyzeBySections { get; init; } = true;
    /// <summary>
    /// Sections to include (e.g., ".text", ".data"). Null = all sections.
    /// </summary>
    public ImmutableArray<string>? IncludeSections { get; init; }
    /// <summary>
    /// Whether to include context description in output. Default: false.
    /// </summary>
    public bool IncludeContext { get; init; } = false;
    /// <summary>
    /// Enable parallel processing for large files. Default: true.
    /// </summary>
    public bool EnableParallel { get; init; } = true;
    /// <summary>
    /// Minimum number of consecutive changed windows to report. Default: 1.
    /// </summary>
    public int MinConsecutiveChanges { get; init; } = 1;
    /// <summary>
    /// Create default options.
    /// </summary>
    public static ByteDiffOptions Default { get; } = new();
}

View File

@@ -0,0 +1,381 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
using System.Security.Cryptography;
using StellaOps.Scanner.ChangeTrace.Models;
namespace StellaOps.Scanner.ChangeTrace.ByteDiff;
/// <summary>
/// Byte-level binary comparison using rolling hash windows.
/// </summary>
/// <remarks>
/// Fixed-size windows are hashed with SHA-256 and compared at matching offsets;
/// runs of consecutive differing windows are merged into a single delta.
/// Sentinel strings are used in place of real digests: "truncated" (window falls
/// past the end of the "to" data), "absent" (no counterpart existed), "removed"
/// (section missing from the "to" binary), "empty" (zero-length section).
/// NOTE(review): <see cref="ByteDiffOptions.EnableParallel"/> is never consulted by this
/// implementation — confirm whether parallel processing is still planned.
/// </remarks>
public sealed class ByteLevelDiffer : IByteLevelDiffer
{
    // Locates ELF/PE/Mach-O sections so diffs can be attributed to sections.
    private readonly ISectionAnalyzer _sectionAnalyzer;
    /// <summary>
    /// Creates a new ByteLevelDiffer with the specified section analyzer.
    /// </summary>
    /// <param name="sectionAnalyzer">Analyzer used to locate binary sections.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="sectionAnalyzer"/> is null.</exception>
    public ByteLevelDiffer(ISectionAnalyzer sectionAnalyzer)
    {
        _sectionAnalyzer = sectionAnalyzer ?? throw new ArgumentNullException(nameof(sectionAnalyzer));
    }
    /// <summary>
    /// Creates a new ByteLevelDiffer with the default section analyzer.
    /// </summary>
    public ByteLevelDiffer() : this(new SectionAnalyzer())
    {
    }
    /// <inheritdoc/>
    public async Task<IReadOnlyList<ByteDelta>> CompareAsync(
        Stream fromStream,
        Stream toStream,
        ByteDiffOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(fromStream);
        ArgumentNullException.ThrowIfNull(toStream);
        options ??= ByteDiffOptions.Default;
        // Check file sizes
        if (fromStream.Length > options.MaxFileSize || toStream.Length > options.MaxFileSize)
        {
            // Fall back to sampling for large files
            return await CompareLargeFilesAsync(fromStream, toStream, options, ct);
        }
        // Read both streams
        var fromBytes = await ReadStreamAsync(fromStream, ct);
        var toBytes = await ReadStreamAsync(toStream, ct);
        // Compare by sections if enabled and formats support it
        if (options.AnalyzeBySections)
        {
            var fromSections = await _sectionAnalyzer.AnalyzeAsync(fromBytes, ct);
            var toSections = await _sectionAnalyzer.AnalyzeAsync(toBytes, ct);
            if (fromSections.Count > 0 && toSections.Count > 0)
            {
                return CompareBySections(fromBytes, toBytes, fromSections, toSections, options);
            }
        }
        // Fall back to full binary comparison
        return CompareFullBinary(fromBytes, toBytes, options);
    }
    /// <inheritdoc/>
    public async Task<IReadOnlyList<ByteDelta>> CompareFilesAsync(
        string fromPath,
        string toPath,
        ByteDiffOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrEmpty(fromPath);
        ArgumentException.ThrowIfNullOrEmpty(toPath);
        await using var fromStream = File.OpenRead(fromPath);
        await using var toStream = File.OpenRead(toPath);
        return await CompareAsync(fromStream, toStream, options, ct);
    }
    /// <summary>
    /// Whole-file windowed comparison used when no section info is available.
    /// Windows are compared at identical offsets in both buffers; runs of
    /// differing windows are merged into single deltas.
    /// </summary>
    private IReadOnlyList<ByteDelta> CompareFullBinary(
        byte[] fromBytes,
        byte[] toBytes,
        ByteDiffOptions options)
    {
        var deltas = new List<ByteDelta>();
        var windowSize = options.WindowSize;
        var stepSize = options.StepSize;
        // Track consecutive changes for merging
        var consecutiveChanges = new List<(long offset, int size, string fromHash, string toHash)>();
        // Compare windows in "from" file
        for (long offset = 0; offset + windowSize <= fromBytes.Length; offset += stepSize)
        {
            var fromWindow = fromBytes.AsSpan((int)offset, windowSize);
            var fromHash = ComputeWindowHash(fromWindow);
            // Check if same window exists in "to" file at same position
            string toHash;
            if (offset + windowSize <= toBytes.Length)
            {
                var toWindow = toBytes.AsSpan((int)offset, windowSize);
                toHash = ComputeWindowHash(toWindow);
            }
            else
            {
                // Sentinel: "to" file ends before this window.
                toHash = "truncated";
            }
            if (!string.Equals(fromHash, toHash, StringComparison.OrdinalIgnoreCase))
            {
                consecutiveChanges.Add((offset, windowSize, fromHash, toHash));
            }
            else if (consecutiveChanges.Count >= options.MinConsecutiveChanges)
            {
                // Flush consecutive changes as a single delta
                deltas.Add(CreateMergedDelta(consecutiveChanges, null, options.IncludeContext));
                consecutiveChanges.Clear();
            }
            else
            {
                // Run was shorter than MinConsecutiveChanges: discard it silently.
                consecutiveChanges.Clear();
            }
        }
        // Flush remaining changes
        if (consecutiveChanges.Count >= options.MinConsecutiveChanges)
        {
            deltas.Add(CreateMergedDelta(consecutiveChanges, null, options.IncludeContext));
        }
        // Handle size difference (bytes added or removed)
        // NOTE(review): growth is only reported when the "from" file is non-empty,
        // and trailing "from" bytes smaller than one window are never compared —
        // confirm whether those gaps are acceptable for a placeholder diff.
        if (toBytes.Length > fromBytes.Length && fromBytes.Length > 0)
        {
            var addedOffset = (long)(fromBytes.Length / windowSize) * windowSize;
            var addedSize = (int)(toBytes.Length - addedOffset);
            if (addedSize > 0 && addedOffset + addedSize <= toBytes.Length)
            {
                // Hash at most one window of the appended region.
                var addedWindow = toBytes.AsSpan((int)addedOffset, Math.Min(addedSize, windowSize));
                deltas.Add(new ByteDelta
                {
                    Offset = addedOffset,
                    Size = addedSize,
                    FromHash = "absent",
                    ToHash = ComputeWindowHash(addedWindow),
                    Context = options.IncludeContext ? "Bytes added at end" : null
                });
            }
        }
        return deltas;
    }
    /// <summary>
    /// Section-aware comparison: matches sections by exact name, reports removed
    /// and added sections, and window-compares the contents of matched sections.
    /// </summary>
    private IReadOnlyList<ByteDelta> CompareBySections(
        byte[] fromBytes,
        byte[] toBytes,
        IReadOnlyList<SectionInfo> fromSections,
        IReadOnlyList<SectionInfo> toSections,
        ByteDiffOptions options)
    {
        var deltas = new List<ByteDelta>();
        // Match sections by name
        var toSectionDict = toSections.ToDictionary(s => s.Name, StringComparer.Ordinal);
        foreach (var fromSection in fromSections)
        {
            // Filter by included sections
            if (options.IncludeSections.HasValue &&
                !options.IncludeSections.Value.Contains(fromSection.Name, StringComparer.Ordinal))
            {
                continue;
            }
            if (!toSectionDict.TryGetValue(fromSection.Name, out var toSection))
            {
                // Section removed
                deltas.Add(new ByteDelta
                {
                    Offset = fromSection.Offset,
                    Size = (int)Math.Min(fromSection.Size, int.MaxValue),
                    FromHash = ComputeSectionHash(fromBytes, fromSection),
                    ToHash = "removed",
                    Section = fromSection.Name,
                    Context = options.IncludeContext ? "Section removed" : null
                });
                continue;
            }
            // Compare section contents
            var sectionDeltas = CompareSectionWindows(
                fromBytes, toBytes, fromSection, toSection, options);
            deltas.AddRange(sectionDeltas);
        }
        // Check for added sections
        foreach (var toSection in toSections)
        {
            if (options.IncludeSections.HasValue &&
                !options.IncludeSections.Value.Contains(toSection.Name, StringComparer.Ordinal))
            {
                continue;
            }
            if (!fromSections.Any(s => s.Name == toSection.Name))
            {
                // Section added
                deltas.Add(new ByteDelta
                {
                    Offset = toSection.Offset,
                    Size = (int)Math.Min(toSection.Size, int.MaxValue),
                    FromHash = "absent",
                    ToHash = ComputeSectionHash(toBytes, toSection),
                    Section = toSection.Name,
                    Context = options.IncludeContext ? "Section added" : null
                });
            }
        }
        return deltas;
    }
    /// <summary>
    /// Window-compares one matched section pair. "To" offsets are derived from the
    /// window's relative position within the "from" section, so sections that moved
    /// within the file are still compared content-to-content.
    /// </summary>
    private IReadOnlyList<ByteDelta> CompareSectionWindows(
        byte[] fromBytes,
        byte[] toBytes,
        SectionInfo fromSection,
        SectionInfo toSection,
        ByteDiffOptions options)
    {
        var deltas = new List<ByteDelta>();
        var windowSize = options.WindowSize;
        var stepSize = options.StepSize;
        // Clamp section extents to the actual buffer lengths.
        var fromEnd = Math.Min(fromSection.Offset + fromSection.Size, fromBytes.Length);
        var toEnd = Math.Min(toSection.Offset + toSection.Size, toBytes.Length);
        var consecutiveChanges = new List<(long offset, int size, string fromHash, string toHash)>();
        for (var fromOffset = fromSection.Offset;
             fromOffset + windowSize <= fromEnd;
             fromOffset += stepSize)
        {
            // Same relative position inside the "to" section.
            var toOffset = toSection.Offset + (fromOffset - fromSection.Offset);
            var fromWindow = fromBytes.AsSpan((int)fromOffset, windowSize);
            var fromHash = ComputeWindowHash(fromWindow);
            string toHash;
            if (toOffset + windowSize <= toEnd)
            {
                var toWindow = toBytes.AsSpan((int)toOffset, windowSize);
                toHash = ComputeWindowHash(toWindow);
            }
            else
            {
                // Sentinel: "to" section ends before this window.
                toHash = "truncated";
            }
            if (!string.Equals(fromHash, toHash, StringComparison.OrdinalIgnoreCase))
            {
                consecutiveChanges.Add((fromOffset, windowSize, fromHash, toHash));
            }
            else if (consecutiveChanges.Count >= options.MinConsecutiveChanges)
            {
                deltas.Add(CreateMergedDelta(consecutiveChanges, fromSection.Name, options.IncludeContext));
                consecutiveChanges.Clear();
            }
            else
            {
                // Run was shorter than MinConsecutiveChanges: discard it silently.
                consecutiveChanges.Clear();
            }
        }
        // Flush remaining changes
        if (consecutiveChanges.Count >= options.MinConsecutiveChanges)
        {
            deltas.Add(CreateMergedDelta(consecutiveChanges, fromSection.Name, options.IncludeContext));
        }
        return deltas;
    }
    /// <summary>
    /// Sparse comparison for files exceeding <see cref="ByteDiffOptions.MaxFileSize"/>:
    /// hashes one window at roughly 1000 evenly-spaced offsets instead of scanning
    /// the whole file. Growth past the "from" length is not reported here.
    /// </summary>
    private async Task<IReadOnlyList<ByteDelta>> CompareLargeFilesAsync(
        Stream fromStream,
        Stream toStream,
        ByteDiffOptions options,
        CancellationToken ct)
    {
        // Sample-based comparison for large files
        var deltas = new List<ByteDelta>();
        var sampleInterval = Math.Max(1, (int)(fromStream.Length / 1000)); // ~1000 samples
        var windowSize = options.WindowSize;
        var buffer = new byte[windowSize];
        for (long offset = 0; offset < fromStream.Length - windowSize; offset += sampleInterval)
        {
            ct.ThrowIfCancellationRequested();
            fromStream.Position = offset;
            var fromRead = await fromStream.ReadAsync(buffer.AsMemory(0, windowSize), ct);
            if (fromRead < windowSize) break;
            var fromHash = ComputeWindowHash(buffer.AsSpan(0, fromRead));
            if (offset < toStream.Length - windowSize)
            {
                toStream.Position = offset;
                var toRead = await toStream.ReadAsync(buffer.AsMemory(0, windowSize), ct);
                var toHash = toRead >= windowSize
                    ? ComputeWindowHash(buffer.AsSpan(0, toRead))
                    : "truncated";
                if (!string.Equals(fromHash, toHash, StringComparison.OrdinalIgnoreCase))
                {
                    deltas.Add(new ByteDelta
                    {
                        Offset = offset,
                        Size = windowSize,
                        FromHash = fromHash,
                        ToHash = toHash,
                        Context = options.IncludeContext ? "Sampled comparison (large file)" : null
                    });
                }
            }
        }
        return deltas;
    }
    /// <summary>
    /// SHA-256 of one window, rendered as lowercase hex.
    /// </summary>
    private static string ComputeWindowHash(ReadOnlySpan<byte> window)
    {
        var hash = SHA256.HashData(window);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
    /// <summary>
    /// SHA-256 of an entire section's bytes (clamped to the buffer);
    /// "empty" when the clamped range has no bytes.
    /// </summary>
    private static string ComputeSectionHash(byte[] bytes, SectionInfo section)
    {
        var start = (int)Math.Min(section.Offset, bytes.Length);
        var length = (int)Math.Min(section.Size, bytes.Length - start);
        if (length <= 0) return "empty";
        return ComputeWindowHash(bytes.AsSpan(start, length));
    }
    /// <summary>
    /// Collapses a run of differing windows into one delta spanning from the first
    /// window's start to the last window's end. Only the first "from" hash and the
    /// last "to" hash of the run are retained.
    /// </summary>
    private static ByteDelta CreateMergedDelta(
        List<(long offset, int size, string fromHash, string toHash)> changes,
        string? section,
        bool includeContext)
    {
        var first = changes[0];
        var last = changes[^1];
        var totalSize = (int)(last.offset + last.size - first.offset);
        return new ByteDelta
        {
            Offset = first.offset,
            Size = totalSize,
            FromHash = first.fromHash, // First window hash
            ToHash = last.toHash,      // Last window hash
            Section = section,
            Context = includeContext && changes.Count > 1
                ? $"{changes.Count} consecutive windows changed"
                : null
        };
    }
    /// <summary>
    /// Materializes a stream into a byte array, reusing a MemoryStream's
    /// exposed buffer when available to avoid a second copy pass.
    /// </summary>
    private static async Task<byte[]> ReadStreamAsync(Stream stream, CancellationToken ct)
    {
        if (stream is MemoryStream ms && ms.TryGetBuffer(out var buffer))
        {
            return buffer.ToArray();
        }
        using var memoryStream = new MemoryStream();
        await stream.CopyToAsync(memoryStream, ct);
        return memoryStream.ToArray();
    }
}

View File

@@ -0,0 +1,30 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
using StellaOps.Scanner.ChangeTrace.Models;
namespace StellaOps.Scanner.ChangeTrace.ByteDiff;
/// <summary>
/// Service for byte-level binary comparison using rolling hash windows.
/// </summary>
public interface IByteLevelDiffer
{
    /// <summary>
    /// Compare two binary files and return byte-level deltas.
    /// </summary>
    /// <param name="fromStream">Stream over the "before" binary. Implementations may read Length and seek.</param>
    /// <param name="toStream">Stream over the "after" binary.</param>
    /// <param name="options">Diff options; null uses <see cref="ByteDiffOptions.Default"/>.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Deltas describing changed byte regions; empty when no differences are detected.</returns>
    Task<IReadOnlyList<ByteDelta>> CompareAsync(
        Stream fromStream,
        Stream toStream,
        ByteDiffOptions? options = null,
        CancellationToken ct = default);
    /// <summary>
    /// Compare two binary files by path.
    /// </summary>
    /// <param name="fromPath">Path to the "before" binary.</param>
    /// <param name="toPath">Path to the "after" binary.</param>
    /// <param name="options">Diff options; null uses <see cref="ByteDiffOptions.Default"/>.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Deltas describing changed byte regions; empty when no differences are detected.</returns>
    Task<IReadOnlyList<ByteDelta>> CompareFilesAsync(
        string fromPath,
        string toPath,
        ByteDiffOptions? options = null,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,59 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
namespace StellaOps.Scanner.ChangeTrace.ByteDiff;
/// <summary>
/// Analyzes binary format sections (ELF, PE, Mach-O).
/// </summary>
public interface ISectionAnalyzer
{
    /// <summary>
    /// Extract section information from binary.
    /// </summary>
    /// <param name="binary">Raw binary contents.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>
    /// Discovered sections; implementations may return an empty list when the
    /// format is not recognized (see <see cref="SectionAnalyzer"/>).
    /// </returns>
    Task<IReadOnlyList<SectionInfo>> AnalyzeAsync(byte[] binary, CancellationToken ct = default);
}
/// <summary>
/// Information about a binary section.
/// </summary>
/// <param name="Name">Section name (e.g., ".text", ".data"; Mach-O uses "segment,section").</param>
/// <param name="Offset">Offset in bytes from start of file.</param>
/// <param name="Size">Size in bytes.</param>
/// <param name="Type">Type of section.</param>
public sealed record SectionInfo(
    string Name,
    long Offset,
    long Size,
    SectionType Type);
/// <summary>
/// Type of binary section.
/// </summary>
public enum SectionType
{
    /// <summary>
    /// Code section (.text).
    /// </summary>
    Code,
    /// <summary>
    /// Data section (.data, .rodata).
    /// </summary>
    Data,
    /// <summary>
    /// Uninitialized data section (.bss).
    /// </summary>
    Bss,
    /// <summary>
    /// Debug information (.debug_*).
    /// </summary>
    Debug,
    /// <summary>
    /// Other section type.
    /// </summary>
    Other
}

View File

@@ -0,0 +1,425 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under AGPL-3.0-or-later. See LICENSE in the project root.
using System.Buffers.Binary;
namespace StellaOps.Scanner.ChangeTrace.ByteDiff;
/// <summary>
/// Analyzes binary format sections (ELF, PE, Mach-O).
/// </summary>
public sealed class SectionAnalyzer : ISectionAnalyzer
{
// ELF magic bytes
private static ReadOnlySpan<byte> ElfMagic => [0x7f, (byte)'E', (byte)'L', (byte)'F'];
// PE magic bytes (MZ header)
private static ReadOnlySpan<byte> PeMagic => [(byte)'M', (byte)'Z'];
/// <inheritdoc/>
public Task<IReadOnlyList<SectionInfo>> AnalyzeAsync(byte[] binary, CancellationToken ct = default)
{
ct.ThrowIfCancellationRequested();
if (binary.Length < 64)
{
// Too small to be a valid binary
return Task.FromResult<IReadOnlyList<SectionInfo>>([]);
}
// Detect format and parse sections
if (IsElf(binary))
{
return Task.FromResult(ParseElfSections(binary));
}
if (IsPe(binary))
{
return Task.FromResult(ParsePeSections(binary));
}
if (IsMachO(binary))
{
return Task.FromResult(ParseMachOSections(binary));
}
// Unknown format - return empty
return Task.FromResult<IReadOnlyList<SectionInfo>>([]);
}
private static bool IsElf(byte[] binary) =>
binary.Length >= 4 && binary.AsSpan(0, 4).SequenceEqual(ElfMagic);
private static bool IsPe(byte[] binary) =>
binary.Length >= 2 && binary.AsSpan(0, 2).SequenceEqual(PeMagic);
private static bool IsMachO(byte[] binary)
{
if (binary.Length < 4) return false;
var magic = BinaryPrimitives.ReadUInt32BigEndian(binary);
return magic is 0xfeedface or 0xfeedfacf or 0xcefaedfe or 0xcffaedfe;
}
private static IReadOnlyList<SectionInfo> ParseElfSections(byte[] binary)
{
var sections = new List<SectionInfo>();
try
{
// Check ELF class (32 or 64 bit)
var is64Bit = binary[4] == 2;
var isLittleEndian = binary[5] == 1;
// Get section header offset and count
long shoff;
int shentsize;
int shnum;
int shstrndx;
if (is64Bit)
{
shoff = isLittleEndian
? BinaryPrimitives.ReadInt64LittleEndian(binary.AsSpan(40))
: BinaryPrimitives.ReadInt64BigEndian(binary.AsSpan(40));
shentsize = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(58))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(58));
shnum = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(60))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(60));
shstrndx = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(62))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(62));
}
else
{
shoff = isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(32))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(32));
shentsize = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(46))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(46));
shnum = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(48))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(48));
shstrndx = isLittleEndian
? BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(50))
: BinaryPrimitives.ReadUInt16BigEndian(binary.AsSpan(50));
}
// Validate header
if (shoff <= 0 || shoff >= binary.Length || shnum == 0 || shstrndx >= shnum)
{
return GetFallbackSections(binary, ".text", ".data");
}
// Get string table section
var strTableOffset = GetElfSectionOffset(binary, (int)shoff, shstrndx, shentsize, is64Bit, isLittleEndian);
if (strTableOffset < 0 || strTableOffset >= binary.Length)
{
return GetFallbackSections(binary, ".text", ".data");
}
// Parse each section
for (var i = 0; i < shnum; i++)
{
var sectionOffset = shoff + (i * shentsize);
if (sectionOffset + shentsize > binary.Length) break;
var (name, offset, size, type) = ParseElfSectionHeader(
binary, (int)sectionOffset, strTableOffset, is64Bit, isLittleEndian);
if (!string.IsNullOrEmpty(name) && size > 0)
{
sections.Add(new SectionInfo(name, offset, size, type));
}
}
}
catch
{
// Fallback on parse error
return GetFallbackSections(binary, ".text", ".data");
}
return sections.Count > 0 ? sections : GetFallbackSections(binary, ".text", ".data");
}
private static int GetElfSectionOffset(byte[] binary, int shoff, int index, int shentsize, bool is64Bit, bool isLittleEndian)
{
var offset = shoff + (index * shentsize);
if (offset + shentsize > binary.Length) return -1;
if (is64Bit)
{
return (int)(isLittleEndian
? BinaryPrimitives.ReadInt64LittleEndian(binary.AsSpan(offset + 24))
: BinaryPrimitives.ReadInt64BigEndian(binary.AsSpan(offset + 24)));
}
else
{
return isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(offset + 16))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(offset + 16));
}
}
private static (string name, long offset, long size, SectionType type) ParseElfSectionHeader(
byte[] binary, int headerOffset, int strTableOffset, bool is64Bit, bool isLittleEndian)
{
var nameOffset = isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(headerOffset))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(headerOffset));
var shType = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(headerOffset + 4))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(headerOffset + 4));
long offset, size;
if (is64Bit)
{
offset = isLittleEndian
? BinaryPrimitives.ReadInt64LittleEndian(binary.AsSpan(headerOffset + 24))
: BinaryPrimitives.ReadInt64BigEndian(binary.AsSpan(headerOffset + 24));
size = isLittleEndian
? BinaryPrimitives.ReadInt64LittleEndian(binary.AsSpan(headerOffset + 32))
: BinaryPrimitives.ReadInt64BigEndian(binary.AsSpan(headerOffset + 32));
}
else
{
offset = isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(headerOffset + 16))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(headerOffset + 16));
size = isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(headerOffset + 20))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(headerOffset + 20));
}
// Read section name from string table
var name = ReadNullTerminatedString(binary, strTableOffset + nameOffset);
// Determine section type
var sectionType = shType switch
{
1 => name.StartsWith(".text") ? SectionType.Code : SectionType.Data, // SHT_PROGBITS
8 => SectionType.Bss, // SHT_NOBITS
_ when name.StartsWith(".debug") => SectionType.Debug,
_ when name.StartsWith(".text") => SectionType.Code,
_ when name is ".data" or ".rodata" or ".bss" => SectionType.Data,
_ => SectionType.Other
};
return (name, offset, size, sectionType);
}
private static IReadOnlyList<SectionInfo> ParsePeSections(byte[] binary)
{
var sections = new List<SectionInfo>();
try
{
// Get PE header offset from DOS header
var peOffset = BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(60));
if (peOffset < 0 || peOffset + 24 >= binary.Length) return GetFallbackSections(binary, ".text", ".rdata");
// Verify PE signature
if (binary[peOffset] != 'P' || binary[peOffset + 1] != 'E' ||
binary[peOffset + 2] != 0 || binary[peOffset + 3] != 0)
{
return GetFallbackSections(binary, ".text", ".rdata");
}
// Get number of sections and optional header size
var numberOfSections = BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(peOffset + 6));
var sizeOfOptionalHeader = BinaryPrimitives.ReadUInt16LittleEndian(binary.AsSpan(peOffset + 20));
// Section headers start after optional header
var sectionHeaderOffset = peOffset + 24 + sizeOfOptionalHeader;
for (var i = 0; i < numberOfSections; i++)
{
var offset = sectionHeaderOffset + (i * 40);
if (offset + 40 > binary.Length) break;
// Read section name (8 bytes, null-padded)
var nameBytes = binary.AsSpan(offset, 8);
var nameEnd = nameBytes.IndexOf((byte)0);
var name = System.Text.Encoding.ASCII.GetString(
nameEnd >= 0 ? nameBytes[..nameEnd] : nameBytes);
// Read virtual size and raw data offset/size
var virtualSize = BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(offset + 8));
var rawDataOffset = BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(offset + 20));
var rawDataSize = BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(offset + 16));
// Determine section type
var sectionType = name switch
{
".text" or ".code" => SectionType.Code,
".data" or ".rdata" => SectionType.Data,
".bss" => SectionType.Bss,
_ when name.StartsWith(".debug") => SectionType.Debug,
_ => SectionType.Other
};
if (!string.IsNullOrEmpty(name) && rawDataSize > 0)
{
sections.Add(new SectionInfo(name, rawDataOffset, rawDataSize, sectionType));
}
}
}
catch
{
return GetFallbackSections(binary, ".text", ".rdata");
}
return sections.Count > 0 ? sections : GetFallbackSections(binary, ".text", ".rdata");
}
private static IReadOnlyList<SectionInfo> ParseMachOSections(byte[] binary)
{
var sections = new List<SectionInfo>();
try
{
// Check endianness and 32/64 bit
var magic = BinaryPrimitives.ReadUInt32BigEndian(binary);
var is64Bit = magic is 0xfeedfacf or 0xcffaedfe;
var isLittleEndian = magic is 0xcefaedfe or 0xcffaedfe;
// Read number of load commands
var ncmds = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(16))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(16));
// Header size
var headerSize = is64Bit ? 32 : 28;
var cmdOffset = headerSize;
for (var i = 0; i < ncmds && cmdOffset < binary.Length - 8; i++)
{
var cmd = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(cmdOffset))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(cmdOffset));
var cmdSize = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(cmdOffset + 4))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(cmdOffset + 4));
// LC_SEGMENT (1) or LC_SEGMENT_64 (0x19)
if (cmd is 1 or 0x19)
{
var segmentSections = ParseMachOSegment(binary, cmdOffset, is64Bit, isLittleEndian);
sections.AddRange(segmentSections);
}
cmdOffset += (int)cmdSize;
}
}
catch
{
return GetFallbackSections(binary, "__TEXT", "__DATA");
}
return sections.Count > 0 ? sections : GetFallbackSections(binary, "__TEXT", "__DATA");
}
/// <summary>
/// Parses a single Mach-O LC_SEGMENT / LC_SEGMENT_64 load command starting at
/// <paramref name="cmdOffset"/> and returns one <c>SectionInfo</c> per non-empty
/// section it contains. Field offsets follow Apple's &lt;mach-o/loader.h&gt;:
/// segname at +8 (16 bytes); nsects at +48 (32-bit) / +64 (64-bit); section
/// headers follow the 56-byte (32-bit) / 72-byte (64-bit) segment command header.
/// Malformed segments are tolerated and yield whatever was parsed so far.
/// </summary>
private static IReadOnlyList<SectionInfo> ParseMachOSegment(byte[] binary, int cmdOffset, bool is64Bit, bool isLittleEndian)
{
var sections = new List<SectionInfo>();
try
{
// Read segment name (16 bytes at offset 8); names shorter than 16 bytes
// are NUL-padded, so cut at the first NUL when present.
var segNameBytes = binary.AsSpan(cmdOffset + 8, 16);
var segNameEnd = segNameBytes.IndexOf((byte)0);
var segName = System.Text.Encoding.ASCII.GetString(
segNameEnd >= 0 ? segNameBytes[..segNameEnd] : segNameBytes);
// Number of sections in this segment (nsects field of the segment command)
var nsects = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(cmdOffset + (is64Bit ? 64 : 48)))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(cmdOffset + (is64Bit ? 64 : 48)));
// Section headers start after segment command header
var sectionOffset = cmdOffset + (is64Bit ? 72 : 56);
var sectionSize = is64Bit ? 80 : 68; // sizeof(section_64) / sizeof(section)
// Loop bound also guards against headers running past the end of the buffer.
for (var i = 0; i < nsects && sectionOffset + sectionSize <= binary.Length; i++)
{
// Section name (16 bytes, NUL-padded like the segment name)
var sectNameBytes = binary.AsSpan(sectionOffset, 16);
var sectNameEnd = sectNameBytes.IndexOf((byte)0);
var sectName = System.Text.Encoding.ASCII.GetString(
sectNameEnd >= 0 ? sectNameBytes[..sectNameEnd] : sectNameBytes);
long offset, size;
if (is64Bit)
{
// section_64: size is an 8-byte field at +40, file offset a 4-byte field at +48.
size = isLittleEndian
? BinaryPrimitives.ReadInt64LittleEndian(binary.AsSpan(sectionOffset + 40))
: BinaryPrimitives.ReadInt64BigEndian(binary.AsSpan(sectionOffset + 40));
offset = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(sectionOffset + 48))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(sectionOffset + 48));
}
else
{
// section (32-bit): size is a 4-byte field at +36, file offset at +40.
size = isLittleEndian
? BinaryPrimitives.ReadInt32LittleEndian(binary.AsSpan(sectionOffset + 36))
: BinaryPrimitives.ReadInt32BigEndian(binary.AsSpan(sectionOffset + 36));
offset = isLittleEndian
? BinaryPrimitives.ReadUInt32LittleEndian(binary.AsSpan(sectionOffset + 40))
: BinaryPrimitives.ReadUInt32BigEndian(binary.AsSpan(sectionOffset + 40));
}
// Mach-O sections are conventionally reported as "segment,section" pairs.
var fullName = $"{segName},{sectName}";
var sectionType = segName switch
{
"__TEXT" => SectionType.Code,
"__DATA" or "__DATA_CONST" => SectionType.Data,
"__DWARF" => SectionType.Debug,
_ => SectionType.Other
};
// Zero-sized sections carry no bytes worth diffing; skip them.
if (size > 0)
{
sections.Add(new SectionInfo(fullName, offset, size, sectionType));
}
sectionOffset += sectionSize;
}
}
catch
{
// Ignore parse errors for individual segments — a truncated or malformed
// command simply yields the sections collected so far (best-effort parse).
}
return sections;
}
/// <summary>
/// Reads a NUL-terminated UTF-8 string from <paramref name="binary"/> starting
/// at <paramref name="offset"/>. An out-of-range offset yields an empty string;
/// a missing terminator reads through to the end of the buffer.
/// </summary>
private static string ReadNullTerminatedString(byte[] binary, int offset)
{
// Out-of-range start is treated as "no string here" rather than an error.
if (offset < 0 || offset >= binary.Length)
{
    return string.Empty;
}

var terminator = Array.IndexOf(binary, (byte)0, offset);
var length = (terminator >= 0 ? terminator : binary.Length) - offset;
return System.Text.Encoding.UTF8.GetString(binary, offset, length);
}
/// <summary>
/// Provides reasonable fallback sections for unknown/unparseable binaries:
/// the first half of the file is treated as code and the following quarter
/// as data. The data size is clamped so the synthetic sections never extend
/// past the end of the binary.
/// </summary>
private static IReadOnlyList<SectionInfo> GetFallbackSections(byte[] binary, string textName, string dataName)
{
    // binary.Length / 2 can never exceed binary.Length, so the original
    // Math.Min clamp on textSize was a no-op and has been removed.
    var textSize = binary.Length / 2;
    var dataSize = Math.Min(binary.Length / 4, binary.Length - textSize);
    return
    [
        new SectionInfo(textName, 0, textSize, SectionType.Code),
        new SectionInfo(dataName, textSize, dataSize, SectionType.Data)
    ];
}
}

View File

@@ -0,0 +1,362 @@
// -----------------------------------------------------------------------------
// ChangeTraceEvidenceExtension.cs
// Sprint: SPRINT_20260112_200_005_ATTEST_predicate
// Description: CycloneDX evidence extension for change traces.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Scanner.ChangeTrace.Models;
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
namespace StellaOps.Scanner.ChangeTrace.CycloneDx;
/// <summary>
/// CycloneDX evidence extension for change traces.
/// Provides both embedded and standalone export modes.
/// </summary>
public sealed class ChangeTraceEvidenceExtension : IChangeTraceEvidenceExtension
{
private const string ExtensionType = "stella.change-trace";
private const string ToolVendor = "StellaOps";
private const string ToolName = "ChangeTrace";
// NOTE(review): _timeProvider is assigned but never read in this class — all
// emitted timestamps come from trace.Basis.AnalyzedAt. Confirm whether the
// injected clock was intended for the standalone metadata timestamp.
private readonly TimeProvider _timeProvider;
/// <summary>
/// Create a new change trace evidence extension.
/// </summary>
/// <param name="timeProvider">Time provider for timestamps.</param>
public ChangeTraceEvidenceExtension(TimeProvider timeProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
/// Create a new change trace evidence extension with default time provider.
/// </summary>
public ChangeTraceEvidenceExtension()
: this(TimeProvider.System)
{
}
/// <inheritdoc />
public JsonDocument EmbedInCycloneDx(
JsonDocument bom,
ChangeTraceModel trace,
ChangeTraceEvidenceOptions? options = null)
{
ArgumentNullException.ThrowIfNull(bom);
ArgumentNullException.ThrowIfNull(trace);
options ??= ChangeTraceEvidenceOptions.Default;
// Parse existing BOM as a mutable JsonNode. The input document is not
// mutated; a fresh JsonDocument is returned (caller owns disposal of both).
var bomNode = JsonNode.Parse(bom.RootElement.GetRawText());
if (bomNode is not JsonObject bomObject)
{
throw new ArgumentException("BOM root must be a JSON object", nameof(bom));
}
// Build the extension object
var extensionNode = BuildExtensionObject(trace, options);
// Get or create extensions array. If an "extensions" property exists but is
// not an array, it is silently replaced by a new array in the else branch.
if (bomObject.TryGetPropertyValue("extensions", out var existingExtensions) &&
existingExtensions is JsonArray extensionsArray)
{
extensionsArray.Add(extensionNode);
}
else
{
var newExtensions = new JsonArray { extensionNode };
bomObject["extensions"] = newExtensions;
}
// Serialize back to JsonDocument
var outputJson = bomObject.ToJsonString(GetSerializerOptions());
return JsonDocument.Parse(outputJson);
}
/// <inheritdoc />
public JsonDocument ExportAsStandalone(
ChangeTraceModel trace,
ChangeTraceEvidenceOptions? options = null)
{
ArgumentNullException.ThrowIfNull(trace);
options ??= ChangeTraceEvidenceOptions.Default;
var extensionNode = BuildExtensionObject(trace, options);
// NOTE: serialNumber uses Guid.NewGuid(), so standalone exports are not
// byte-for-byte reproducible across runs; the timestamp is taken from the
// trace itself (deterministic), not from the injected TimeProvider.
var standaloneDoc = new JsonObject
{
["bomFormat"] = "CycloneDX",
["specVersion"] = options.SpecVersion,
["serialNumber"] = $"urn:uuid:{Guid.NewGuid()}",
["version"] = 1,
["metadata"] = new JsonObject
{
["timestamp"] = trace.Basis.AnalyzedAt.ToString("O", CultureInfo.InvariantCulture),
["tools"] = new JsonArray
{
new JsonObject
{
["vendor"] = ToolVendor,
["name"] = ToolName,
["version"] = trace.Basis.EngineVersion
}
}
},
["extensions"] = new JsonArray { extensionNode }
};
// Add subject information as component reference (only when a PURL exists)
if (!string.IsNullOrEmpty(trace.Subject.Purl))
{
var components = new JsonArray
{
new JsonObject
{
["type"] = "container",
["bom-ref"] = "change-trace-subject",
["purl"] = trace.Subject.Purl,
["hashes"] = new JsonArray
{
new JsonObject
{
["alg"] = "SHA-256",
["content"] = ExtractHashValue(trace.Subject.Digest)
}
}
}
};
standaloneDoc["components"] = components;
}
var json = standaloneDoc.ToJsonString(GetSerializerOptions());
return JsonDocument.Parse(json);
}
/// <summary>
/// Build the extension object for the change trace.
/// Wraps the trace payload in an object tagged with <see cref="ExtensionType"/>.
/// </summary>
private JsonObject BuildExtensionObject(
ChangeTraceModel trace,
ChangeTraceEvidenceOptions options)
{
// NOTE(review): when trace.Commitment is absent this emits an explicit
// JSON null ("commitment": null) rather than omitting the property.
var changeTraceNode = new JsonObject
{
["schema"] = trace.Schema,
["subject"] = BuildSubjectNode(trace.Subject),
["basis"] = BuildBasisNode(trace.Basis),
["summary"] = BuildSummaryNode(trace.Summary),
["commitment"] = trace.Commitment is not null ? BuildCommitmentNode(trace.Commitment) : null
};
// Add deltas with limit (options.MaxDeltas caps the emitted array)
var deltasArray = new JsonArray();
var deltaCount = 0;
foreach (var delta in trace.Deltas)
{
if (deltaCount >= options.MaxDeltas)
{
break;
}
deltasArray.Add(BuildDeltaNode(delta, options));
deltaCount++;
}
changeTraceNode["deltas"] = deltasArray;
// Add truncation notice if needed so consumers can tell the list was capped
if (trace.Deltas.Length > options.MaxDeltas)
{
changeTraceNode["truncated"] = true;
changeTraceNode["totalDeltas"] = trace.Deltas.Length;
}
return new JsonObject
{
["extensionType"] = ExtensionType,
["changeTrace"] = changeTraceNode
};
}
// Serializes the trace subject; purl/name are omitted when absent.
private static JsonObject BuildSubjectNode(ChangeTraceSubject subject)
{
var node = new JsonObject
{
["type"] = subject.Type,
["digest"] = subject.Digest
};
if (!string.IsNullOrEmpty(subject.Purl))
{
node["purl"] = subject.Purl;
}
if (!string.IsNullOrEmpty(subject.Name))
{
node["name"] = subject.Name;
}
return node;
}
// Serializes the analysis basis; optional scan ids and empty arrays are omitted.
private static JsonObject BuildBasisNode(ChangeTraceBasis basis)
{
var node = new JsonObject
{
["scanId"] = basis.ScanId,
["engineVersion"] = basis.EngineVersion,
["analyzedAt"] = basis.AnalyzedAt.ToString("O", CultureInfo.InvariantCulture)
};
if (!string.IsNullOrEmpty(basis.FromScanId))
{
node["fromScanId"] = basis.FromScanId;
}
if (!string.IsNullOrEmpty(basis.ToScanId))
{
node["toScanId"] = basis.ToScanId;
}
if (!basis.Policies.IsDefaultOrEmpty)
{
var policiesArray = new JsonArray();
foreach (var policy in basis.Policies)
{
policiesArray.Add(policy);
}
node["policies"] = policiesArray;
}
if (!basis.DiffMethod.IsDefaultOrEmpty)
{
var methodsArray = new JsonArray();
foreach (var method in basis.DiffMethod)
{
methodsArray.Add(method);
}
node["diffMethod"] = methodsArray;
}
return node;
}
// Serializes the aggregate summary; riskDelta is rounded to 4 decimal places
// and the verdict enum is lower-cased for JSON output.
private static JsonObject BuildSummaryNode(ChangeTraceSummary summary)
{
return new JsonObject
{
["changedPackages"] = summary.ChangedPackages,
["changedSymbols"] = summary.ChangedSymbols,
["changedBytes"] = summary.ChangedBytes,
["riskDelta"] = Math.Round(summary.RiskDelta, 4),
["verdict"] = summary.Verdict.ToString().ToLowerInvariant()
};
}
// Serializes the commitment hash used for deterministic verification.
private static JsonObject BuildCommitmentNode(ChangeTraceCommitment commitment)
{
return new JsonObject
{
["sha256"] = commitment.Sha256,
["algorithm"] = commitment.Algorithm
};
}
// Serializes a single package delta, honoring the options flags for optional
// proof steps, symbol deltas (hard-capped at 50) and byte deltas (capped at 20).
private static JsonObject BuildDeltaNode(
PackageDelta delta,
ChangeTraceEvidenceOptions options)
{
var node = new JsonObject
{
["purl"] = delta.Purl,
["name"] = delta.Name,
["fromVersion"] = delta.FromVersion,
["toVersion"] = delta.ToVersion,
["changeType"] = delta.ChangeType.ToString().ToLowerInvariant(),
["explain"] = delta.Explain.ToString().ToLowerInvariant()
};
// Add evidence summary (counts + confidence rounded to 4 decimals)
node["evidence"] = new JsonObject
{
["symbolsChanged"] = delta.Evidence.SymbolsChanged,
["bytesChanged"] = delta.Evidence.BytesChanged,
["confidence"] = Math.Round(delta.Evidence.Confidence, 4)
};
// Add trust delta if available
if (delta.TrustDelta is not null)
{
var trustNode = new JsonObject
{
["score"] = Math.Round(delta.TrustDelta.Score, 4),
["reachabilityImpact"] = delta.TrustDelta.ReachabilityImpact.ToString().ToLowerInvariant(),
["exploitabilityImpact"] = delta.TrustDelta.ExploitabilityImpact.ToString().ToLowerInvariant()
};
if (options.IncludeProofSteps && !delta.TrustDelta.ProofSteps.IsDefaultOrEmpty)
{
var stepsArray = new JsonArray();
foreach (var step in delta.TrustDelta.ProofSteps)
{
stepsArray.Add(step);
}
trustNode["proofSteps"] = stepsArray;
}
node["trustDelta"] = trustNode;
}
// Add symbol deltas if requested (first 50 only)
if (options.IncludeSymbolDeltas && !delta.SymbolDeltas.IsDefaultOrEmpty)
{
var symbolsArray = new JsonArray();
foreach (var symbol in delta.SymbolDeltas.Take(50))
{
symbolsArray.Add(new JsonObject
{
["name"] = symbol.Name,
["changeType"] = symbol.ChangeType.ToString().ToLowerInvariant()
});
}
node["symbolDeltas"] = symbolsArray;
}
// Add byte deltas if requested (first 20 only)
if (options.IncludeByteDeltas && !delta.ByteDeltas.IsDefaultOrEmpty)
{
var bytesArray = new JsonArray();
foreach (var byteD in delta.ByteDeltas.Take(20))
{
bytesArray.Add(new JsonObject
{
["section"] = byteD.Section,
["offset"] = byteD.Offset,
["size"] = byteD.Size
});
}
node["byteDeltas"] = bytesArray;
}
return node;
}
// Strips the algorithm prefix from a digest like "sha256:abc..." and returns
// the bare hex value; digests without a prefix are returned unchanged.
private static string ExtractHashValue(string digest)
{
var colonIndex = digest.IndexOf(':', StringComparison.Ordinal);
return colonIndex > 0 ? digest[(colonIndex + 1)..] : digest;
}
// Options for output serialization. A fresh instance is allocated per call;
// cheap, but could be cached in a static readonly field if this ever shows up
// in profiles.
private static JsonSerializerOptions GetSerializerOptions()
{
return new JsonSerializerOptions
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
}
}

View File

@@ -0,0 +1,78 @@
// -----------------------------------------------------------------------------
// IChangeTraceEvidenceExtension.cs
// Sprint: SPRINT_20260112_200_005_ATTEST_predicate
// Description: Interface for CycloneDX evidence extension support.
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Scanner.ChangeTrace.CycloneDx;
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
/// <summary>
/// CycloneDX evidence extension for change traces.
/// Supports both embedded mode (within an existing BOM) and standalone mode.
/// Implementations must not mutate the input BOM document.
/// </summary>
public interface IChangeTraceEvidenceExtension
{
/// <summary>
/// Embed change trace as component evidence in CycloneDX BOM.
/// Adds the change trace as an extension to the BOM's extensions array.
/// </summary>
/// <param name="bom">The existing CycloneDX BOM document.</param>
/// <param name="trace">The change trace to embed.</param>
/// <param name="options">Optional extension options; implementations fall back to defaults when null.</param>
/// <returns>A new BOM document with the embedded change trace extension. Caller is responsible for disposal.</returns>
JsonDocument EmbedInCycloneDx(
JsonDocument bom,
ChangeTraceModel trace,
ChangeTraceEvidenceOptions? options = null);
/// <summary>
/// Export change trace as standalone CycloneDX evidence file.
/// Creates a minimal CycloneDX wrapper containing only the change trace extension.
/// </summary>
/// <param name="trace">The change trace to export.</param>
/// <param name="options">Optional extension options; implementations fall back to defaults when null.</param>
/// <returns>A standalone CycloneDX document with the change trace extension. Caller is responsible for disposal.</returns>
JsonDocument ExportAsStandalone(
ChangeTraceModel trace,
ChangeTraceEvidenceOptions? options = null);
}
/// <summary>
/// Options for change trace CycloneDX evidence extension.
/// Defaults: proof steps and symbol deltas on, byte deltas off, at most
/// 100 deltas, CycloneDX spec version "1.7".
/// </summary>
public sealed record ChangeTraceEvidenceOptions
{
/// <summary>
/// Default extension options (shared immutable instance).
/// </summary>
public static readonly ChangeTraceEvidenceOptions Default = new();
/// <summary>
/// Include detailed proof steps in the evidence. Defaults to true.
/// </summary>
public bool IncludeProofSteps { get; init; } = true;
/// <summary>
/// Include byte-level deltas in the evidence. Defaults to false.
/// </summary>
public bool IncludeByteDeltas { get; init; }
/// <summary>
/// Include symbol-level deltas in the evidence. Defaults to true.
/// </summary>
public bool IncludeSymbolDeltas { get; init; } = true;
/// <summary>
/// Maximum number of deltas to include. Deltas beyond this are truncated.
/// </summary>
public int MaxDeltas { get; init; } = 100;
/// <summary>
/// CycloneDX spec version for standalone export.
/// </summary>
public string SpecVersion { get; init; } = "1.7";
}

View File

@@ -0,0 +1,117 @@
namespace StellaOps.Scanner.ChangeTrace.Integration;
/// <summary>
/// Simplified client interface for ReachGraph operations.
/// This is an adapter interface to decouple ChangeTrace from ReachGraph internals.
/// </summary>
public interface IReachGraphClient
{
/// <summary>
/// Get reachability information for a package in an image.
/// </summary>
/// <param name="imageDigest">Image digest (sha256:...).</param>
/// <param name="purl">Package URL.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Reachability result.</returns>
Task<ReachabilityResult> GetReachabilityAsync(
string imageDigest,
string purl,
CancellationToken ct = default);
/// <summary>
/// Get call paths to a vulnerable function.
/// </summary>
/// <param name="imageDigest">Image digest (sha256:...).</param>
/// <param name="functionName">Function name.</param>
/// <param name="maxPaths">Maximum number of paths to return. Defaults to 5.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Call path result.</returns>
Task<CallPathResult> GetCallPathsAsync(
string imageDigest,
string functionName,
int maxPaths = 5,
CancellationToken ct = default);
}
/// <summary>
/// Reachability result for a package. Immutable data carrier returned by
/// <see cref="IReachGraphClient.GetReachabilityAsync"/>.
/// </summary>
public sealed record ReachabilityResult
{
/// <summary>
/// Whether the package is reachable from entrypoints.
/// </summary>
public required bool IsReachable { get; init; }
/// <summary>
/// Number of reachable call paths.
/// </summary>
public required int ReachableCallPaths { get; init; }
/// <summary>
/// Total number of exported symbols.
/// </summary>
public int TotalSymbols { get; init; }
/// <summary>
/// Number of reachable symbols.
/// </summary>
public int ReachableSymbols { get; init; }
/// <summary>
/// Fraction of package that is unreachable (0.0 to 1.0).
/// </summary>
public double UnreachableFraction { get; init; }
/// <summary>
/// Entrypoints that reach this package. Null when not computed.
/// </summary>
public IReadOnlyList<string>? ReachingEntrypoints { get; init; }
}
/// <summary>
/// Call path result for a function. Immutable data carrier returned by
/// <see cref="IReachGraphClient.GetCallPathsAsync"/>.
/// </summary>
public sealed record CallPathResult
{
/// <summary>
/// Number of call paths found.
/// </summary>
public required int PathCount { get; init; }
/// <summary>
/// Individual call paths. Null when the caller only needs the count.
/// </summary>
public IReadOnlyList<CallPath>? Paths { get; init; }
/// <summary>
/// Shortest path depth. Null when no path was found.
/// </summary>
public int? ShortestPathDepth { get; init; }
}
/// <summary>
/// A single call path from entrypoint to target.
/// </summary>
public sealed record CallPath
{
/// <summary>
/// Entrypoint function name.
/// </summary>
public required string Entrypoint { get; init; }
/// <summary>
/// Target function name.
/// </summary>
public required string Target { get; init; }
/// <summary>
/// Call chain (function names).
/// </summary>
public required IReadOnlyList<string> Chain { get; init; }
/// <summary>
/// Path depth (number of calls). Derived from <see cref="Chain"/>, not stored.
/// </summary>
public int Depth => Chain.Count;
}

View File

@@ -0,0 +1,93 @@
namespace StellaOps.Scanner.ChangeTrace.Integration;
/// <summary>
/// Simplified client interface for VexLens consensus operations.
/// This is an adapter interface to decouple ChangeTrace from VexLens internals.
/// </summary>
public interface IVexLensClient
{
/// <summary>
/// Get consensus trust score for a package version.
/// </summary>
/// <param name="purl">Package URL.</param>
/// <param name="version">Package version.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Consensus result with trust score.</returns>
Task<VexConsensusResult> GetConsensusAsync(
string purl,
string version,
CancellationToken ct = default);
/// <summary>
/// Get advisory information for a CVE.
/// </summary>
/// <param name="cveId">CVE identifier (e.g., "CVE-2024-1234").</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Advisory info or null if not found.</returns>
Task<VexAdvisoryInfo?> GetAdvisoryAsync(
string cveId,
CancellationToken ct = default);
}
/// <summary>
/// Simplified consensus result for change trace calculation.
/// </summary>
public sealed record VexConsensusResult
{
/// <summary>
/// Overall trust score (0.0 to 1.0).
/// Higher values indicate higher trust (less exploitable).
/// </summary>
public required double TrustScore { get; init; }
/// <summary>
/// Confidence in the consensus (0.0 to 1.0).
/// </summary>
public required double Confidence { get; init; }
/// <summary>
/// VEX status string (e.g., "not_affected", "affected", "fixed").
/// </summary>
public required string Status { get; init; }
/// <summary>
/// Number of VEX statements contributing to consensus.
/// </summary>
public int ContributingStatements { get; init; }
/// <summary>
/// Justification for the status, if available; null otherwise.
/// </summary>
public string? Justification { get; init; }
}
/// <summary>
/// Advisory information for a CVE, as surfaced through <see cref="IVexLensClient"/>.
/// All optional members are null when the upstream advisory does not provide them.
/// </summary>
public sealed record VexAdvisoryInfo
{
/// <summary>
/// CVE identifier.
/// </summary>
public required string CveId { get; init; }
/// <summary>
/// Affected functions or components.
/// </summary>
public IReadOnlyList<string>? AffectedFunctions { get; init; }
/// <summary>
/// CVSS score if available.
/// </summary>
public double? CvssScore { get; init; }
/// <summary>
/// Summary description.
/// </summary>
public string? Summary { get; init; }
/// <summary>
/// Fixed in versions.
/// </summary>
public IReadOnlyList<string>? FixedInVersions { get; init; }
}

View File

@@ -0,0 +1,51 @@
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Byte-level change delta (rolling hash window granularity).
/// Nested inside <c>PackageDelta.ByteDeltas</c>.
/// </summary>
public sealed record ByteDelta
{
/// <summary>
/// Scope of this delta: always "byte" for byte deltas.
/// Fixed discriminator for consumers processing mixed delta lists.
/// </summary>
[JsonPropertyName("scope")]
public string Scope { get; init; } = "byte";
/// <summary>
/// Byte offset where the change begins.
/// </summary>
[JsonPropertyName("offset")]
public required long Offset { get; init; }
/// <summary>
/// Size of the changed region in bytes.
/// </summary>
[JsonPropertyName("size")]
public required int Size { get; init; }
/// <summary>
/// Rolling hash of the "before" bytes.
/// </summary>
[JsonPropertyName("fromHash")]
public required string FromHash { get; init; }
/// <summary>
/// Rolling hash of the "after" bytes.
/// </summary>
[JsonPropertyName("toHash")]
public required string ToHash { get; init; }
/// <summary>
/// Binary section containing this change (e.g., ".text", ".data").
/// </summary>
[JsonPropertyName("section")]
public string? Section { get; init; }
/// <summary>
/// Optional context description for this change.
/// </summary>
[JsonPropertyName("context")]
public string? Context { get; init; }
}

View File

@@ -0,0 +1,178 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Root model for change trace artifacts.
/// Schema: stella.change-trace/1.0
/// </summary>
public sealed record ChangeTrace
{
/// <summary>
/// Current schema version for change trace documents.
/// </summary>
public const string SchemaVersion = "stella.change-trace/1.0";
/// <summary>
/// Schema identifier for this change trace. Defaults to <see cref="SchemaVersion"/>.
/// </summary>
[JsonPropertyName("schema")]
public string Schema { get; init; } = SchemaVersion;
/// <summary>
/// Subject artifact being compared.
/// </summary>
[JsonPropertyName("subject")]
public required ChangeTraceSubject Subject { get; init; }
/// <summary>
/// Analysis basis and configuration.
/// </summary>
[JsonPropertyName("basis")]
public required ChangeTraceBasis Basis { get; init; }
/// <summary>
/// Package-level deltas with nested symbol and byte deltas.
/// Defaults to an empty (non-default) immutable array.
/// </summary>
[JsonPropertyName("deltas")]
public ImmutableArray<PackageDelta> Deltas { get; init; } = [];
/// <summary>
/// Aggregated summary of all changes.
/// </summary>
[JsonPropertyName("summary")]
public required ChangeTraceSummary Summary { get; init; }
/// <summary>
/// Commitment hash for deterministic verification. Null when not computed.
/// </summary>
[JsonPropertyName("commitment")]
public ChangeTraceCommitment? Commitment { get; init; }
/// <summary>
/// Reference to DSSE attestation, if attached.
/// </summary>
[JsonPropertyName("attestation")]
public ChangeTraceAttestationRef? Attestation { get; init; }
}
/// <summary>
/// Subject artifact being compared.
/// </summary>
public sealed record ChangeTraceSubject
{
/// <summary>
/// Type of artifact: "oci.image", "binary", "package".
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Digest of the artifact (e.g., sha256:...).
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Package URL if applicable; null otherwise.
/// </summary>
[JsonPropertyName("purl")]
public string? Purl { get; init; }
/// <summary>
/// Human-readable name of the artifact, if known.
/// </summary>
[JsonPropertyName("name")]
public string? Name { get; init; }
}
/// <summary>
/// Analysis basis and configuration: identifies the scans being compared
/// and the engine/policies used to produce the trace.
/// </summary>
public sealed record ChangeTraceBasis
{
/// <summary>
/// Primary scan identifier for this comparison.
/// </summary>
[JsonPropertyName("scanId")]
public required string ScanId { get; init; }
/// <summary>
/// Scan ID of the "before" state. Null for a non-comparative trace.
/// </summary>
[JsonPropertyName("fromScanId")]
public string? FromScanId { get; init; }
/// <summary>
/// Scan ID of the "after" state. Null for a non-comparative trace.
/// </summary>
[JsonPropertyName("toScanId")]
public string? ToScanId { get; init; }
/// <summary>
/// Lattice policies applied during analysis. Defaults to empty.
/// </summary>
[JsonPropertyName("policies")]
public ImmutableArray<string> Policies { get; init; } = [];
/// <summary>
/// Diff methods used: "pkg", "symbol", "byte". Defaults to empty.
/// </summary>
[JsonPropertyName("diffMethod")]
public ImmutableArray<string> DiffMethod { get; init; } = [];
/// <summary>
/// Version of the engine that produced this trace.
/// </summary>
[JsonPropertyName("engineVersion")]
public required string EngineVersion { get; init; }
/// <summary>
/// Digest of the engine binary/source for reproducibility verification.
/// </summary>
[JsonPropertyName("engineDigest")]
public string? EngineDigest { get; init; }
/// <summary>
/// Timestamp when analysis was performed.
/// </summary>
[JsonPropertyName("analyzedAt")]
public required DateTimeOffset AnalyzedAt { get; init; }
}
/// <summary>
/// Commitment hash for deterministic verification of a change trace document.
/// </summary>
public sealed record ChangeTraceCommitment
{
/// <summary>
/// SHA-256 hash of the canonical JSON representation.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
/// <summary>
/// Algorithm used for canonicalization and hashing.
/// Defaults to RFC 8785 (JSON Canonicalization Scheme) + SHA-256.
/// </summary>
[JsonPropertyName("algorithm")]
public string Algorithm { get; init; } = "RFC8785+SHA256";
}
/// <summary>
/// Reference to DSSE attestation attached to a change trace.
/// </summary>
public sealed record ChangeTraceAttestationRef
{
/// <summary>
/// Predicate type for the attestation.
/// </summary>
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
/// <summary>
/// Digest of the DSSE envelope, if known.
/// </summary>
[JsonPropertyName("envelopeDigest")]
public string? EnvelopeDigest { get; init; }
}

View File

@@ -0,0 +1,78 @@
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Aggregated summary of all changes in a change trace.
/// </summary>
public sealed record ChangeTraceSummary
{
/// <summary>
/// Total number of packages with changes.
/// </summary>
[JsonPropertyName("changedPackages")]
public required int ChangedPackages { get; init; }
/// <summary>
/// Total number of symbols with changes.
/// </summary>
[JsonPropertyName("changedSymbols")]
public required int ChangedSymbols { get; init; }
/// <summary>
/// Total bytes changed across all packages.
/// </summary>
[JsonPropertyName("changedBytes")]
public required long ChangedBytes { get; init; }
/// <summary>
/// Aggregated risk delta score. See <see cref="ChangeTraceVerdict"/> for
/// the thresholds that map this score to a verdict.
/// </summary>
[JsonPropertyName("riskDelta")]
public required double RiskDelta { get; init; }
/// <summary>
/// Overall verdict based on risk delta.
/// </summary>
[JsonPropertyName("verdict")]
public required ChangeTraceVerdict Verdict { get; init; }
/// <summary>
/// Risk score before changes. Null when not computed.
/// </summary>
[JsonPropertyName("beforeRiskScore")]
public double? BeforeRiskScore { get; init; }
/// <summary>
/// Risk score after changes. Null when not computed.
/// </summary>
[JsonPropertyName("afterRiskScore")]
public double? AfterRiskScore { get; init; }
}
/// <summary>
/// Overall verdict for a change trace.
/// Serialized as the enum member name via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ChangeTraceVerdict
{
/// <summary>
/// Risk has decreased significantly (score &lt; -0.3).
/// </summary>
RiskDown,
/// <summary>
/// Risk change is minimal (-0.3 &lt;= score &lt;= 0.3).
/// </summary>
Neutral,
/// <summary>
/// Risk has increased (score &gt; 0.3).
/// </summary>
RiskUp,
/// <summary>
/// Unable to determine risk impact.
/// </summary>
Inconclusive
}

View File

@@ -0,0 +1,208 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Package-level change delta. The root delta type: symbol- and byte-level
/// deltas for the same package are nested inside it.
/// </summary>
public sealed record PackageDelta
{
/// <summary>
/// Scope of this delta: always "pkg" for package deltas.
/// Fixed discriminator for consumers processing mixed delta lists.
/// </summary>
[JsonPropertyName("scope")]
public string Scope { get; init; } = "pkg";
/// <summary>
/// Package URL (PURL) identifying the package.
/// </summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>
/// Human-readable package name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Version before the change.
/// </summary>
[JsonPropertyName("fromVersion")]
public required string FromVersion { get; init; }
/// <summary>
/// Version after the change.
/// </summary>
[JsonPropertyName("toVersion")]
public required string ToVersion { get; init; }
/// <summary>
/// Type of change detected.
/// </summary>
[JsonPropertyName("changeType")]
public required PackageChangeType ChangeType { get; init; }
/// <summary>
/// Explanation of the change reason.
/// </summary>
[JsonPropertyName("explain")]
public required PackageChangeExplanation Explain { get; init; }
/// <summary>
/// Evidence supporting this change classification.
/// </summary>
[JsonPropertyName("evidence")]
public required PackageDeltaEvidence Evidence { get; init; }
/// <summary>
/// Trust delta computed for this package change. Null when not computed.
/// </summary>
[JsonPropertyName("trustDelta")]
public TrustDelta? TrustDelta { get; init; }
/// <summary>
/// Symbol-level deltas within this package. Defaults to empty.
/// </summary>
[JsonPropertyName("symbolDeltas")]
public ImmutableArray<SymbolDelta> SymbolDeltas { get; init; } = [];
/// <summary>
/// Byte-level deltas within this package. Defaults to empty.
/// </summary>
[JsonPropertyName("byteDeltas")]
public ImmutableArray<ByteDelta> ByteDeltas { get; init; } = [];
}
/// <summary>
/// Type of package change detected.
/// Serialized as the enum member name via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum PackageChangeType
{
/// <summary>
/// Package was added to the artifact.
/// </summary>
Added,
/// <summary>
/// Package was removed from the artifact.
/// </summary>
Removed,
/// <summary>
/// Package was modified (general change).
/// </summary>
Modified,
/// <summary>
/// Package version was upgraded.
/// </summary>
Upgraded,
/// <summary>
/// Package version was downgraded.
/// </summary>
Downgraded,
/// <summary>
/// Package was rebuilt without version change.
/// </summary>
Rebuilt
}
/// <summary>
/// Explanation category for why the package changed.
/// Serialized as the enum member name via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum PackageChangeExplanation
{
/// <summary>
/// Vendor backport of upstream fixes.
/// </summary>
VendorBackport,
/// <summary>
/// Standard upstream version upgrade.
/// </summary>
UpstreamUpgrade,
/// <summary>
/// Security patch applied.
/// </summary>
SecurityPatch,
/// <summary>
/// Package rebuilt without source changes.
/// </summary>
Rebuild,
/// <summary>
/// Compilation flags or build options changed.
/// </summary>
FlagChange,
/// <summary>
/// New dependency added.
/// </summary>
NewDependency,
/// <summary>
/// Dependency removed.
/// </summary>
RemovedDependency,
/// <summary>
/// Change reason could not be determined.
/// </summary>
Unknown
}
/// <summary>
/// Evidence supporting the package change classification.
/// </summary>
public sealed record PackageDeltaEvidence
{
/// <summary>
/// Patch identifiers associated with this change. Defaults to empty.
/// </summary>
[JsonPropertyName("patchIds")]
public ImmutableArray<string> PatchIds { get; init; } = [];
/// <summary>
/// CVE identifiers addressed by this change. Defaults to empty.
/// </summary>
[JsonPropertyName("cveIds")]
public ImmutableArray<string> CveIds { get; init; } = [];
/// <summary>
/// Number of symbols changed in this package.
/// </summary>
[JsonPropertyName("symbolsChanged")]
public int SymbolsChanged { get; init; }
/// <summary>
/// Total bytes changed in this package.
/// </summary>
[JsonPropertyName("bytesChanged")]
public long BytesChanged { get; init; }
/// <summary>
/// Function names affected by this change. Defaults to empty.
/// </summary>
[JsonPropertyName("functions")]
public ImmutableArray<string> Functions { get; init; } = [];
/// <summary>
/// Method used to verify this change, if recorded.
/// </summary>
[JsonPropertyName("verificationMethod")]
public string? VerificationMethod { get; init; }
/// <summary>
/// Confidence score for the change classification (0.0-1.0).
/// </summary>
[JsonPropertyName("confidence")]
public double Confidence { get; init; }
}

View File

@@ -0,0 +1,114 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Symbol-level change delta (function/method granularity).
/// Nested inside <see cref="PackageDelta.SymbolDeltas"/>.
/// </summary>
public sealed record SymbolDelta
{
/// <summary>
/// Scope of this delta: always "symbol" for symbol deltas.
/// Fixed discriminator for consumers processing mixed delta lists.
/// </summary>
[JsonPropertyName("scope")]
public string Scope { get; init; } = "symbol";
/// <summary>
/// Symbol name (function/method name).
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Type of change detected for this symbol.
/// </summary>
[JsonPropertyName("changeType")]
public required SymbolChangeType ChangeType { get; init; }
/// <summary>
/// Hash of the symbol in the "before" state. Null for added symbols.
/// </summary>
[JsonPropertyName("fromHash")]
public string? FromHash { get; init; }
/// <summary>
/// Hash of the symbol in the "after" state. Null for removed symbols.
/// </summary>
[JsonPropertyName("toHash")]
public string? ToHash { get; init; }
/// <summary>
/// Size difference in bytes (after minus before; may be negative).
/// </summary>
[JsonPropertyName("sizeDelta")]
public int SizeDelta { get; init; }
/// <summary>
/// Change in CFG (Control Flow Graph) basic block count, when computed.
/// </summary>
[JsonPropertyName("cfgBlockDelta")]
public int? CfgBlockDelta { get; init; }
/// <summary>
/// Similarity score between before and after versions (0.0-1.0).
/// </summary>
[JsonPropertyName("similarity")]
public double Similarity { get; init; }
/// <summary>
/// Confidence in the match determination (0.0-1.0).
/// </summary>
[JsonPropertyName("confidence")]
public double Confidence { get; init; }
/// <summary>
/// Method used for matching: "CFGHash", "InstructionHash", "SemanticHash".
/// </summary>
[JsonPropertyName("matchMethod")]
public string? MatchMethod { get; init; }
/// <summary>
/// Human-readable explanation of the change.
/// </summary>
[JsonPropertyName("explanation")]
public string? Explanation { get; init; }
/// <summary>
/// Indices of matched instruction chunks. Defaults to empty.
/// </summary>
[JsonPropertyName("matchedChunks")]
public ImmutableArray<int> MatchedChunks { get; init; } = [];
}
/// <summary>
/// Type of symbol change detected.
/// Serialized as camelCase strings via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SymbolChangeType
{
    /// <summary>
    /// Symbol is unchanged between versions.
    /// </summary>
    Unchanged,
    /// <summary>
    /// Symbol was added (present only in the "after" version).
    /// </summary>
    Added,
    /// <summary>
    /// Symbol was removed (present only in the "before" version).
    /// </summary>
    Removed,
    /// <summary>
    /// Symbol was modified.
    /// </summary>
    Modified,
    /// <summary>
    /// Symbol was patched (security or bug fix detected).
    /// </summary>
    Patched
}

View File

@@ -0,0 +1,111 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.ChangeTrace.Models;
/// <summary>
/// Trust delta with lattice proof steps.
/// Produced by the trust-delta calculator; proof steps are a human-readable
/// audit trail of how the score was derived.
/// </summary>
public sealed record TrustDelta
{
    /// <summary>
    /// Impact on code reachability.
    /// </summary>
    [JsonPropertyName("reachabilityImpact")]
    public required ReachabilityImpact ReachabilityImpact { get; init; }
    /// <summary>
    /// Impact on exploitability.
    /// </summary>
    [JsonPropertyName("exploitabilityImpact")]
    public required ExploitabilityImpact ExploitabilityImpact { get; init; }
    /// <summary>
    /// Overall trust delta score (-1.0 to +1.0).
    /// Negative values indicate risk reduction, positive values indicate risk increase.
    /// </summary>
    [JsonPropertyName("score")]
    public required double Score { get; init; }
    /// <summary>
    /// Trust score before the change; null when no before score was computed.
    /// </summary>
    [JsonPropertyName("beforeScore")]
    public double? BeforeScore { get; init; }
    /// <summary>
    /// Trust score after the change; null when no after score was computed.
    /// </summary>
    [JsonPropertyName("afterScore")]
    public double? AfterScore { get; init; }
    /// <summary>
    /// Human-readable proof steps explaining the trust delta computation.
    /// </summary>
    [JsonPropertyName("proofSteps")]
    public ImmutableArray<string> ProofSteps { get; init; } = [];
}
/// <summary>
/// Impact classification for code reachability.
/// Serialized as camelCase strings via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ReachabilityImpact
{
    /// <summary>
    /// Reachability unchanged.
    /// </summary>
    Unchanged,
    /// <summary>
    /// Reachable code paths reduced (but not eliminated).
    /// </summary>
    Reduced,
    /// <summary>
    /// Reachable code paths increased.
    /// </summary>
    Increased,
    /// <summary>
    /// All vulnerable paths eliminated (reachable count dropped to zero).
    /// </summary>
    Eliminated,
    /// <summary>
    /// New reachable paths introduced (reachable count rose from zero).
    /// </summary>
    Introduced
}
/// <summary>
/// Impact classification for exploitability.
/// Derived from the trust-delta score against configured thresholds.
/// Serialized as camelCase strings via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ExploitabilityImpact
{
    /// <summary>
    /// Exploitability unchanged.
    /// </summary>
    Unchanged,
    /// <summary>
    /// Exploitability decreased.
    /// </summary>
    Down,
    /// <summary>
    /// Exploitability increased.
    /// </summary>
    Up,
    /// <summary>
    /// Vulnerability eliminated.
    /// </summary>
    Eliminated,
    /// <summary>
    /// New vulnerability introduced.
    /// </summary>
    Introduced
}

View File

@@ -0,0 +1,22 @@
using StellaOps.Scanner.ChangeTrace.Scoring;
namespace StellaOps.Scanner.ChangeTrace.Proofs;
/// <summary>
/// Generates human-readable proof steps for trust delta calculations.
/// Proof steps explain how the trust delta was computed and why, and are
/// embedded in the resulting <c>TrustDelta.ProofSteps</c> for auditing.
/// </summary>
public interface ILatticeProofGenerator
{
    /// <summary>
    /// Generate proof steps explaining how the trust delta was computed.
    /// </summary>
    /// <param name="context">Trust delta calculation context.</param>
    /// <param name="delta">Computed delta value (negative = risk down, positive = risk up).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of human-readable proof steps, in evaluation order.</returns>
    Task<IReadOnlyList<string>> GenerateAsync(
        TrustDeltaContext context,
        double delta,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,196 @@
using System.Globalization;
using StellaOps.Scanner.ChangeTrace.Integration;
using StellaOps.Scanner.ChangeTrace.Scoring;
namespace StellaOps.Scanner.ChangeTrace.Proofs;
/// <summary>
/// Generates human-readable proof steps for trust delta calculations.
/// Steps are emitted in a fixed order: CVE context, version change,
/// patch verification, symbol similarity, reachability, attestation,
/// runtime confirmation, and a final verdict derived from the delta value.
/// </summary>
public sealed class LatticeProofGenerator : ILatticeProofGenerator
{
    private readonly IVexLensClient _vexLens;
    /// <summary>
    /// Create a new lattice proof generator.
    /// </summary>
    /// <param name="vexLens">VexLens client for advisory information.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="vexLens"/> is null.</exception>
    public LatticeProofGenerator(IVexLensClient vexLens)
    {
        _vexLens = vexLens ?? throw new ArgumentNullException(nameof(vexLens));
    }
    /// <inheritdoc />
    public async Task<IReadOnlyList<string>> GenerateAsync(
        TrustDeltaContext context,
        double delta,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        var steps = new List<string>();
        // Step 1: CVE context - what vulnerabilities affect this package.
        // Only the first three CVEs are expanded to keep the proof readable.
        if (context.CveIds?.Count > 0)
        {
            foreach (var cve in context.CveIds.Take(3))
            {
                var advisory = await _vexLens.GetAdvisoryAsync(cve, ct).ConfigureAwait(false);
                if (advisory is not null)
                {
                    var affectedInfo = advisory.AffectedFunctions?.Count > 0
                        ? string.Join(", ", advisory.AffectedFunctions.Take(2))
                        : "package code";
                    steps.Add(string.Format(
                        CultureInfo.InvariantCulture,
                        "{0} affects {1}",
                        cve,
                        affectedInfo));
                }
                else
                {
                    steps.Add(string.Format(
                        CultureInfo.InvariantCulture,
                        "{0} referenced for {1}",
                        cve,
                        context.Purl));
                }
            }
            if (context.CveIds.Count > 3)
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "... and {0} more CVEs",
                    context.CveIds.Count - 3));
            }
        }
        // Step 2: Version change context.
        // Fix: the "Added at version" case needs only ToVersion to be set.
        // The previous guard required both versions to be non-empty, which made
        // the empty-FromVersion branch unreachable and silently dropped the
        // "Added" proof step for newly introduced packages.
        if (!string.IsNullOrEmpty(context.ToVersion))
        {
            if (string.IsNullOrEmpty(context.FromVersion))
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Added at version {0}",
                    context.ToVersion));
            }
            else if (context.FromVersion == context.ToVersion)
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Rebuilt at version {0}",
                    context.ToVersion));
            }
            else
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Version changed: {0} -> {1}",
                    context.FromVersion,
                    context.ToVersion));
            }
        }
        // Step 3: Patch verification evidence.
        // The confidence value is mapped to the strongest matching method tier.
        if (context.PatchVerificationConfidence.HasValue)
        {
            var confidence = context.PatchVerificationConfidence.Value;
            var method = confidence >= 0.9 ? "CFG hash match"
                : confidence >= 0.75 ? "instruction hash match"
                : confidence >= 0.5 ? "section match"
                : "heuristic match";
            steps.Add(string.Format(
                CultureInfo.InvariantCulture,
                "Patch verified via {0}: {1:P0} confidence",
                method,
                confidence));
        }
        // Step 4: Symbol similarity evidence.
        if (context.SymbolMatchSimilarity.HasValue)
        {
            steps.Add(string.Format(
                CultureInfo.InvariantCulture,
                "Symbol similarity: {0:P0}",
                context.SymbolMatchSimilarity.Value));
        }
        // Step 5: Reachability analysis (only when both counts are known).
        if (context.ReachableCallPathsBefore.HasValue && context.ReachableCallPathsAfter.HasValue)
        {
            var before = context.ReachableCallPathsBefore.Value;
            var after = context.ReachableCallPathsAfter.Value;
            if (before == 0 && after == 0)
            {
                steps.Add("Code path unreachable (before and after)");
            }
            else if (before > 0 && after == 0)
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Reachable call paths: {0} -> 0 (eliminated)",
                    before));
            }
            else if (before == 0 && after > 0)
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Reachable call paths: 0 -> {0} (introduced)",
                    after));
            }
            else
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "Reachable call paths: {0} -> {1}",
                    before,
                    after));
            }
        }
        // Step 6: Attestation presence.
        if (context.HasDsseAttestation)
        {
            if (context.IssuerAuthorityScore.HasValue)
            {
                steps.Add(string.Format(
                    CultureInfo.InvariantCulture,
                    "DSSE attestation present (issuer authority: {0:P0})",
                    context.IssuerAuthorityScore.Value));
            }
            else
            {
                steps.Add("DSSE attestation present");
            }
        }
        // Step 7: Runtime confirmation.
        if (context.RuntimeConfirmationConfidence.HasValue)
        {
            steps.Add(string.Format(
                CultureInfo.InvariantCulture,
                "Runtime confirmation: {0:P0} confidence",
                context.RuntimeConfirmationConfidence.Value));
        }
        // Step 8: Final verdict. Thresholds mirror the calculator's
        // significant-delta boundary (|delta| >= 0.3).
        var verdict = delta switch
        {
            <= -0.3 => "risk_down",
            >= 0.3 => "risk_up",
            _ => "neutral"
        };
        steps.Add(string.Format(
            CultureInfo.InvariantCulture,
            "Verdict: {0} ({1:+0.00;-0.00;0.00})",
            verdict,
            delta));
        return steps;
    }
}

View File

@@ -0,0 +1,34 @@
using StellaOps.Scanner.ChangeTrace.Models;
namespace StellaOps.Scanner.ChangeTrace.Scoring;
/// <summary>
/// Calculates trust delta between two artifact versions.
/// Uses VexLens for consensus scoring and ReachGraph for reachability analysis.
/// </summary>
public interface ITrustDeltaCalculator
{
    /// <summary>
    /// Calculate trust delta for a package change.
    /// </summary>
    /// <param name="context">Context containing package version information.</param>
    /// <param name="options">Calculation options; defaults are used when null.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Calculated trust delta with proof steps.</returns>
    Task<TrustDelta> CalculateAsync(
        TrustDeltaContext context,
        TrustDeltaOptions? options = null,
        CancellationToken ct = default);
    /// <summary>
    /// Calculate aggregate trust delta for all changes in a trace.
    /// </summary>
    /// <param name="contexts">Contexts for each package change.</param>
    /// <param name="options">Calculation options; defaults are used when null.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Aggregate trust delta combining all per-package deltas.</returns>
    Task<TrustDelta> CalculateAggregateAsync(
        IEnumerable<TrustDeltaContext> contexts,
        TrustDeltaOptions? options = null,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,265 @@
using System.Collections.Immutable;
using StellaOps.Scanner.ChangeTrace.Integration;
using StellaOps.Scanner.ChangeTrace.Models;
using StellaOps.Scanner.ChangeTrace.Proofs;
namespace StellaOps.Scanner.ChangeTrace.Scoring;
/// <summary>
/// Calculates trust delta between two artifact versions.
/// Implements the trust-delta formula:
/// TrustDelta = (AfterTrust - BeforeTrust) / max(BeforeTrust, 0.01)
/// where each trust value is a VexLens consensus score scaled by a
/// reachability factor, plus a patch-verification bonus on the after side.
/// </summary>
public sealed class TrustDeltaCalculator : ITrustDeltaCalculator
{
    private readonly IVexLensClient _vexLens;
    private readonly IReachGraphClient? _reachGraph;
    private readonly ILatticeProofGenerator _proofGenerator;
    /// <summary>
    /// Create a new trust delta calculator.
    /// </summary>
    /// <param name="vexLens">VexLens client for consensus scores.</param>
    /// <param name="proofGenerator">Proof step generator.</param>
    /// <param name="reachGraph">Optional ReachGraph client for reachability data.</param>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    public TrustDeltaCalculator(
        IVexLensClient vexLens,
        ILatticeProofGenerator proofGenerator,
        IReachGraphClient? reachGraph = null)
    {
        _vexLens = vexLens ?? throw new ArgumentNullException(nameof(vexLens));
        _proofGenerator = proofGenerator ?? throw new ArgumentNullException(nameof(proofGenerator));
        _reachGraph = reachGraph;
    }
    /// <inheritdoc />
    public async Task<TrustDelta> CalculateAsync(
        TrustDeltaContext context,
        TrustDeltaOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        options ??= TrustDeltaOptions.Default;
        // Get VEX consensus for both versions
        var beforeConsensus = await _vexLens.GetConsensusAsync(
            context.Purl, context.FromVersion, ct).ConfigureAwait(false);
        var afterConsensus = await _vexLens.GetConsensusAsync(
            context.Purl, context.ToVersion, ct).ConfigureAwait(false);
        // Get reachability data if available (context-supplied values take precedence)
        var reachBefore = context.ReachableCallPathsBefore;
        var reachAfter = context.ReachableCallPathsAfter;
        // Enrich from ReachGraph if client available and image digests provided
        if (_reachGraph is not null)
        {
            if (context.FromImageDigest is not null && !reachBefore.HasValue)
            {
                var reachResult = await _reachGraph.GetReachabilityAsync(
                    context.FromImageDigest, context.Purl, ct).ConfigureAwait(false);
                reachBefore = reachResult.ReachableCallPaths;
            }
            if (context.ToImageDigest is not null && !reachAfter.HasValue)
            {
                var reachResult = await _reachGraph.GetReachabilityAsync(
                    context.ToImageDigest, context.Purl, ct).ConfigureAwait(false);
                reachAfter = reachResult.ReachableCallPaths;
            }
        }
        // Get reachability factors (unreachable code contributes less trust)
        var beforeReach = ComputeReachabilityFactor(reachBefore, options);
        var afterReach = ComputeReachabilityFactor(reachAfter, options);
        // Compute before/after trust
        var beforeTrust = beforeConsensus.TrustScore * beforeReach;
        var afterTrust = afterConsensus.TrustScore * afterReach;
        // Add patch verification bonus (may push afterTrust above 1.0; clamped below)
        var patchBonus = ComputePatchVerificationBonus(context, options);
        afterTrust += patchBonus;
        // Clamp trust values to [0, 1]
        beforeTrust = Math.Clamp(beforeTrust, 0.0, 1.0);
        afterTrust = Math.Clamp(afterTrust, 0.0, 1.0);
        // Compute delta using the formula
        // Semantics: negative delta = risk down (improvement), positive delta = risk up (regression)
        // Therefore: delta = (before - after) / max(before, min_denom)
        var delta = (beforeTrust - afterTrust) / Math.Max(beforeTrust, options.MinTrustDenominator);
        delta = Math.Clamp(delta, -1.0, 1.0);
        // Determine impacts
        var reachabilityImpact = DetermineReachabilityImpact(reachBefore, reachAfter);
        var exploitabilityImpact = DetermineExploitabilityImpact(delta, options);
        // Generate proof steps (uses the unrounded delta)
        var proofSteps = await _proofGenerator.GenerateAsync(context, delta, ct).ConfigureAwait(false);
        return new TrustDelta
        {
            ReachabilityImpact = reachabilityImpact,
            ExploitabilityImpact = exploitabilityImpact,
            Score = Math.Round(delta, 2),
            BeforeScore = Math.Round(beforeTrust, 2),
            AfterScore = Math.Round(afterTrust, 2),
            ProofSteps = proofSteps.ToImmutableArray()
        };
    }
    /// <inheritdoc />
    public async Task<TrustDelta> CalculateAggregateAsync(
        IEnumerable<TrustDeltaContext> contexts,
        TrustDeltaOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(contexts);
        options ??= TrustDeltaOptions.Default;
        var contextList = contexts.ToList();
        if (contextList.Count == 0)
        {
            return new TrustDelta
            {
                ReachabilityImpact = ReachabilityImpact.Unchanged,
                ExploitabilityImpact = ExploitabilityImpact.Unchanged,
                Score = 0.0,
                ProofSteps = ["No changes to analyze"]
            };
        }
        // Calculate individual deltas (sequentially; each may hit external services)
        var deltas = new List<TrustDelta>();
        foreach (var context in contextList)
        {
            var delta = await CalculateAsync(context, options, ct).ConfigureAwait(false);
            deltas.Add(delta);
        }
        // Aggregate scores (weighted average by absolute value for significance)
        var totalWeight = 0.0;
        var weightedSum = 0.0;
        var allProofSteps = new List<string>();
        foreach (var delta in deltas)
        {
            var weight = Math.Abs(delta.Score) + 0.1; // Minimum weight to include all
            totalWeight += weight;
            weightedSum += delta.Score * weight;
            allProofSteps.AddRange(delta.ProofSteps);
        }
        var aggregateScore = totalWeight > 0 ? weightedSum / totalWeight : 0.0;
        aggregateScore = Math.Clamp(aggregateScore, -1.0, 1.0);
        // Determine aggregate impacts
        var aggregateReachability = AggregateReachabilityImpact(deltas);
        var aggregateExploitability = DetermineExploitabilityImpact(aggregateScore, options);
        // Add summary proof step
        allProofSteps.Add($"Aggregate of {deltas.Count} package changes");
        return new TrustDelta
        {
            ReachabilityImpact = aggregateReachability,
            ExploitabilityImpact = aggregateExploitability,
            Score = Math.Round(aggregateScore, 2),
            BeforeScore = Math.Round(deltas.Average(d => d.BeforeScore ?? 0.5), 2),
            AfterScore = Math.Round(deltas.Average(d => d.AfterScore ?? 0.5), 2),
            ProofSteps = allProofSteps.ToImmutableArray()
        };
    }
    /// <summary>
    /// Compute reachability factor for trust calculation.
    /// Unreachable code gets reduced trust contribution; unknown (null) counts
    /// leave the trust unscaled.
    /// </summary>
    private static double ComputeReachabilityFactor(int? callPaths, TrustDeltaOptions options)
    {
        if (callPaths is null) return 1.0;
        if (callPaths == 0) return options.UnreachableReductionFactor;
        return 1.0;
    }
    /// <summary>
    /// Compute patch verification bonus from context.
    /// Each evidence source contributes its configured weight scaled by confidence.
    /// </summary>
    private static double ComputePatchVerificationBonus(
        TrustDeltaContext context,
        TrustDeltaOptions options)
    {
        var bonus = 0.0;
        if (context.PatchVerificationConfidence.HasValue)
        {
            bonus += options.FunctionMatchWeight * context.PatchVerificationConfidence.Value;
        }
        if (context.SymbolMatchSimilarity.HasValue)
        {
            bonus += options.SectionMatchWeight * context.SymbolMatchSimilarity.Value;
        }
        if (context.HasDsseAttestation && context.IssuerAuthorityScore.HasValue)
        {
            bonus += options.AttestationWeight * context.IssuerAuthorityScore.Value;
        }
        if (context.RuntimeConfirmationConfidence.HasValue)
        {
            bonus += options.RuntimeConfirmWeight * context.RuntimeConfirmationConfidence.Value;
        }
        return bonus;
    }
    /// <summary>
    /// Determine reachability impact from before/after call path counts.
    /// Unknown (null) counts on either side yield Unchanged.
    /// </summary>
    private static ReachabilityImpact DetermineReachabilityImpact(int? before, int? after)
    {
        if (before is null || after is null) return ReachabilityImpact.Unchanged;
        if (before == 0 && after > 0) return ReachabilityImpact.Introduced;
        if (before > 0 && after == 0) return ReachabilityImpact.Eliminated;
        if (after < before) return ReachabilityImpact.Reduced;
        if (after > before) return ReachabilityImpact.Increased;
        return ReachabilityImpact.Unchanged;
    }
    /// <summary>
    /// Determine exploitability impact from delta value.
    /// Checks the stronger (eliminated/introduced) thresholds before the
    /// weaker up/down thresholds, so ordering of the comparisons matters.
    /// </summary>
    private static ExploitabilityImpact DetermineExploitabilityImpact(
        double delta,
        TrustDeltaOptions options)
    {
        if (delta <= -options.ExploitabilityEliminatedThreshold)
            return ExploitabilityImpact.Eliminated;
        if (delta < -options.ExploitabilityChangeThreshold)
            return ExploitabilityImpact.Down;
        if (delta >= options.ExploitabilityIntroducedThreshold)
            return ExploitabilityImpact.Introduced;
        if (delta > options.ExploitabilityChangeThreshold)
            return ExploitabilityImpact.Up;
        return ExploitabilityImpact.Unchanged;
    }
    /// <summary>
    /// Aggregate reachability impacts from multiple deltas.
    /// Worst-case wins; risk-increasing impacts dominate risk-decreasing ones.
    /// </summary>
    private static ReachabilityImpact AggregateReachabilityImpact(List<TrustDelta> deltas)
    {
        // Priority: Introduced > Increased > Reduced > Eliminated > Unchanged
        if (deltas.Any(d => d.ReachabilityImpact == ReachabilityImpact.Introduced))
            return ReachabilityImpact.Introduced;
        if (deltas.Any(d => d.ReachabilityImpact == ReachabilityImpact.Increased))
            return ReachabilityImpact.Increased;
        if (deltas.Any(d => d.ReachabilityImpact == ReachabilityImpact.Reduced))
            return ReachabilityImpact.Reduced;
        if (deltas.Any(d => d.ReachabilityImpact == ReachabilityImpact.Eliminated))
            return ReachabilityImpact.Eliminated;
        return ReachabilityImpact.Unchanged;
    }
}

View File

@@ -0,0 +1,75 @@
namespace StellaOps.Scanner.ChangeTrace.Scoring;
/// <summary>
/// Context for trust delta calculation.
/// Contains all input data needed to compute trust delta between two versions.
/// Optional (nullable) members simply skip their corresponding evidence step.
/// </summary>
public sealed record TrustDeltaContext
{
    /// <summary>
    /// Package URL (PURL) identifier.
    /// </summary>
    public required string Purl { get; init; }
    /// <summary>
    /// Version before the change.
    /// </summary>
    public required string FromVersion { get; init; }
    /// <summary>
    /// Version after the change.
    /// </summary>
    public required string ToVersion { get; init; }
    /// <summary>
    /// CVE IDs relevant to this package; null or empty when none are known.
    /// </summary>
    public IReadOnlyList<string>? CveIds { get; init; }
    /// <summary>
    /// Patch verification confidence from binary analysis (0.0 to 1.0).
    /// Based on function match similarity (CFG hash, instruction hash, etc.).
    /// </summary>
    public double? PatchVerificationConfidence { get; init; }
    /// <summary>
    /// Symbol match similarity from delta signature analysis (0.0 to 1.0).
    /// </summary>
    public double? SymbolMatchSimilarity { get; init; }
    /// <summary>
    /// Whether a DSSE attestation is available for this change.
    /// </summary>
    public bool HasDsseAttestation { get; init; }
    /// <summary>
    /// Authority score of the DSSE issuer (0.0 to 1.0).
    /// Only consulted when <see cref="HasDsseAttestation"/> is true.
    /// </summary>
    public double? IssuerAuthorityScore { get; init; }
    /// <summary>
    /// Number of reachable call paths before the change; null when unknown.
    /// </summary>
    public int? ReachableCallPathsBefore { get; init; }
    /// <summary>
    /// Number of reachable call paths after the change; null when unknown.
    /// </summary>
    public int? ReachableCallPathsAfter { get; init; }
    /// <summary>
    /// Image digest for the "before" state (for ReachGraph queries).
    /// </summary>
    public string? FromImageDigest { get; init; }
    /// <summary>
    /// Image digest for the "after" state (for ReachGraph queries).
    /// </summary>
    public string? ToImageDigest { get; init; }
    /// <summary>
    /// Runtime confirmation confidence (0.0 to 1.0).
    /// From actual runtime observation of patch effectiveness.
    /// </summary>
    public double? RuntimeConfirmationConfidence { get; init; }
}

View File

@@ -0,0 +1,79 @@
namespace StellaOps.Scanner.ChangeTrace.Scoring;
/// <summary>
/// Options for trust delta calculation.
/// Configures weights and thresholds for the trust delta formula.
/// All weights and thresholds are dimensionless fractions of trust score.
/// </summary>
public sealed record TrustDeltaOptions
{
    /// <summary>
    /// Default options instance (shared, immutable).
    /// </summary>
    public static TrustDeltaOptions Default { get; } = new();
    /// <summary>
    /// Weight for function match confidence in patch verification bonus.
    /// Default: 0.25 (25% contribution).
    /// </summary>
    public double FunctionMatchWeight { get; init; } = 0.25;
    /// <summary>
    /// Weight for section match confidence in patch verification bonus.
    /// Default: 0.15 (15% contribution).
    /// </summary>
    public double SectionMatchWeight { get; init; } = 0.15;
    /// <summary>
    /// Weight for DSSE attestation presence in patch verification bonus.
    /// Default: 0.10 (10% contribution).
    /// </summary>
    public double AttestationWeight { get; init; } = 0.10;
    /// <summary>
    /// Weight for runtime confirmation in patch verification bonus.
    /// Default: 0.10 (10% contribution).
    /// </summary>
    public double RuntimeConfirmWeight { get; init; } = 0.10;
    /// <summary>
    /// Threshold for considering delta significant.
    /// |delta| >= threshold => risk_up or risk_down verdict.
    /// Default: 0.3 (30% change).
    /// </summary>
    public double SignificantDeltaThreshold { get; init; } = 0.3;
    /// <summary>
    /// Minimum trust value to use as denominator in delta calculation.
    /// Prevents division by zero and very large delta values.
    /// Default: 0.01.
    /// </summary>
    public double MinTrustDenominator { get; init; } = 0.01;
    /// <summary>
    /// Reduction factor for unreachable code paths.
    /// Unreachable code contributes this fraction to trust (higher = more trust).
    /// Default: 0.7 (30% reduction for unreachable).
    /// </summary>
    public double UnreachableReductionFactor { get; init; } = 0.7;
    /// <summary>
    /// Threshold for considering exploitability eliminated.
    /// delta <= -threshold => exploitability eliminated.
    /// Default: 0.5.
    /// </summary>
    public double ExploitabilityEliminatedThreshold { get; init; } = 0.5;
    /// <summary>
    /// Threshold for considering exploitability introduced.
    /// delta >= threshold => exploitability introduced.
    /// Default: 0.5.
    /// </summary>
    public double ExploitabilityIntroducedThreshold { get; init; } = 0.5;
    /// <summary>
    /// Threshold for considering exploitability changed.
    /// |delta| >= threshold => up or down.
    /// Default: 0.1.
    /// </summary>
    public double ExploitabilityChangeThreshold { get; init; } = 0.1;
}

View File

@@ -0,0 +1,158 @@
using System.Collections.Immutable;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.Scanner.ChangeTrace.Models;
namespace StellaOps.Scanner.ChangeTrace.Serialization;
/// <summary>
/// Deterministic serialization for change traces (RFC 8785 compliant).
/// Canonical output sorts deltas by PURL, symbol deltas by name, and byte
/// deltas by offset so repeated serialization of the same trace is stable.
/// </summary>
public static class ChangeTraceSerializer
{
    // Canonical (compact) and pretty variants share every setting except indentation.
    private static readonly JsonSerializerOptions SerializerOptions = BuildJsonOptions(indented: false);
    private static readonly JsonSerializerOptions PrettyOptions = BuildJsonOptions(indented: true);

    /// <summary>Build the shared serializer settings; only indentation differs between variants.</summary>
    private static JsonSerializerOptions BuildJsonOptions(bool indented) => new()
    {
        WriteIndented = indented,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    /// <summary>
    /// Serialize change trace to canonical JSON (RFC 8785).
    /// Deltas are sorted by PURL, symbols by name, bytes by offset.
    /// </summary>
    /// <param name="trace">Change trace to serialize.</param>
    /// <returns>Canonical JSON string.</returns>
    public static string SerializeCanonical(Models.ChangeTrace trace)
    {
        ArgumentNullException.ThrowIfNull(trace);
        return CanonJson.Serialize(Normalize(trace), SerializerOptions);
    }

    /// <summary>
    /// Serialize with pretty printing for human reading.
    /// </summary>
    /// <param name="trace">Change trace to serialize.</param>
    /// <returns>Pretty-printed JSON string.</returns>
    public static string SerializePretty(Models.ChangeTrace trace)
    {
        ArgumentNullException.ThrowIfNull(trace);
        return JsonSerializer.Serialize(Normalize(trace), PrettyOptions);
    }

    /// <summary>
    /// Serialize to UTF-8 bytes (canonical).
    /// </summary>
    /// <param name="trace">Change trace to serialize.</param>
    /// <returns>UTF-8 encoded canonical JSON bytes.</returns>
    public static byte[] SerializeCanonicalBytes(Models.ChangeTrace trace)
    {
        ArgumentNullException.ThrowIfNull(trace);
        return CanonJson.Canonicalize(Normalize(trace), SerializerOptions);
    }

    /// <summary>
    /// Deserialize change trace from JSON.
    /// </summary>
    /// <param name="json">JSON string to deserialize.</param>
    /// <returns>Deserialized change trace, or null if invalid.</returns>
    public static Models.ChangeTrace? Deserialize(string json)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(json);
        return JsonSerializer.Deserialize<Models.ChangeTrace>(json, SerializerOptions);
    }

    /// <summary>
    /// Deserialize change trace from UTF-8 bytes.
    /// </summary>
    /// <param name="utf8Json">UTF-8 encoded JSON bytes.</param>
    /// <returns>Deserialized change trace, or null if invalid.</returns>
    public static Models.ChangeTrace? Deserialize(ReadOnlySpan<byte> utf8Json) =>
        JsonSerializer.Deserialize<Models.ChangeTrace>(utf8Json, SerializerOptions);

    /// <summary>
    /// Compute commitment hash for a change trace.
    /// The commitment and attestation fields are excluded from the hash
    /// computation so the hash can be embedded into those fields afterwards.
    /// </summary>
    /// <param name="trace">Change trace to hash.</param>
    /// <returns>SHA-256 hash as lowercase hex string.</returns>
    public static string ComputeCommitmentHash(Models.ChangeTrace trace)
    {
        ArgumentNullException.ThrowIfNull(trace);
        // Strip self-referential fields before hashing.
        var hashable = trace with
        {
            Commitment = null,
            Attestation = null
        };
        var canonicalBytes = CanonJson.Canonicalize(Normalize(hashable), SerializerOptions);
        return CanonJson.Sha256Hex(canonicalBytes);
    }

    /// <summary>
    /// Verify that a change trace's commitment hash is correct.
    /// </summary>
    /// <param name="trace">Change trace to verify.</param>
    /// <returns>True if commitment matches computed hash.</returns>
    public static bool VerifyCommitment(Models.ChangeTrace trace)
    {
        ArgumentNullException.ThrowIfNull(trace);
        return trace.Commitment is not null
            && string.Equals(
                ComputeCommitmentHash(trace),
                trace.Commitment.Sha256,
                StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Sort trace contents for deterministic serialization:
    /// package deltas by PURL (ordinal), then each package internally.
    /// </summary>
    private static Models.ChangeTrace Normalize(Models.ChangeTrace trace) => trace with
    {
        Deltas = trace.Deltas
            .OrderBy(d => d.Purl, StringComparer.Ordinal)
            .Select(NormalizePackage)
            .ToImmutableArray()
    };

    /// <summary>Sort a package delta's symbols by name (ordinal) and byte deltas by offset.</summary>
    private static PackageDelta NormalizePackage(PackageDelta delta) => delta with
    {
        SymbolDeltas = delta.SymbolDeltas
            .OrderBy(s => s.Name, StringComparer.Ordinal)
            .ToImmutableArray(),
        ByteDeltas = delta.ByteDeltas
            .OrderBy(b => b.Offset)
            .ToImmutableArray()
    };
}

View File

@@ -0,0 +1,19 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Scanner.ChangeTrace</RootNamespace>
<Description>Change-Trace library for deterministic trust-delta visualization between binary/package versions</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,260 @@
// -----------------------------------------------------------------------------
// ChangeTraceValidator.cs
// Sprint: SPRINT_20260112_200_006_CLI_commands
// Description: Validates change trace structure and content.
// -----------------------------------------------------------------------------
using StellaOps.Scanner.ChangeTrace.Models;
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
namespace StellaOps.Scanner.ChangeTrace.Validation;
/// <summary>
/// Result of change trace validation: a validity flag plus the collected
/// errors (fatal) and warnings (informational).
/// </summary>
public sealed record ChangeTraceValidationResult
{
    /// <summary>
    /// Whether the trace is valid (no errors).
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Validation errors (structural issues that invalidate the trace).
    /// </summary>
    public required IReadOnlyList<string> Errors { get; init; }

    /// <summary>
    /// Validation warnings (issues that don't invalidate but should be noted).
    /// </summary>
    public required IReadOnlyList<string> Warnings { get; init; }

    /// <summary>
    /// Create a successful validation result with no errors.
    /// </summary>
    public static ChangeTraceValidationResult Success(IReadOnlyList<string>? warnings = null) =>
        new()
        {
            IsValid = true,
            Errors = Array.Empty<string>(),
            Warnings = warnings ?? Array.Empty<string>()
        };

    /// <summary>
    /// Create a failed validation result carrying the given errors.
    /// </summary>
    public static ChangeTraceValidationResult Failure(
        IReadOnlyList<string> errors,
        IReadOnlyList<string>? warnings = null) =>
        new()
        {
            IsValid = false,
            Errors = errors,
            Warnings = warnings ?? Array.Empty<string>()
        };
}
/// <summary>
/// Validates change trace structure and content.
/// </summary>
public sealed class ChangeTraceValidator
{
/// <summary>
/// Validate a change trace.
/// Runs structural checks over schema, subject, basis, summary, deltas,
/// and (when present) the commitment block, in that order.
/// </summary>
/// <param name="trace">The trace to validate.</param>
/// <returns>Validation result with any errors and warnings.</returns>
public ChangeTraceValidationResult Validate(ChangeTraceModel trace)
{
    ArgumentNullException.ThrowIfNull(trace);
    List<string> errors = [];
    List<string> warnings = [];

    // Schema is mandatory; an unexpected prefix is tolerated with a warning only.
    if (string.IsNullOrEmpty(trace.Schema))
    {
        errors.Add("Missing required field: schema");
    }
    else if (!trace.Schema.StartsWith("stella.change-trace/", StringComparison.OrdinalIgnoreCase))
    {
        warnings.Add($"Non-standard schema: {trace.Schema}");
    }

    // Section-by-section structural checks; each appends to the shared lists.
    ValidateSubject(trace.Subject, errors, warnings);
    ValidateBasis(trace.Basis, errors, warnings);
    ValidateSummary(trace.Summary, errors, warnings);
    ValidateDeltas(trace.Deltas, errors, warnings);

    // Commitment is optional; validate only when supplied.
    if (trace.Commitment is not null)
    {
        ValidateCommitment(trace.Commitment, errors, warnings);
    }

    return errors.Count == 0
        ? ChangeTraceValidationResult.Success(warnings)
        : ChangeTraceValidationResult.Failure(errors, warnings);
}
/// <summary>
/// Check the subject block: type and digest are required; a digest without
/// an algorithm prefix (e.g. "sha256:") is flagged as a warning.
/// </summary>
private static void ValidateSubject(
    ChangeTraceSubject? subject,
    List<string> errors,
    List<string> warnings)
{
    if (subject is null)
    {
        errors.Add("Missing required field: subject");
        return;
    }
    if (subject.Type is null or "")
    {
        errors.Add("Missing required field: subject.type");
    }
    if (subject.Digest is null or "")
    {
        errors.Add("Missing required field: subject.digest");
    }
    else if (subject.Digest.IndexOf(':') < 0)
    {
        warnings.Add("Subject digest should include algorithm prefix (e.g., sha256:...)");
    }
}
private static void ValidateBasis(
ChangeTraceBasis? basis,
List<string> errors,
List<string> warnings)
{
if (basis is null)
{
errors.Add("Missing required field: basis");
return;
}
if (string.IsNullOrEmpty(basis.ScanId))
{
errors.Add("Missing required field: basis.scanId");
}
if (string.IsNullOrEmpty(basis.EngineVersion))
{
warnings.Add("Missing basis.engineVersion - reproducibility may be affected");
}
if (basis.AnalyzedAt == default)
{
warnings.Add("Missing basis.analyzedAt timestamp");
}
if (basis.DiffMethod.IsDefaultOrEmpty)
{
warnings.Add("Missing basis.diffMethod - diff methods should be specified");
}
}
private static void ValidateSummary(
ChangeTraceSummary? summary,
List<string> errors,
List<string> warnings)
{
if (summary is null)
{
errors.Add("Missing required field: summary");
return;
}
if (summary.ChangedPackages < 0)
{
errors.Add("Invalid summary.changedPackages: must be non-negative");
}
if (summary.ChangedSymbols < 0)
{
errors.Add("Invalid summary.changedSymbols: must be non-negative");
}
if (summary.ChangedBytes < 0)
{
errors.Add("Invalid summary.changedBytes: must be non-negative");
}
if (summary.RiskDelta < -1.0 || summary.RiskDelta > 1.0)
{
warnings.Add($"Unusual riskDelta value: {summary.RiskDelta} (expected -1.0 to 1.0)");
}
}
private static void ValidateDeltas(
IReadOnlyList<PackageDelta> deltas,
List<string> errors,
List<string> warnings)
{
if (deltas is null || deltas.Count == 0)
{
// Empty deltas is valid - just means no changes
return;
}
for (var i = 0; i < deltas.Count; i++)
{
var delta = deltas[i];
if (string.IsNullOrEmpty(delta.Purl))
{
errors.Add($"Missing purl for delta at index {i}");
}
if (string.IsNullOrEmpty(delta.FromVersion) && string.IsNullOrEmpty(delta.ToVersion))
{
warnings.Add($"Delta at index {i} has neither fromVersion nor toVersion");
}
if (delta.Evidence is not null)
{
if (delta.Evidence.Confidence < 0 || delta.Evidence.Confidence > 1)
{
warnings.Add($"Invalid confidence value for delta at index {i}: {delta.Evidence.Confidence}");
}
}
if (delta.TrustDelta is not null)
{
if (delta.TrustDelta.Score < -1.0 || delta.TrustDelta.Score > 1.0)
{
warnings.Add($"Unusual trust delta score for delta at index {i}: {delta.TrustDelta.Score}");
}
}
}
}
private static void ValidateCommitment(
ChangeTraceCommitment commitment,
List<string> errors,
List<string> warnings)
{
if (string.IsNullOrEmpty(commitment.Sha256))
{
errors.Add("Missing required field: commitment.sha256");
}
if (string.IsNullOrEmpty(commitment.Algorithm))
{
warnings.Add("Missing commitment.algorithm - defaults assumed");
}
}
}

View File

@@ -0,0 +1,93 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Shared enums for call graph analysis.
namespace StellaOps.Scanner.Contracts;
using System.Text.Json.Serialization;
/// <summary>
/// Visibility level of a code symbol.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<Visibility>))]
public enum Visibility
{
    /// <summary>Accessible from any assembly/module.</summary>
    Public,
    /// <summary>Accessible only within the declaring assembly/module.</summary>
    Internal,
    /// <summary>Accessible to the declaring type and its subtypes.</summary>
    Protected,
    /// <summary>Accessible only within the declaring type.</summary>
    Private
}
/// <summary>
/// Kind of call edge in a call graph.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<CallKind>))]
public enum CallKind
{
    /// <summary>Statically resolved direct call.</summary>
    Direct,
    /// <summary>Virtual/interface dispatch; the concrete target is chosen at runtime.</summary>
    Virtual,
    /// <summary>Invocation through a delegate or function-pointer value.</summary>
    Delegate,
    /// <summary>Invocation via reflection APIs.</summary>
    Reflection,
    /// <summary>Dynamically bound call (late binding).</summary>
    Dynamic,
    /// <summary>Call routed through a PLT (procedure linkage table) stub in native binaries.</summary>
    Plt,
    /// <summary>Call routed through an IAT (import address table) entry in native binaries.</summary>
    Iat
}
/// <summary>
/// Type of entrypoint in a call graph.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<EntrypointType>))]
public enum EntrypointType
{
    /// <summary>HTTP request handler (route/controller action).</summary>
    HttpHandler,
    /// <summary>gRPC service method.</summary>
    GrpcMethod,
    /// <summary>Command-line command handler.</summary>
    CliCommand,
    /// <summary>Background job/worker.</summary>
    BackgroundJob,
    /// <summary>Scheduled (cron-style) job.</summary>
    ScheduledJob,
    /// <summary>Message-queue message handler.</summary>
    MessageHandler,
    /// <summary>Subscriber to an external event stream.</summary>
    EventSubscriber,
    /// <summary>WebSocket connection/message handler.</summary>
    WebSocketHandler,
    /// <summary>In-process event handler.</summary>
    EventHandler,
    /// <summary>Serverless function handler (e.g., AWS Lambda).</summary>
    Lambda,
    /// <summary>Entrypoint type could not be determined.</summary>
    Unknown
}
/// <summary>
/// Explanation type for call graph edges.
/// </summary>
/// <remarks>
/// Recorded on <c>CallEdgeExplanation</c> to capture why an edge was added and with
/// what confidence; serialized as a string via the enum converter.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<CallEdgeExplanationType>))]
public enum CallEdgeExplanationType
{
    /// <summary>Static import (ES6 import, Python import, using directive).</summary>
    Import,
    /// <summary>Dynamic load (require(), dlopen, LoadLibrary).</summary>
    DynamicLoad,
    /// <summary>Reflection invocation (Class.forName, Type.GetType).</summary>
    Reflection,
    /// <summary>Foreign function interface (JNI, P/Invoke, ctypes).</summary>
    Ffi,
    /// <summary>Environment variable guard (process.env.X, os.environ.get).</summary>
    EnvGuard,
    /// <summary>Feature flag check (LaunchDarkly, unleash, custom flags).</summary>
    FeatureFlag,
    /// <summary>Platform/architecture guard (process.platform, runtime.GOOS).</summary>
    PlatformArch,
    /// <summary>Taint gate (sanitization, validation).</summary>
    TaintGate,
    /// <summary>Loader rule (PLT/IAT/GOT entry).</summary>
    LoaderRule,
    /// <summary>Direct call (static, virtual, delegate).</summary>
    DirectCall,
    /// <summary>Cannot determine explanation type.</summary>
    Unknown
}

View File

@@ -0,0 +1,485 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Shared call graph models for Scanner CallGraph and Reachability modules.
namespace StellaOps.Scanner.Contracts;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
/// <summary>
/// A point-in-time snapshot of a call graph for analysis.
/// </summary>
public sealed record CallGraphSnapshot(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("extractedAt")] DateTimeOffset ExtractedAt,
    [property: JsonPropertyName("nodes")] ImmutableArray<CallGraphNode> Nodes,
    [property: JsonPropertyName("edges")] ImmutableArray<CallGraphEdge> Edges,
    [property: JsonPropertyName("entrypointIds")] ImmutableArray<string> EntrypointIds,
    [property: JsonPropertyName("sinkIds")] ImmutableArray<string> SinkIds)
{
    /// <summary>
    /// Returns a trimmed, normalized copy of this snapshot for deterministic operations.
    /// Nodes/edges with blank ids are dropped, strings are trimmed, and all collections
    /// are ordinally sorted so equal snapshots produce identical byte streams.
    /// </summary>
    public CallGraphSnapshot Trimmed()
    {
        // Shared normalization for the two id lists: drop blanks, trim,
        // de-duplicate, and sort ordinally.
        static ImmutableArray<string> NormalizeIds(ImmutableArray<string> ids) =>
            (ids.IsDefault ? ImmutableArray<string>.Empty : ids)
                .Where(id => !string.IsNullOrWhiteSpace(id))
                .Select(id => id.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(id => id, StringComparer.Ordinal)
                .ToImmutableArray();

        var normalizedNodes = (Nodes.IsDefault ? ImmutableArray<CallGraphNode>.Empty : Nodes)
            .Where(node => !string.IsNullOrWhiteSpace(node.NodeId))
            .Select(node => node.Trimmed())
            .OrderBy(node => node.NodeId, StringComparer.Ordinal)
            .ToImmutableArray();

        // Edge ordering key: source, target, call kind, then call site.
        var normalizedEdges = (Edges.IsDefault ? ImmutableArray<CallGraphEdge>.Empty : Edges)
            .Where(edge => !string.IsNullOrWhiteSpace(edge.SourceId) && !string.IsNullOrWhiteSpace(edge.TargetId))
            .Select(edge => edge.Trimmed())
            .OrderBy(edge => edge.SourceId, StringComparer.Ordinal)
            .ThenBy(edge => edge.TargetId, StringComparer.Ordinal)
            .ThenBy(edge => edge.CallKind.ToString(), StringComparer.Ordinal)
            .ThenBy(edge => edge.CallSite ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();

        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            Nodes = normalizedNodes,
            Edges = normalizedEdges,
            EntrypointIds = NormalizeIds(EntrypointIds),
            SinkIds = NormalizeIds(SinkIds)
        };
    }
}
/// <summary>
/// A node in the call graph representing a method/function.
/// </summary>
public sealed record CallGraphNode(
    [property: JsonPropertyName("nodeId")] string NodeId,
    [property: JsonPropertyName("symbol")] string Symbol,
    [property: JsonPropertyName("file")] string File,
    [property: JsonPropertyName("line")] int Line,
    [property: JsonPropertyName("package")] string Package,
    [property: JsonPropertyName("visibility")] Visibility Visibility,
    [property: JsonPropertyName("isEntrypoint")] bool IsEntrypoint,
    [property: JsonPropertyName("entrypointType")] EntrypointType? EntrypointType,
    [property: JsonPropertyName("isSink")] bool IsSink,
    [property: JsonPropertyName("sinkCategory")] SinkCategory? SinkCategory)
{
    /// <summary>
    /// Returns a copy with the string-valued members trimmed; null strings become
    /// empty so downstream consumers never observe null.
    /// </summary>
    public CallGraphNode Trimmed()
    {
        return this with
        {
            NodeId = NodeId?.Trim() ?? string.Empty,
            Symbol = Symbol?.Trim() ?? string.Empty,
            File = File?.Trim() ?? string.Empty,
            Package = Package?.Trim() ?? string.Empty
        };
    }
}
/// <summary>
/// An edge in the call graph representing a call relationship.
/// </summary>
public sealed record CallGraphEdge(
    [property: JsonPropertyName("sourceId")] string SourceId,
    [property: JsonPropertyName("targetId")] string TargetId,
    [property: JsonPropertyName("callKind")] CallKind CallKind,
    [property: JsonPropertyName("callSite")] string? CallSite = null,
    [property: JsonPropertyName("explanation")] CallEdgeExplanation? Explanation = null)
{
    /// <summary>
    /// Returns a copy with endpoint ids trimmed; a blank call site collapses to null,
    /// and the explanation (if any) is trimmed recursively.
    /// </summary>
    public CallGraphEdge Trimmed()
    {
        var normalizedCallSite = string.IsNullOrWhiteSpace(CallSite) ? null : CallSite.Trim();

        return this with
        {
            SourceId = SourceId?.Trim() ?? string.Empty,
            TargetId = TargetId?.Trim() ?? string.Empty,
            CallSite = normalizedCallSite,
            Explanation = Explanation?.Trimmed()
        };
    }
}
/// <summary>
/// Explanation for why an edge exists in the call graph.
/// </summary>
public sealed record CallEdgeExplanation(
    [property: JsonPropertyName("type")] CallEdgeExplanationType Type,
    [property: JsonPropertyName("confidence")] double Confidence,
    [property: JsonPropertyName("guard")] string? Guard = null,
    [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string>? Metadata = null)
{
    /// <summary>Creates a simple direct call explanation with full confidence.</summary>
    public static CallEdgeExplanation DirectCall() =>
        new(CallEdgeExplanationType.DirectCall, 1.0);

    /// <summary>Creates an import explanation with full confidence.</summary>
    /// <param name="location">Optional import location; recorded in metadata when provided.</param>
    public static CallEdgeExplanation Import(string? location = null) =>
        // Fix: location was previously accepted but silently discarded. Preserve it in
        // metadata; callers passing null get exactly the same record as before.
        new(
            CallEdgeExplanationType.Import,
            1.0,
            Metadata: location is null
                ? null
                : ImmutableDictionary<string, string>.Empty.Add("location", location));

    /// <summary>Creates a dynamic load explanation with medium confidence.</summary>
    public static CallEdgeExplanation DynamicLoad(double confidence = 0.5) =>
        new(CallEdgeExplanationType.DynamicLoad, confidence);

    /// <summary>Creates an environment guard explanation.</summary>
    public static CallEdgeExplanation EnvGuard(string guard, double confidence = 0.9) =>
        new(CallEdgeExplanationType.EnvGuard, confidence, guard);

    /// <summary>Creates a feature flag explanation.</summary>
    public static CallEdgeExplanation FeatureFlag(string flag, double confidence = 0.85) =>
        new(CallEdgeExplanationType.FeatureFlag, confidence, flag);

    /// <summary>Creates a platform/architecture guard explanation.</summary>
    public static CallEdgeExplanation PlatformArch(string platform, double confidence = 0.95) =>
        new(CallEdgeExplanationType.PlatformArch, confidence, $"platform={platform}");

    /// <summary>Creates a reflection explanation.</summary>
    public static CallEdgeExplanation ReflectionCall(double confidence = 0.5) =>
        new(CallEdgeExplanationType.Reflection, confidence);

    /// <summary>Creates a loader rule explanation (PLT/IAT/GOT).</summary>
    public static CallEdgeExplanation LoaderRule(string loaderType, ImmutableDictionary<string, string>? metadata = null) =>
        new(CallEdgeExplanationType.LoaderRule, 0.8, null, metadata ?? ImmutableDictionary<string, string>.Empty.Add("loader", loaderType));

    /// <summary>Returns a copy with a trimmed guard; a blank guard collapses to null.</summary>
    public CallEdgeExplanation Trimmed() =>
        this with
        {
            Guard = string.IsNullOrWhiteSpace(Guard) ? null : Guard.Trim()
        };
}
/// <summary>
/// A path from an entrypoint to a sink in the call graph.
/// </summary>
public sealed record ReachabilityPath(
    [property: JsonPropertyName("entrypointId")] string EntrypointId,
    [property: JsonPropertyName("sinkId")] string SinkId,
    [property: JsonPropertyName("nodeIds")] ImmutableArray<string> NodeIds)
{
    /// <summary>
    /// Returns a copy with endpoint ids trimmed and blank node ids removed.
    /// Node order is preserved (it encodes the traversal).
    /// </summary>
    public ReachabilityPath Trimmed()
    {
        var cleanedNodeIds = NodeIds.IsDefault
            ? ImmutableArray<string>.Empty
            : NodeIds
                .Where(id => !string.IsNullOrWhiteSpace(id))
                .Select(id => id.Trim())
                .ToImmutableArray();

        return this with
        {
            EntrypointId = EntrypointId?.Trim() ?? string.Empty,
            SinkId = SinkId?.Trim() ?? string.Empty,
            NodeIds = cleanedNodeIds
        };
    }
}
/// <summary>
/// Result of reachability analysis on a call graph.
/// </summary>
public sealed record ReachabilityAnalysisResult(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("computedAt")] DateTimeOffset ComputedAt,
    [property: JsonPropertyName("reachableNodeIds")] ImmutableArray<string> ReachableNodeIds,
    [property: JsonPropertyName("reachableSinkIds")] ImmutableArray<string> ReachableSinkIds,
    [property: JsonPropertyName("paths")] ImmutableArray<ReachabilityPath> Paths,
    [property: JsonPropertyName("resultDigest")] string ResultDigest)
{
    /// <summary>
    /// Returns a trimmed, normalized copy of this result: id lists are trimmed,
    /// de-duplicated and ordinally sorted, and paths are ordered by sink id then
    /// entrypoint id for deterministic output.
    /// </summary>
    public ReachabilityAnalysisResult Trimmed()
    {
        // Shared normalization for both reachable-id lists.
        static ImmutableArray<string> NormalizeIds(ImmutableArray<string> ids) =>
            (ids.IsDefault ? ImmutableArray<string>.Empty : ids)
                .Where(id => !string.IsNullOrWhiteSpace(id))
                .Select(id => id.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(id => id, StringComparer.Ordinal)
                .ToImmutableArray();

        var orderedPaths = (Paths.IsDefault ? ImmutableArray<ReachabilityPath>.Empty : Paths)
            .Select(path => path.Trimmed())
            .OrderBy(path => path.SinkId, StringComparer.Ordinal)
            .ThenBy(path => path.EntrypointId, StringComparer.Ordinal)
            .ToImmutableArray();

        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            ResultDigest = ResultDigest?.Trim() ?? string.Empty,
            ReachableNodeIds = NormalizeIds(ReachableNodeIds),
            ReachableSinkIds = NormalizeIds(ReachableSinkIds),
            Paths = orderedPaths
        };
    }
}
/// <summary>
/// Configuration options for reachability analysis.
/// </summary>
public sealed record ReachabilityAnalysisOptions
{
    /// <summary>Default options with sensible limits.</summary>
    public static ReachabilityAnalysisOptions Default { get; } = new();

    /// <summary>
    /// Maximum depth for BFS traversal (default = 256).
    /// <see cref="Validated"/> replaces values &lt;= 0 with the default of 256 and caps
    /// the value at 1024 - there is no "unlimited" setting.
    /// </summary>
    public int MaxDepth { get; init; } = 256;

    /// <summary>Maximum number of paths to return per sink (default = 10; capped at 100 by <see cref="Validated"/>).</summary>
    public int MaxPathsPerSink { get; init; } = 10;

    /// <summary>Maximum total paths to return (default = 100; capped at 1000 by <see cref="Validated"/>).</summary>
    public int MaxTotalPaths { get; init; } = 100;

    /// <summary>Whether to include node metadata in path reconstruction (default = true).</summary>
    public bool IncludeNodeMetadata { get; init; } = true;

    /// <summary>Explicit list of sink node IDs to target (default = null, meaning use snapshot.SinkIds).</summary>
    public ImmutableArray<string>? ExplicitSinks { get; init; }

    /// <summary>
    /// Validates options and returns sanitized values: non-positive limits fall back to
    /// their defaults, oversized limits are capped, and explicit sinks are trimmed,
    /// de-duplicated and ordinally sorted.
    /// </summary>
    public ReachabilityAnalysisOptions Validated()
    {
        // Non-positive values revert to the default; values above the cap are clamped.
        static int Sanitize(int value, int fallback, int cap) =>
            value <= 0 ? fallback : Math.Min(value, cap);

        ImmutableArray<string>? normalizedSinks = null;
        if (ExplicitSinks is { IsDefaultOrEmpty: false } explicitSinks)
        {
            normalizedSinks = explicitSinks
                .Where(s => !string.IsNullOrWhiteSpace(s))
                .Select(s => s.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(s => s, StringComparer.Ordinal)
                .ToImmutableArray();
        }

        return new ReachabilityAnalysisOptions
        {
            MaxDepth = Sanitize(MaxDepth, 256, 1024),
            MaxPathsPerSink = Sanitize(MaxPathsPerSink, 10, 100),
            MaxTotalPaths = Sanitize(MaxTotalPaths, 100, 1000),
            IncludeNodeMetadata = IncludeNodeMetadata,
            ExplicitSinks = normalizedSinks
        };
    }
}
/// <summary>
/// Utilities for computing deterministic identifiers for call graph elements.
/// </summary>
public static class CallGraphNodeIds
{
    /// <summary>Computes a deterministic node ID from a stable symbol identifier.</summary>
    /// <param name="stableSymbolId">Stable symbol identifier; trimmed before hashing.</param>
    /// <returns>Identifier in the form "sha256:&lt;lowercase hex&gt;".</returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="stableSymbolId"/> is null or whitespace.</exception>
    public static string Compute(string stableSymbolId)
    {
        if (string.IsNullOrWhiteSpace(stableSymbolId))
        {
            throw new ArgumentException("Symbol id must be provided.", nameof(stableSymbolId));
        }

        // Trim before hashing so incidental whitespace never changes the id.
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(stableSymbolId.Trim()));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>Builds a stable symbol identifier from language and symbol.</summary>
    /// <param name="language">Language key; trimmed and lower-cased.</param>
    /// <param name="symbol">Symbol name; trimmed, case preserved.</param>
    /// <returns>Identifier in the form "&lt;language&gt;:&lt;symbol&gt;".</returns>
    /// <exception cref="ArgumentException">Thrown when either argument is null or whitespace.</exception>
    public static string StableSymbolId(string language, string symbol)
    {
        // Fix: validate up front so null/blank input fails with a clear ArgumentException
        // (matching Compute above) instead of a NullReferenceException inside Trim().
        if (string.IsNullOrWhiteSpace(language))
        {
            throw new ArgumentException("Language must be provided.", nameof(language));
        }

        if (string.IsNullOrWhiteSpace(symbol))
        {
            throw new ArgumentException("Symbol must be provided.", nameof(symbol));
        }

        return $"{language.Trim().ToLowerInvariant()}:{symbol.Trim()}";
    }
}
/// <summary>
/// Utilities for computing digests of call graph snapshots and results.
/// </summary>
/// <remarks>
/// Digests are SHA-256 hashes of a canonical JSON payload written field-by-field in a
/// fixed order over the Trimmed() form of the input, so equal normalized inputs always
/// yield identical digests. NOTE(review): the payload layout is part of the digest
/// contract - reordering or renaming fields invalidates previously stored digests.
/// </remarks>
public static class CallGraphDigests
{
    // Canonical writer settings: no indentation and relaxed escaping keep the byte
    // stream independent of host formatting defaults.
    private static readonly JsonWriterOptions CanonicalJsonOptions = new()
    {
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Indented = false,
        SkipValidation = false
    };

    /// <summary>Computes a deterministic digest for a call graph snapshot.</summary>
    /// <param name="snapshot">Snapshot to hash; normalized via Trimmed() first.</param>
    /// <returns>Digest in the form "sha256:&lt;lowercase hex&gt;".</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="snapshot"/> is null.</exception>
    public static string ComputeGraphDigest(CallGraphSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        // Normalize first so whitespace and element order never affect the digest.
        var trimmed = snapshot.Trimmed();
        using var buffer = new MemoryStream(capacity: 64 * 1024);
        using (var writer = new Utf8JsonWriter(buffer, CanonicalJsonOptions))
        {
            WriteGraphDigestPayload(writer, trimmed);
            writer.Flush();
        }
        var hash = SHA256.HashData(buffer.ToArray());
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>Computes a deterministic digest for a reachability analysis result.</summary>
    /// <param name="result">Result to hash; normalized via Trimmed() first.</param>
    /// <returns>Digest in the form "sha256:&lt;lowercase hex&gt;".</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="result"/> is null.</exception>
    public static string ComputeResultDigest(ReachabilityAnalysisResult result)
    {
        ArgumentNullException.ThrowIfNull(result);
        var trimmed = result.Trimmed();
        using var buffer = new MemoryStream(capacity: 64 * 1024);
        using (var writer = new Utf8JsonWriter(buffer, CanonicalJsonOptions))
        {
            WriteResultDigestPayload(writer, trimmed);
            writer.Flush();
        }
        var hash = SHA256.HashData(buffer.ToArray());
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    // Writes the canonical snapshot payload. Note: ScanId and ExtractedAt are
    // intentionally excluded, so the digest identifies graph content, not the scan run.
    private static void WriteGraphDigestPayload(Utf8JsonWriter writer, CallGraphSnapshot snapshot)
    {
        writer.WriteStartObject();
        writer.WriteString("schema", "stellaops.callgraph@v1");
        writer.WriteString("language", snapshot.Language);
        writer.WritePropertyName("nodes");
        writer.WriteStartArray();
        foreach (var node in snapshot.Nodes)
        {
            writer.WriteStartObject();
            writer.WriteString("nodeId", node.NodeId);
            writer.WriteString("symbol", node.Symbol);
            writer.WriteString("file", node.File);
            writer.WriteNumber("line", node.Line);
            writer.WriteString("package", node.Package);
            writer.WriteString("visibility", node.Visibility.ToString());
            writer.WriteBoolean("isEntrypoint", node.IsEntrypoint);
            // Optional members are omitted (not written as null) to keep payloads minimal.
            if (node.EntrypointType is not null)
            {
                writer.WriteString("entrypointType", node.EntrypointType.Value.ToString());
            }
            writer.WriteBoolean("isSink", node.IsSink);
            if (node.SinkCategory is not null)
            {
                writer.WriteString("sinkCategory", node.SinkCategory.Value.ToString());
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("edges");
        writer.WriteStartArray();
        foreach (var edge in snapshot.Edges)
        {
            writer.WriteStartObject();
            writer.WriteString("sourceId", edge.SourceId);
            writer.WriteString("targetId", edge.TargetId);
            writer.WriteString("callKind", edge.CallKind.ToString());
            if (!string.IsNullOrWhiteSpace(edge.CallSite))
            {
                writer.WriteString("callSite", edge.CallSite);
            }
            if (edge.Explanation is not null)
            {
                writer.WritePropertyName("explanation");
                writer.WriteStartObject();
                writer.WriteString("type", edge.Explanation.Type.ToString());
                writer.WriteNumber("confidence", edge.Explanation.Confidence);
                if (!string.IsNullOrWhiteSpace(edge.Explanation.Guard))
                {
                    writer.WriteString("guard", edge.Explanation.Guard);
                }
                if (edge.Explanation.Metadata is { Count: > 0 })
                {
                    writer.WritePropertyName("metadata");
                    writer.WriteStartObject();
                    // Metadata keys are sorted ordinally here because the dictionary
                    // itself has no deterministic iteration order.
                    foreach (var kv in edge.Explanation.Metadata.OrderBy(kv => kv.Key, StringComparer.Ordinal))
                    {
                        writer.WriteString(kv.Key, kv.Value);
                    }
                    writer.WriteEndObject();
                }
                writer.WriteEndObject();
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("entrypointIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.EntrypointIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("sinkIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.SinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }

    // Writes the canonical result payload. Note: ScanId, ComputedAt and ResultDigest
    // are intentionally excluded; the digest covers the analysis outcome only.
    private static void WriteResultDigestPayload(Utf8JsonWriter writer, ReachabilityAnalysisResult result)
    {
        writer.WriteStartObject();
        writer.WriteString("schema", "stellaops.reachability@v1");
        writer.WriteString("graphDigest", result.GraphDigest);
        writer.WriteString("language", result.Language);
        writer.WritePropertyName("reachableNodeIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableNodeIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("reachableSinkIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableSinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("paths");
        writer.WriteStartArray();
        foreach (var path in result.Paths)
        {
            writer.WriteStartObject();
            writer.WriteString("entrypointId", path.EntrypointId);
            writer.WriteString("sinkId", path.SinkId);
            writer.WritePropertyName("nodeIds");
            writer.WriteStartArray();
            foreach (var nodeId in path.NodeIds)
            {
                writer.WriteStringValue(nodeId);
            }
            writer.WriteEndArray();
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }
}

View File

@@ -0,0 +1,8 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Scanner.CallGraph")]
[assembly: InternalsVisibleTo("StellaOps.Scanner.Reachability")]
[assembly: InternalsVisibleTo("StellaOps.Scanner.Contracts.Tests")]

View File

@@ -0,0 +1,94 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Shared contracts for Scanner CallGraph and Reachability modules.
namespace StellaOps.Scanner.Contracts;
using System.Text.Json.Serialization;
/// <summary>
/// Security-relevant sink categories for reachability analysis.
/// </summary>
/// <remarks>
/// Serialized with the explicit UPPER_SNAKE_CASE wire names declared per member via
/// <c>JsonStringEnumMemberName</c>; the wire names, not the member order, are the
/// serialization contract.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<SinkCategory>))]
public enum SinkCategory
{
    /// <summary>Command/process execution (e.g., Runtime.exec, Process.Start)</summary>
    [JsonStringEnumMemberName("CMD_EXEC")]
    CmdExec,
    /// <summary>Unsafe deserialization (e.g., BinaryFormatter, pickle.loads)</summary>
    [JsonStringEnumMemberName("UNSAFE_DESER")]
    UnsafeDeser,
    /// <summary>Raw SQL execution (e.g., SqlCommand with string concat)</summary>
    [JsonStringEnumMemberName("SQL_RAW")]
    SqlRaw,
    /// <summary>SQL injection (e.g., unparameterized queries with user input)</summary>
    [JsonStringEnumMemberName("SQL_INJECTION")]
    SqlInjection,
    /// <summary>Server-side request forgery (e.g., HttpClient with user input)</summary>
    [JsonStringEnumMemberName("SSRF")]
    Ssrf,
    /// <summary>Arbitrary file write (e.g., File.WriteAllBytes with user path)</summary>
    [JsonStringEnumMemberName("FILE_WRITE")]
    FileWrite,
    /// <summary>Path traversal (e.g., Path.Combine with ../)</summary>
    [JsonStringEnumMemberName("PATH_TRAVERSAL")]
    PathTraversal,
    /// <summary>Template/expression injection (e.g., Razor, JEXL)</summary>
    [JsonStringEnumMemberName("TEMPLATE_INJECTION")]
    TemplateInjection,
    /// <summary>Weak cryptography (e.g., MD5, DES, ECB mode)</summary>
    [JsonStringEnumMemberName("CRYPTO_WEAK")]
    CryptoWeak,
    /// <summary>Authorization bypass (e.g., JWT none alg, missing authz check)</summary>
    [JsonStringEnumMemberName("AUTHZ_BYPASS")]
    AuthzBypass,
    /// <summary>LDAP injection (e.g., DirContext.search with user input)</summary>
    [JsonStringEnumMemberName("LDAP_INJECTION")]
    LdapInjection,
    /// <summary>XPath injection (e.g., XPath.evaluate with user input)</summary>
    [JsonStringEnumMemberName("XPATH_INJECTION")]
    XPathInjection,
    /// <summary>XML External Entity injection (XXE)</summary>
    [JsonStringEnumMemberName("XXE")]
    XxeInjection,
    /// <summary>Code/expression injection (e.g., eval, ScriptEngine)</summary>
    [JsonStringEnumMemberName("CODE_INJECTION")]
    CodeInjection,
    /// <summary>Log injection (e.g., unvalidated user input in logs)</summary>
    [JsonStringEnumMemberName("LOG_INJECTION")]
    LogInjection,
    /// <summary>Reflection-based attacks (e.g., Class.forName with user input)</summary>
    [JsonStringEnumMemberName("REFLECTION")]
    Reflection,
    /// <summary>Open redirect (e.g., sendRedirect with user-controlled URL)</summary>
    [JsonStringEnumMemberName("OPEN_REDIRECT")]
    OpenRedirect
}
/// <summary>
/// A known dangerous sink with its metadata.
/// </summary>
/// <param name="Category">Sink category this definition belongs to.</param>
/// <param name="SymbolPattern">Symbol text used to match call graph symbols (see SinkRegistry.MatchSink).</param>
/// <param name="Language">Language key the pattern applies to (e.g., "dotnet", "java").</param>
/// <param name="Framework">Optional framework qualifier (e.g., "EFCore", "Spring").</param>
/// <param name="Description">Optional human-readable description.</param>
/// <param name="CweId">Optional CWE identifier (e.g., "CWE-78").</param>
/// <param name="SeverityWeight">Relative severity weight; defaults to 1.0.</param>
public sealed record SinkDefinition(
    SinkCategory Category,
    string SymbolPattern,
    string Language,
    string? Framework = null,
    string? Description = null,
    string? CweId = null,
    double SeverityWeight = 1.0);

View File

@@ -0,0 +1,143 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Registry of known dangerous sinks per language.
namespace StellaOps.Scanner.Contracts;
using System.Collections.Frozen;
using System.Collections.Immutable;
/// <summary>
/// Registry of known dangerous sinks per language.
/// </summary>
/// <remarks>
/// The registry is built once at type initialization and frozen; language keys are
/// lower-case and matched case-sensitively after normalization in the accessors.
/// </remarks>
public static class SinkRegistry
{
    private static readonly FrozenDictionary<string, ImmutableArray<SinkDefinition>> SinksByLanguage = BuildRegistry();

    // Builds the per-language sink tables. Entries here set only Category, pattern,
    // language, CWE and framework; Description and SeverityWeight keep their defaults.
    private static FrozenDictionary<string, ImmutableArray<SinkDefinition>> BuildRegistry()
    {
        var builder = new Dictionary<string, List<SinkDefinition>>(StringComparer.Ordinal);
        // .NET sinks
        AddSink(builder, "dotnet", SinkCategory.CmdExec, "System.Diagnostics.Process.Start", cweId: "CWE-78");
        AddSink(builder, "dotnet", SinkCategory.CmdExec, "System.Diagnostics.ProcessStartInfo", cweId: "CWE-78");
        AddSink(builder, "dotnet", SinkCategory.UnsafeDeser, "System.Runtime.Serialization.Formatters.Binary.BinaryFormatter.Deserialize", cweId: "CWE-502");
        AddSink(builder, "dotnet", SinkCategory.UnsafeDeser, "Newtonsoft.Json.JsonConvert.DeserializeObject", cweId: "CWE-502", framework: "Newtonsoft.Json");
        AddSink(builder, "dotnet", SinkCategory.SqlRaw, "System.Data.SqlClient.SqlCommand.ExecuteReader", cweId: "CWE-89");
        AddSink(builder, "dotnet", SinkCategory.SqlRaw, "Microsoft.EntityFrameworkCore.RelationalQueryableExtensions.FromSqlRaw", cweId: "CWE-89", framework: "EFCore");
        AddSink(builder, "dotnet", SinkCategory.Ssrf, "System.Net.Http.HttpClient.GetAsync", cweId: "CWE-918");
        AddSink(builder, "dotnet", SinkCategory.FileWrite, "System.IO.File.WriteAllBytes", cweId: "CWE-73");
        AddSink(builder, "dotnet", SinkCategory.PathTraversal, "System.IO.Path.Combine", cweId: "CWE-22");
        AddSink(builder, "dotnet", SinkCategory.CryptoWeak, "System.Security.Cryptography.MD5.Create", cweId: "CWE-327");
        AddSink(builder, "dotnet", SinkCategory.CryptoWeak, "System.Security.Cryptography.DES.Create", cweId: "CWE-327");
        // Java sinks
        AddSink(builder, "java", SinkCategory.CmdExec, "java.lang.Runtime.exec", cweId: "CWE-78");
        AddSink(builder, "java", SinkCategory.CmdExec, "java.lang.ProcessBuilder.start", cweId: "CWE-78");
        AddSink(builder, "java", SinkCategory.UnsafeDeser, "java.io.ObjectInputStream.readObject", cweId: "CWE-502");
        AddSink(builder, "java", SinkCategory.SqlRaw, "java.sql.Statement.executeQuery", cweId: "CWE-89");
        AddSink(builder, "java", SinkCategory.Ssrf, "java.net.URL.openConnection", cweId: "CWE-918");
        AddSink(builder, "java", SinkCategory.TemplateInjection, "org.springframework.expression.ExpressionParser.parseExpression", cweId: "CWE-917", framework: "Spring");
        // Node.js sinks
        AddSink(builder, "node", SinkCategory.CmdExec, "child_process.exec", cweId: "CWE-78");
        AddSink(builder, "node", SinkCategory.CmdExec, "child_process.spawn", cweId: "CWE-78");
        AddSink(builder, "node", SinkCategory.UnsafeDeser, "node-serialize.unserialize", cweId: "CWE-502");
        AddSink(builder, "node", SinkCategory.SqlRaw, "mysql.query", cweId: "CWE-89");
        AddSink(builder, "node", SinkCategory.PathTraversal, "path.join", cweId: "CWE-22");
        AddSink(builder, "node", SinkCategory.TemplateInjection, "eval", cweId: "CWE-94");
        // Python sinks
        AddSink(builder, "python", SinkCategory.CmdExec, "os.system", cweId: "CWE-78");
        AddSink(builder, "python", SinkCategory.CmdExec, "subprocess.call", cweId: "CWE-78");
        AddSink(builder, "python", SinkCategory.UnsafeDeser, "pickle.loads", cweId: "CWE-502");
        AddSink(builder, "python", SinkCategory.UnsafeDeser, "yaml.load", cweId: "CWE-502");
        AddSink(builder, "python", SinkCategory.SqlRaw, "sqlite3.Cursor.execute", cweId: "CWE-89");
        AddSink(builder, "python", SinkCategory.TemplateInjection, "jinja2.Template.render", cweId: "CWE-1336", framework: "Jinja2");
        // Go sinks
        AddSink(builder, "go", SinkCategory.CmdExec, "os/exec.Command", cweId: "CWE-78");
        AddSink(builder, "go", SinkCategory.CmdExec, "os/exec.CommandContext", cweId: "CWE-78");
        AddSink(builder, "go", SinkCategory.SqlRaw, "database/sql.DB.Query", cweId: "CWE-89");
        AddSink(builder, "go", SinkCategory.SqlRaw, "database/sql.DB.Exec", cweId: "CWE-89");
        AddSink(builder, "go", SinkCategory.Ssrf, "net/http.Get", cweId: "CWE-918");
        AddSink(builder, "go", SinkCategory.PathTraversal, "filepath.Join", cweId: "CWE-22");
        // Ruby sinks
        AddSink(builder, "ruby", SinkCategory.CmdExec, "Kernel.system", cweId: "CWE-78");
        AddSink(builder, "ruby", SinkCategory.CmdExec, "Kernel.exec", cweId: "CWE-78");
        AddSink(builder, "ruby", SinkCategory.UnsafeDeser, "Marshal.load", cweId: "CWE-502");
        AddSink(builder, "ruby", SinkCategory.UnsafeDeser, "YAML.load", cweId: "CWE-502");
        AddSink(builder, "ruby", SinkCategory.SqlRaw, "ActiveRecord::Base.connection.execute", cweId: "CWE-89", framework: "Rails");
        AddSink(builder, "ruby", SinkCategory.TemplateInjection, "ERB.new", cweId: "CWE-1336");
        // PHP sinks
        AddSink(builder, "php", SinkCategory.CmdExec, "exec", cweId: "CWE-78");
        AddSink(builder, "php", SinkCategory.CmdExec, "shell_exec", cweId: "CWE-78");
        AddSink(builder, "php", SinkCategory.CmdExec, "system", cweId: "CWE-78");
        AddSink(builder, "php", SinkCategory.UnsafeDeser, "unserialize", cweId: "CWE-502");
        AddSink(builder, "php", SinkCategory.SqlRaw, "mysqli_query", cweId: "CWE-89");
        AddSink(builder, "php", SinkCategory.SqlRaw, "PDO::query", cweId: "CWE-89");
        AddSink(builder, "php", SinkCategory.FileWrite, "file_put_contents", cweId: "CWE-73");
        AddSink(builder, "php", SinkCategory.CodeInjection, "eval", cweId: "CWE-94");
        return builder.ToFrozenDictionary(
            kvp => kvp.Key,
            kvp => kvp.Value.ToImmutableArray(),
            StringComparer.Ordinal);
    }

    // Appends a sink definition to the given language's list, creating the list on
    // first use for that language.
    private static void AddSink(
        Dictionary<string, List<SinkDefinition>> builder,
        string language,
        SinkCategory category,
        string symbolPattern,
        string? cweId = null,
        string? framework = null)
    {
        if (!builder.TryGetValue(language, out var list))
        {
            list = [];
            builder[language] = list;
        }
        list.Add(new SinkDefinition(
            Category: category,
            SymbolPattern: symbolPattern,
            Language: language,
            Framework: framework,
            CweId: cweId));
    }

    /// <summary>
    /// Gets all sink definitions for a language.
    /// </summary>
    /// <param name="language">Language key; trimmed and lower-cased before lookup. Null/blank yields an empty array.</param>
    public static ImmutableArray<SinkDefinition> GetSinksForLanguage(string language)
    {
        if (string.IsNullOrWhiteSpace(language))
        {
            return ImmutableArray<SinkDefinition>.Empty;
        }
        return SinksByLanguage.GetValueOrDefault(language.Trim().ToLowerInvariant(), ImmutableArray<SinkDefinition>.Empty);
    }

    /// <summary>
    /// Gets all registered languages.
    /// </summary>
    public static IEnumerable<string> GetRegisteredLanguages() => SinksByLanguage.Keys;

    /// <summary>
    /// Checks if a symbol matches any known sink.
    /// </summary>
    /// <param name="language">Language key of the symbol.</param>
    /// <param name="symbol">Fully qualified symbol text to test.</param>
    /// <returns>The first matching definition (registration order), or null when none match.</returns>
    public static SinkDefinition? MatchSink(string language, string symbol)
    {
        if (string.IsNullOrWhiteSpace(language) || string.IsNullOrWhiteSpace(symbol))
        {
            return null;
        }
        var sinks = GetSinksForLanguage(language);
        // NOTE(review): case-insensitive substring matching can over-match short
        // patterns (e.g. "eval" inside "evaluate") - confirm intended before tightening.
        return sinks.FirstOrDefault(sink => symbol.Contains(sink.SymbolPattern, StringComparison.OrdinalIgnoreCase));
    }
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<Description>Shared contracts for Scanner CallGraph and Reachability modules to break circular dependencies</Description>
</PropertyGroup>
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Scanner.CallGraph" />
<InternalsVisibleTo Include="StellaOps.Scanner.Reachability" />
<InternalsVisibleTo Include="StellaOps.Scanner.Contracts.Tests" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,96 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.PatchVerification.Services;
namespace StellaOps.Scanner.PatchVerification.DependencyInjection;
/// <summary>
/// Extension methods for registering patch verification services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds patch verification services with default (in-memory) storage.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPatchVerification(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Core orchestrator.
        services.TryAddScoped<IPatchVerificationOrchestrator, PatchVerificationOrchestrator>();

        // Default to in-memory store if no other implementation registered.
        services.TryAddSingleton<IPatchSignatureStore, InMemoryPatchSignatureStore>();

        // Ensure TimeProvider is registered.
        services.TryAddSingleton(TimeProvider.System);
        return services;
    }

    /// <summary>
    /// Adds patch verification with a custom signature store.
    /// </summary>
    /// <typeparam name="TStore">The signature store implementation type.</typeparam>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPatchVerification<TStore>(this IServiceCollection services)
        where TStore : class, IPatchSignatureStore
    {
        ArgumentNullException.ThrowIfNull(services);

        // Register the custom store unconditionally: the caller explicitly chose it.
        services.AddSingleton<IPatchSignatureStore, TStore>();

        // Core orchestrator.
        services.TryAddScoped<IPatchVerificationOrchestrator, PatchVerificationOrchestrator>();

        // Ensure TimeProvider is registered.
        services.TryAddSingleton(TimeProvider.System);
        return services;
    }

    /// <summary>
    /// Adds patch verification with configuration options.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Configuration action.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddPatchVerification(
        this IServiceCollection services,
        Action<PatchVerificationServiceOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        var options = new PatchVerificationServiceOptions();
        configure(options);

        // Publish the configured options so store implementations can read the
        // connection strings (previously the configured instance was discarded).
        services.TryAddSingleton(options);

        if (options.UseInMemoryStore)
        {
            // TryAdd keeps this overload consistent with the parameterless one and
            // avoids shadowing a store the caller registered explicitly beforehand.
            services.TryAddSingleton<IPatchSignatureStore, InMemoryPatchSignatureStore>();
        }
        // NOTE(review): when UseInMemoryStore is false, no IPatchSignatureStore is
        // registered here; a concrete store (e.g. PostgreSQL-backed, consuming
        // options.PostgresConnectionString) must be registered elsewhere or
        // orchestrator resolution will fail at runtime — confirm intended.

        services.TryAddScoped<IPatchVerificationOrchestrator, PatchVerificationOrchestrator>();
        services.TryAddSingleton(TimeProvider.System);
        return services;
    }
}
/// <summary>
/// Options controlling how patch verification services are wired up.
/// </summary>
public sealed class PatchVerificationServiceOptions
{
    /// <summary>
    /// When true (the default, suited to development), the in-memory signature
    /// store is registered.
    /// </summary>
    public bool UseInMemoryStore { get; set; } = true;

    /// <summary>
    /// PostgreSQL connection string for a persistent store, for use when the
    /// in-memory store is disabled.
    /// </summary>
    public string? PostgresConnectionString { get; set; }

    /// <summary>
    /// Valkey/Redis connection string for caching.
    /// </summary>
    public string? CacheConnectionString { get; set; }
}

View File

@@ -0,0 +1,57 @@
using StellaOps.Scanner.PatchVerification.Models;
namespace StellaOps.Scanner.PatchVerification;
/// <summary>
/// Orchestrates patch verification during container scans.
/// Verifies that backported security patches are actually present in binaries
/// by comparing fingerprints against known-good patch signatures.
/// </summary>
/// <remarks>
/// All members are asynchronous and accept an optional <see cref="CancellationToken"/>;
/// implementations are expected to honor cancellation between units of work.
/// </remarks>
public interface IPatchVerificationOrchestrator
{
    /// <summary>
    /// Verifies patches for vulnerabilities detected in a scan.
    /// </summary>
    /// <param name="context">Verification context with scan results and binary paths.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification results for each CVE/binary pair.</returns>
    Task<PatchVerificationResult> VerifyAsync(
        PatchVerificationContext context,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a single CVE patch in a specific binary.
    /// </summary>
    /// <param name="cveId">CVE identifier to verify.</param>
    /// <param name="binaryPath">Path to the binary to verify.</param>
    /// <param name="artifactPurl">PURL of the containing artifact.</param>
    /// <param name="options">Verification options; implementation defaults apply when null.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification evidence for the CVE/binary pair.</returns>
    Task<PatchVerificationEvidence> VerifySingleAsync(
        string cveId,
        string binaryPath,
        string artifactPurl,
        PatchVerificationOptions? options = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if patch verification data is available for a CVE.
    /// </summary>
    /// <param name="cveId">CVE identifier to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if patch fingerprints exist for the CVE.</returns>
    Task<bool> HasPatchDataAsync(
        string cveId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the list of CVEs for which patch data is available.
    /// </summary>
    /// <param name="cveIds">CVE identifiers to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Subset of input CVEs that have patch data available.</returns>
    Task<IReadOnlyList<string>> GetCvesWithPatchDataAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Reference to a DSSE envelope stored in the evidence system.
/// Provides traceability without embedding full envelope data.
/// </summary>
/// <param name="EnvelopeId">Unique identifier of the DSSE envelope.</param>
/// <param name="KeyId">Key ID used to sign the envelope (RFC7638 JWK thumbprint).</param>
/// <param name="Issuer">Issuer identifier (vendor, distro, community).</param>
/// <param name="SignedAt">When the envelope was signed.</param>
public sealed record DsseEnvelopeRef(
    string EnvelopeId,
    string KeyId,
    string Issuer,
    DateTimeOffset SignedAt)
{
    /// <summary>
    /// URI for retrieving the full envelope from storage.
    /// Null when the envelope is not persisted separately.
    /// </summary>
    public string? StorageUri { get; init; }

    /// <summary>
    /// Digest of the envelope content for integrity verification.
    /// Format: "sha256:..."
    /// </summary>
    public string? ContentDigest { get; init; }
}

View File

@@ -0,0 +1,79 @@
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Input for one patch-verification run: which scan, which CVEs to check, and
/// where the candidate binaries were extracted on disk.
/// </summary>
public sealed record PatchVerificationContext
{
    /// <summary>Unique scan identifier for correlation.</summary>
    public required string ScanId { get; init; }

    /// <summary>Tenant identifier.</summary>
    public required string TenantId { get; init; }

    /// <summary>Container image digest being scanned (sha256:...).</summary>
    public required string ImageDigest { get; init; }

    /// <summary>PURL of the scanned artifact.</summary>
    public required string ArtifactPurl { get; init; }

    /// <summary>
    /// CVE IDs detected in the scan; patch verification is attempted only for these.
    /// </summary>
    public required IReadOnlyList<string> CveIds { get; init; }

    /// <summary>
    /// Maps each binary's original container path (e.g. /usr/lib/libssl.so.1.1)
    /// to its extracted location on disk (e.g. /tmp/scan-123/usr/lib/libssl.so.1.1).
    /// </summary>
    public required IReadOnlyDictionary<string, string> BinaryPaths { get; init; }

    /// <summary>Verification options; defaults apply when not set.</summary>
    public PatchVerificationOptions Options { get; init; } = new();

    /// <summary>Correlation ID for distributed tracing.</summary>
    public string? CorrelationId { get; init; }

    /// <summary>Operating system/distro information for targeted verification.</summary>
    public string? OsRelease { get; init; }

    /// <summary>
    /// Convenience factory for verifying exactly one CVE.
    /// </summary>
    public static PatchVerificationContext ForSingleCve(
        string scanId,
        string tenantId,
        string imageDigest,
        string artifactPurl,
        string cveId,
        IReadOnlyDictionary<string, string> binaryPaths,
        PatchVerificationOptions? options = null) => new()
        {
            ScanId = scanId,
            TenantId = tenantId,
            ImageDigest = imageDigest,
            ArtifactPurl = artifactPurl,
            CveIds = [cveId],
            BinaryPaths = binaryPaths,
            Options = options ?? new PatchVerificationOptions()
        };
}

View File

@@ -0,0 +1,148 @@
using StellaOps.Feedser.BinaryAnalysis.Models;
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Evidence of patch verification for a single CVE/binary pair.
/// Designed to feed into VEX trust score computation.
/// </summary>
public sealed record PatchVerificationEvidence
{
    /// <summary>
    /// Deterministic evidence ID (UUID5 from CVE + binary digest + scan ID).
    /// Ensures reproducibility across verification runs.
    /// </summary>
    public required string EvidenceId { get; init; }

    /// <summary>
    /// CVE identifier being verified.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// PURL of the affected artifact.
    /// </summary>
    public required string ArtifactPurl { get; init; }

    /// <summary>
    /// Path to the verified binary within the container.
    /// </summary>
    public required string BinaryPath { get; init; }

    /// <summary>
    /// Verification status.
    /// </summary>
    public required PatchVerificationStatus Status { get; init; }

    /// <summary>
    /// Similarity score from fingerprint matching (0.0-1.0).
    /// Higher values indicate closer match to expected patched state.
    /// </summary>
    public required double Similarity { get; init; }

    /// <summary>
    /// Confidence in the verification result (0.0-1.0).
    /// Lower for stripped binaries, compiler variations, or partial matches.
    /// </summary>
    public required double Confidence { get; init; }

    /// <summary>
    /// Fingerprint method used for verification.
    /// </summary>
    public required FingerprintMethod Method { get; init; }

    /// <summary>
    /// Expected fingerprint from patch database (patched state).
    /// </summary>
    public BinaryFingerprint? ExpectedFingerprint { get; init; }

    /// <summary>
    /// Actual fingerprint computed from the scanned binary.
    /// </summary>
    public BinaryFingerprint? ActualFingerprint { get; init; }

    /// <summary>
    /// DSSE attestation if available.
    /// Presence increases trust score.
    /// </summary>
    public DsseEnvelopeRef? Attestation { get; init; }

    /// <summary>
    /// Issuer of the patch signature (vendor, distro, community).
    /// </summary>
    public string? IssuerId { get; init; }

    /// <summary>
    /// Human-readable reason for the status.
    /// </summary>
    public string? Reason { get; init; }

    /// <summary>
    /// Verification timestamp.
    /// </summary>
    public required DateTimeOffset VerifiedAt { get; init; }

    /// <summary>
    /// Version of the verifier engine for reproducibility tracking.
    /// </summary>
    public string? VerifierVersion { get; init; }

    /// <summary>
    /// Computes trust score contribution for VEX consensus.
    /// Follows the trust algebra defined in the architecture.
    /// </summary>
    /// <returns>Trust score between 0.0 and 1.0 (clamped).</returns>
    public double ComputeTrustScore()
    {
        // Base score from status
        var baseScore = Status switch
        {
            PatchVerificationStatus.Verified => 0.50,
            PatchVerificationStatus.PartialMatch => 0.25,
            PatchVerificationStatus.Inconclusive => 0.10,
            PatchVerificationStatus.NotPatched => 0.00,
            PatchVerificationStatus.NoPatchData => 0.00,
            _ => 0.00
        };
        // Adjust by confidence
        var adjusted = baseScore * Confidence;
        // Bonus for DSSE attestation (+15%)
        // NOTE(review): this bonus applies regardless of Status, so a NotPatched or
        // NoPatchData result carrying an attestation still scores 0.15 — confirm
        // this matches the intended trust algebra.
        if (Attestation is not null)
        {
            adjusted += 0.15;
        }
        // Bonus for function-level match (+10% scaled by similarity)
        if (Method is FingerprintMethod.CFGHash or FingerprintMethod.InstructionHash)
        {
            adjusted += 0.10 * Similarity;
        }
        // Section-level match bonus (+5% scaled by similarity)
        if (Method == FingerprintMethod.SectionHash)
        {
            adjusted += 0.05 * Similarity;
        }
        // Clamp keeps the combined bonuses inside [0, 1].
        return Math.Clamp(adjusted, 0.0, 1.0);
    }

    /// <summary>
    /// Determines if this evidence supports marking the CVE as fixed in VEX.
    /// </summary>
    /// <param name="minConfidence">Minimum confidence threshold (default 0.70).</param>
    /// <returns>True if evidence supports FIXED status.</returns>
    public bool SupportsFixedStatus(double minConfidence = 0.70)
    {
        return Status == PatchVerificationStatus.Verified && Confidence >= minConfidence;
    }

    /// <summary>
    /// Determines if this evidence is inconclusive and requires manual review.
    /// </summary>
    public bool RequiresManualReview => Status is
        PatchVerificationStatus.Inconclusive or
        PatchVerificationStatus.PartialMatch;
}

View File

@@ -0,0 +1,62 @@
using StellaOps.Feedser.BinaryAnalysis.Models;
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Options for controlling patch verification behavior.
/// </summary>
public sealed record PatchVerificationOptions
{
    /// <summary>
    /// Minimum confidence threshold to report as Verified (default: 0.7).
    /// Results below this threshold are marked as PartialMatch or Inconclusive.
    /// </summary>
    public double MinConfidenceThreshold { get; init; } = 0.70;

    /// <summary>
    /// Minimum similarity threshold for fingerprint match (default: 0.85).
    /// Lower values allow more fuzzy matching but increase false positive risk.
    /// </summary>
    public double MinSimilarityThreshold { get; init; } = 0.85;

    /// <summary>
    /// Preferred fingerprint methods in order of preference.
    /// More precise methods (CFGHash, InstructionHash) are preferred over fuzzy (TLSH).
    /// Used as a fallback chain when no fingerprinter supports a signature's exact method.
    /// </summary>
    public IReadOnlyList<FingerprintMethod> PreferredMethods { get; init; } =
    [
        FingerprintMethod.CFGHash,
        FingerprintMethod.InstructionHash,
        FingerprintMethod.SectionHash,
        FingerprintMethod.TLSH
    ];

    /// <summary>
    /// Whether to require DSSE attestation for high-confidence results.
    /// When true, unattested matches receive lower trust scores.
    /// </summary>
    public bool RequireAttestation { get; init; } = false;

    /// <summary>
    /// Maximum age of patch signature data to consider valid (hours).
    /// Signatures older than this are treated as stale and may trigger warnings.
    /// </summary>
    public int MaxPatchDataAgeHours { get; init; } = 168; // 7 days

    /// <summary>
    /// Whether to emit evidence for CVEs with no patch data available.
    /// When true, NoPatchData evidence is included in results for completeness.
    /// </summary>
    public bool EmitNoPatchDataEvidence { get; init; } = true;

    /// <summary>
    /// Whether to continue verification on errors for individual binaries.
    /// When true, failures are logged but don't stop the overall verification.
    /// </summary>
    public bool ContinueOnError { get; init; } = true;

    /// <summary>
    /// Maximum concurrent binary verifications for performance tuning.
    /// NOTE(review): not currently consumed by PatchVerificationOrchestrator,
    /// which verifies sequentially — confirm whether this is wired up elsewhere.
    /// </summary>
    public int MaxConcurrency { get; init; } = 4;
}

View File

@@ -0,0 +1,110 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Aggregated result of patch verification for a scan.
/// </summary>
public sealed record PatchVerificationResult
{
    /// <summary>
    /// Scan identifier this result belongs to.
    /// </summary>
    public required string ScanId { get; init; }

    /// <summary>
    /// Individual verification evidence for each CVE/binary pair.
    /// </summary>
    public required IReadOnlyList<PatchVerificationEvidence> Evidence { get; init; }

    /// <summary>
    /// CVEs confirmed as patched (verification succeeded with high confidence).
    /// </summary>
    public required IReadOnlySet<string> PatchedCves { get; init; }

    /// <summary>
    /// CVEs confirmed as unpatched (verification failed - vulnerable code present).
    /// </summary>
    public required IReadOnlySet<string> UnpatchedCves { get; init; }

    /// <summary>
    /// CVEs with inconclusive verification (e.g., stripped binaries).
    /// </summary>
    public required IReadOnlySet<string> InconclusiveCves { get; init; }

    /// <summary>
    /// CVEs with no patch data available in the signature store.
    /// </summary>
    public required IReadOnlySet<string> NoPatchDataCves { get; init; }

    /// <summary>
    /// Overall verification timestamp.
    /// </summary>
    public required DateTimeOffset VerifiedAt { get; init; }

    /// <summary>
    /// Verifier engine version for reproducibility.
    /// </summary>
    public required string VerifierVersion { get; init; }

    /// <summary>
    /// Total number of CVEs processed across all outcome buckets.
    /// </summary>
    public int TotalCvesProcessed =>
        PatchedCves.Count + UnpatchedCves.Count + InconclusiveCves.Count + NoPatchDataCves.Count;

    /// <summary>
    /// Fraction (0.0-1.0) of processed CVEs that were verified as patched.
    /// </summary>
    public double PatchedPercentage =>
        TotalCvesProcessed > 0 ? (double)PatchedCves.Count / TotalCvesProcessed : 0.0;

    /// <summary>
    /// Gets evidence for a specific CVE (exact, ordinal match).
    /// </summary>
    public IEnumerable<PatchVerificationEvidence> GetEvidenceForCve(string cveId) =>
        Evidence.Where(e => e.CveId == cveId);

    /// <summary>
    /// Gets evidence for a specific binary path (exact, ordinal match).
    /// </summary>
    public IEnumerable<PatchVerificationEvidence> GetEvidenceForBinary(string binaryPath) =>
        Evidence.Where(e => e.BinaryPath == binaryPath);

    /// <summary>
    /// Gets the highest-confidence evidence for each CVE.
    /// LINQ's OrderByDescending is stable, so confidence ties resolve deterministically
    /// to the evidence that appears first in <see cref="Evidence"/>.
    /// </summary>
    public IReadOnlyDictionary<string, PatchVerificationEvidence> GetBestEvidencePerCve()
    {
        return Evidence
            .GroupBy(e => e.CveId)
            .Select(g => g.OrderByDescending(e => e.Confidence).First())
            .ToImmutableDictionary(e => e.CveId);
    }

    /// <summary>
    /// Creates an empty result for when no verification was performed.
    /// </summary>
    /// <param name="scanId">Scan identifier.</param>
    /// <param name="verifierVersion">Verifier engine version.</param>
    /// <param name="verifiedAt">
    /// Optional timestamp; defaults to <see cref="DateTimeOffset.UtcNow"/> when omitted.
    /// Callers holding a TimeProvider should pass an explicit value so the result
    /// is deterministic and testable.
    /// </param>
    public static PatchVerificationResult Empty(
        string scanId,
        string verifierVersion,
        DateTimeOffset? verifiedAt = null) => new()
    {
        ScanId = scanId,
        Evidence = [],
        PatchedCves = ImmutableHashSet<string>.Empty,
        UnpatchedCves = ImmutableHashSet<string>.Empty,
        InconclusiveCves = ImmutableHashSet<string>.Empty,
        NoPatchDataCves = ImmutableHashSet<string>.Empty,
        VerifiedAt = verifiedAt ?? DateTimeOffset.UtcNow,
        VerifierVersion = verifierVersion
    };

    /// <summary>
    /// Computes aggregate trust score across all evidence (0.0 when empty).
    /// </summary>
    public double ComputeAggregateTrustScore()
    {
        if (Evidence.Count == 0)
        {
            return 0.0;
        }
        return Evidence.Average(e => e.ComputeTrustScore());
    }
}

View File

@@ -0,0 +1,36 @@
namespace StellaOps.Scanner.PatchVerification.Models;
/// <summary>
/// Status of patch verification for a CVE/binary pair.
/// </summary>
/// <remarks>
/// NOTE(review): <see cref="Verified"/> is the first member, so
/// default(PatchVerificationStatus) yields Verified. Confirm whether an explicit
/// "Unknown = 0" member would be a safer default for uninitialized values.
/// </remarks>
public enum PatchVerificationStatus
{
    /// <summary>
    /// Patch verified - fingerprint matches expected patched state.
    /// Binary-level evidence confirms the security fix is present.
    /// </summary>
    Verified,

    /// <summary>
    /// Partial match - some but not all expected changes detected.
    /// May indicate incomplete patching or partial backport.
    /// </summary>
    PartialMatch,

    /// <summary>
    /// Inconclusive - unable to verify definitively.
    /// Common causes: stripped binary, missing debug symbols, compiler variations.
    /// </summary>
    Inconclusive,

    /// <summary>
    /// Not patched - binary matches vulnerable state or lacks expected patch changes.
    /// </summary>
    NotPatched,

    /// <summary>
    /// No patch data available for this CVE in the patch signature store.
    /// Verification cannot be performed without reference fingerprints.
    /// </summary>
    NoPatchData
}

View File

@@ -0,0 +1,441 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using StellaOps.Feedser.BinaryAnalysis;
using StellaOps.Feedser.BinaryAnalysis.Models;
using StellaOps.Scanner.PatchVerification.Models;
using StellaOps.Scanner.PatchVerification.Services;
namespace StellaOps.Scanner.PatchVerification;
/// <summary>
/// Orchestrates patch verification by coordinating fingerprint extraction and matching.
/// </summary>
/// <remarks>
/// Verification is read-only with respect to the signature store: signatures are
/// looked up per CVE and matched against extracted binaries via the registered
/// <see cref="IBinaryFingerprinter"/> implementations.
/// </remarks>
public sealed class PatchVerificationOrchestrator : IPatchVerificationOrchestrator
{
    private readonly IEnumerable<IBinaryFingerprinter> _fingerprinters;
    private readonly IPatchSignatureStore _signatureStore;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<PatchVerificationOrchestrator> _logger;

    /// <summary>
    /// Current verifier version for reproducibility tracking.
    /// Stamped onto every piece of evidence produced by this class.
    /// </summary>
    public const string VerifierVersion = "1.0.0";

    /// <summary>
    /// Initializes a new instance of the orchestrator.
    /// </summary>
    /// <param name="fingerprinters">Available fingerprinter implementations (matched by their Method).</param>
    /// <param name="signatureStore">Store of known-good patch signatures keyed by CVE.</param>
    /// <param name="timeProvider">Clock abstraction used for evidence timestamps.</param>
    /// <param name="logger">Logger.</param>
    public PatchVerificationOrchestrator(
        IEnumerable<IBinaryFingerprinter> fingerprinters,
        IPatchSignatureStore signatureStore,
        TimeProvider timeProvider,
        ILogger<PatchVerificationOrchestrator> logger)
    {
        _fingerprinters = fingerprinters ?? throw new ArgumentNullException(nameof(fingerprinters));
        _signatureStore = signatureStore ?? throw new ArgumentNullException(nameof(signatureStore));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<PatchVerificationResult> VerifyAsync(
        PatchVerificationContext context,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        _logger.LogInformation(
            "Starting patch verification for scan {ScanId} with {CveCount} CVEs and {BinaryCount} binaries",
            context.ScanId,
            context.CveIds.Count,
            context.BinaryPaths.Count);
        var evidenceList = new List<PatchVerificationEvidence>();
        var patchedCves = new HashSet<string>();
        var unpatchedCves = new HashSet<string>();
        var inconclusiveCves = new HashSet<string>();
        var noPatchDataCves = new HashSet<string>();
        // Filter CVEs to those with patch data (single batched store round-trip).
        var cvesWithData = await _signatureStore.FilterWithPatchDataAsync(context.CveIds, cancellationToken);
        var cvesWithDataSet = cvesWithData.ToHashSet();
        // NOTE(review): CVEs are verified strictly sequentially here;
        // context.Options.MaxConcurrency is never consumed — confirm whether
        // parallel verification was intended.
        foreach (var cveId in context.CveIds)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (!cvesWithDataSet.Contains(cveId))
            {
                noPatchDataCves.Add(cveId);
                if (context.Options.EmitNoPatchDataEvidence)
                {
                    evidenceList.Add(CreateNoPatchDataEvidence(cveId, context));
                }
                continue;
            }
            try
            {
                var cveEvidence = await VerifyCveAsync(cveId, context, cancellationToken);
                evidenceList.AddRange(cveEvidence);
                // Categorize CVE based on best evidence
                var bestEvidence = cveEvidence
                    .OrderByDescending(e => e.Confidence)
                    .FirstOrDefault();
                if (bestEvidence is not null)
                {
                    switch (bestEvidence.Status)
                    {
                        case PatchVerificationStatus.Verified:
                            patchedCves.Add(cveId);
                            break;
                        case PatchVerificationStatus.NotPatched:
                            unpatchedCves.Add(cveId);
                            break;
                        case PatchVerificationStatus.Inconclusive:
                        case PatchVerificationStatus.PartialMatch:
                            inconclusiveCves.Add(cveId);
                            break;
                        // NOTE(review): NoPatchData is normally routed above, but a CVE
                        // whose VerifyCveAsync evidence is all NoPatchData — or whose
                        // evidence list is empty (no signature matched any binary) —
                        // ends up in no result bucket at all. Confirm intended.
                    }
                }
            }
            catch (Exception ex) when (context.Options.ContinueOnError)
            {
                // Best-effort mode: a single failing CVE is downgraded to
                // inconclusive rather than aborting the whole scan.
                _logger.LogWarning(ex, "Failed to verify CVE {CveId}, continuing with other CVEs", cveId);
                inconclusiveCves.Add(cveId);
            }
        }
        var result = new PatchVerificationResult
        {
            ScanId = context.ScanId,
            Evidence = evidenceList.ToImmutableArray(),
            PatchedCves = patchedCves.ToImmutableHashSet(),
            UnpatchedCves = unpatchedCves.ToImmutableHashSet(),
            InconclusiveCves = inconclusiveCves.ToImmutableHashSet(),
            NoPatchDataCves = noPatchDataCves.ToImmutableHashSet(),
            VerifiedAt = _timeProvider.GetUtcNow(),
            VerifierVersion = VerifierVersion
        };
        _logger.LogInformation(
            "Patch verification complete for scan {ScanId}: {Patched} patched, {Unpatched} unpatched, {Inconclusive} inconclusive, {NoData} no data",
            context.ScanId,
            patchedCves.Count,
            unpatchedCves.Count,
            inconclusiveCves.Count,
            noPatchDataCves.Count);
        return result;
    }

    /// <inheritdoc />
    public async Task<PatchVerificationEvidence> VerifySingleAsync(
        string cveId,
        string binaryPath,
        string artifactPurl,
        PatchVerificationOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        ArgumentException.ThrowIfNullOrWhiteSpace(binaryPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactPurl);
        options ??= new PatchVerificationOptions();
        // Check for patch data
        var signatures = await _signatureStore.GetByCveAsync(cveId, cancellationToken);
        if (signatures.Count == 0)
        {
            return CreateNoPatchDataEvidenceSingle(cveId, binaryPath, artifactPurl);
        }
        // Find matching signature for binary: suffix match on the recorded path,
        // or a "*" wildcard signature that applies to any binary.
        var matchingSignature = signatures.FirstOrDefault(s =>
            binaryPath.EndsWith(s.BinaryPath, StringComparison.OrdinalIgnoreCase) ||
            s.BinaryPath == "*");
        if (matchingSignature is null)
        {
            return CreateNoPatchDataEvidenceSingle(cveId, binaryPath, artifactPurl);
        }
        // Get appropriate fingerprinter
        var fingerprinter = GetFingerprinter(matchingSignature.PatchedFingerprint.Method, options);
        if (fingerprinter is null)
        {
            _logger.LogWarning(
                "No fingerprinter available for method {Method}",
                matchingSignature.PatchedFingerprint.Method);
            return CreateInconclusiveEvidence(
                cveId, binaryPath, artifactPurl,
                "No fingerprinter available for required method",
                matchingSignature.PatchedFingerprint.Method);
        }
        try
        {
            // Match binary against patched fingerprint
            var matchResult = await fingerprinter.MatchAsync(
                binaryPath,
                matchingSignature.PatchedFingerprint,
                cancellationToken);
            return CreateEvidenceFromMatch(
                cveId, binaryPath, artifactPurl,
                matchResult, matchingSignature, options);
        }
        catch (Exception ex)
        {
            // Any matcher failure is reported as inconclusive evidence rather
            // than propagated to the caller.
            _logger.LogWarning(ex, "Failed to verify {CveId} in {Binary}", cveId, binaryPath);
            return CreateInconclusiveEvidence(
                cveId, binaryPath, artifactPurl,
                $"Verification failed: {ex.Message}",
                fingerprinter.Method);
        }
    }

    /// <inheritdoc />
    public async Task<bool> HasPatchDataAsync(
        string cveId,
        CancellationToken cancellationToken = default)
    {
        return await _signatureStore.ExistsAsync(cveId, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<string>> GetCvesWithPatchDataAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default)
    {
        return await _signatureStore.FilterWithPatchDataAsync(cveIds, cancellationToken);
    }

    /// <summary>
    /// Verifies one CVE against every binary in the context that has a matching
    /// signature; binaries without a signature are silently skipped.
    /// </summary>
    private async Task<IReadOnlyList<PatchVerificationEvidence>> VerifyCveAsync(
        string cveId,
        PatchVerificationContext context,
        CancellationToken cancellationToken)
    {
        var signatures = await _signatureStore.GetByCveAsync(cveId, cancellationToken);
        var evidenceList = new List<PatchVerificationEvidence>();
        foreach (var (containerPath, extractedPath) in context.BinaryPaths)
        {
            // Find signature matching this binary (suffix match or "*" wildcard).
            var matchingSignature = signatures.FirstOrDefault(s =>
                containerPath.EndsWith(s.BinaryPath, StringComparison.OrdinalIgnoreCase) ||
                s.BinaryPath == "*");
            if (matchingSignature is null)
            {
                continue; // No signature for this binary
            }
            var fingerprinter = GetFingerprinter(matchingSignature.PatchedFingerprint.Method, context.Options);
            if (fingerprinter is null)
            {
                evidenceList.Add(CreateInconclusiveEvidence(
                    cveId, containerPath, context.ArtifactPurl,
                    "No fingerprinter available",
                    matchingSignature.PatchedFingerprint.Method));
                continue;
            }
            try
            {
                // Match against the on-disk extracted copy; evidence records the
                // original container path for reporting.
                var matchResult = await fingerprinter.MatchAsync(
                    extractedPath,
                    matchingSignature.PatchedFingerprint,
                    cancellationToken);
                evidenceList.Add(CreateEvidenceFromMatch(
                    cveId, containerPath, context.ArtifactPurl,
                    matchResult, matchingSignature, context.Options,
                    context.ScanId));
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to verify {CveId} in {Binary}", cveId, containerPath);
                evidenceList.Add(CreateInconclusiveEvidence(
                    cveId, containerPath, context.ArtifactPurl,
                    $"Verification error: {ex.Message}",
                    fingerprinter.Method));
            }
        }
        return evidenceList;
    }

    /// <summary>
    /// Selects a fingerprinter: exact method match first, then the options'
    /// preference order, then any registered fingerprinter at all.
    /// </summary>
    private IBinaryFingerprinter? GetFingerprinter(
        FingerprintMethod method,
        PatchVerificationOptions options)
    {
        // Try exact method match first
        var fingerprinter = _fingerprinters.FirstOrDefault(f => f.Method == method);
        if (fingerprinter is not null)
        {
            return fingerprinter;
        }
        // Fall back to preferred methods
        foreach (var preferredMethod in options.PreferredMethods)
        {
            fingerprinter = _fingerprinters.FirstOrDefault(f => f.Method == preferredMethod);
            if (fingerprinter is not null)
            {
                return fingerprinter;
            }
        }
        // NOTE(review): this last-resort fallback can return a fingerprinter whose
        // Method differs from the signature's fingerprint method — confirm the
        // matchers tolerate comparing across methods.
        return _fingerprinters.FirstOrDefault();
    }

    /// <summary>
    /// Builds evidence from a completed fingerprint match.
    /// </summary>
    private PatchVerificationEvidence CreateEvidenceFromMatch(
        string cveId,
        string binaryPath,
        string artifactPurl,
        FingerprintMatchResult matchResult,
        PatchSignatureEntry signature,
        PatchVerificationOptions options,
        string? scanId = null)
    {
        var status = DetermineStatus(matchResult, options);
        // NOTE(review): when scanId is absent (single-binary path), a random GUID is
        // substituted, which makes the "deterministic" evidence ID non-reproducible
        // across runs — confirm this is acceptable for VerifySingleAsync.
        var evidenceId = scanId is not null
            ? EvidenceIdGenerator.GenerateFromPath(cveId, binaryPath, scanId)
            : EvidenceIdGenerator.GenerateFromPath(cveId, binaryPath, Guid.NewGuid().ToString("N"));
        return new PatchVerificationEvidence
        {
            EvidenceId = evidenceId,
            CveId = cveId,
            ArtifactPurl = artifactPurl,
            BinaryPath = binaryPath,
            Status = status,
            Similarity = matchResult.Similarity,
            Confidence = matchResult.Confidence,
            Method = matchResult.Method,
            ExpectedFingerprint = signature.PatchedFingerprint,
            ActualFingerprint = null, // Could be populated if needed
            Attestation = signature.Attestation,
            IssuerId = signature.IssuerId,
            Reason = GetReasonForStatus(status, matchResult),
            VerifiedAt = _timeProvider.GetUtcNow(),
            VerifierVersion = VerifierVersion
        };
    }

    /// <summary>
    /// Maps a raw match result onto a status using the configured thresholds.
    /// A non-match is NotPatched; above both thresholds is Verified; similarity
    /// within 70% of the threshold is PartialMatch; otherwise Inconclusive.
    /// </summary>
    private static PatchVerificationStatus DetermineStatus(
        FingerprintMatchResult matchResult,
        PatchVerificationOptions options)
    {
        if (!matchResult.IsMatch)
        {
            return PatchVerificationStatus.NotPatched;
        }
        if (matchResult.Confidence >= options.MinConfidenceThreshold &&
            matchResult.Similarity >= options.MinSimilarityThreshold)
        {
            return PatchVerificationStatus.Verified;
        }
        if (matchResult.Similarity >= options.MinSimilarityThreshold * 0.7)
        {
            return PatchVerificationStatus.PartialMatch;
        }
        return PatchVerificationStatus.Inconclusive;
    }

    /// <summary>
    /// Produces the human-readable reason string recorded on the evidence.
    /// </summary>
    private static string GetReasonForStatus(
        PatchVerificationStatus status,
        FingerprintMatchResult matchResult)
    {
        return status switch
        {
            PatchVerificationStatus.Verified =>
                $"Binary matches patched fingerprint (similarity: {matchResult.Similarity:P0}, confidence: {matchResult.Confidence:P0})",
            PatchVerificationStatus.PartialMatch =>
                $"Partial match detected (similarity: {matchResult.Similarity:P0}, confidence: {matchResult.Confidence:P0})",
            PatchVerificationStatus.NotPatched =>
                "Binary does not match patched fingerprint",
            PatchVerificationStatus.Inconclusive =>
                $"Match inconclusive (similarity: {matchResult.Similarity:P0}, confidence: {matchResult.Confidence:P0})",
            _ => "Unknown status"
        };
    }

    /// <summary>
    /// Builds NoPatchData evidence for the batch path. Uses the first binary path
    /// in the context as a representative (the CVE had no signature for any binary).
    /// </summary>
    private PatchVerificationEvidence CreateNoPatchDataEvidence(
        string cveId,
        PatchVerificationContext context)
    {
        var binaryPath = context.BinaryPaths.Keys.FirstOrDefault() ?? "unknown";
        return new PatchVerificationEvidence
        {
            EvidenceId = EvidenceIdGenerator.GenerateFromPath(cveId, binaryPath, context.ScanId),
            CveId = cveId,
            ArtifactPurl = context.ArtifactPurl,
            BinaryPath = binaryPath,
            Status = PatchVerificationStatus.NoPatchData,
            Similarity = 0.0,
            Confidence = 0.0,
            Method = FingerprintMethod.TLSH, // Default
            Reason = "No patch signature data available for this CVE",
            VerifiedAt = _timeProvider.GetUtcNow(),
            VerifierVersion = VerifierVersion
        };
    }

    /// <summary>
    /// Builds NoPatchData evidence for the single-binary path.
    /// NOTE(review): the random GUID scan component makes this evidence ID
    /// non-deterministic across runs — confirm intended.
    /// </summary>
    private PatchVerificationEvidence CreateNoPatchDataEvidenceSingle(
        string cveId,
        string binaryPath,
        string artifactPurl)
    {
        return new PatchVerificationEvidence
        {
            EvidenceId = EvidenceIdGenerator.GenerateFromPath(cveId, binaryPath, Guid.NewGuid().ToString("N")),
            CveId = cveId,
            ArtifactPurl = artifactPurl,
            BinaryPath = binaryPath,
            Status = PatchVerificationStatus.NoPatchData,
            Similarity = 0.0,
            Confidence = 0.0,
            Method = FingerprintMethod.TLSH,
            Reason = "No patch signature data available for this CVE",
            VerifiedAt = _timeProvider.GetUtcNow(),
            VerifierVersion = VerifierVersion
        };
    }

    /// <summary>
    /// Builds Inconclusive evidence carrying the supplied reason (used when no
    /// fingerprinter exists or matching threw).
    /// </summary>
    private PatchVerificationEvidence CreateInconclusiveEvidence(
        string cveId,
        string binaryPath,
        string artifactPurl,
        string reason,
        FingerprintMethod method)
    {
        return new PatchVerificationEvidence
        {
            EvidenceId = EvidenceIdGenerator.GenerateFromPath(cveId, binaryPath, Guid.NewGuid().ToString("N")),
            CveId = cveId,
            ArtifactPurl = artifactPurl,
            BinaryPath = binaryPath,
            Status = PatchVerificationStatus.Inconclusive,
            Similarity = 0.0,
            Confidence = 0.0,
            Method = method,
            Reason = reason,
            VerifiedAt = _timeProvider.GetUtcNow(),
            VerifierVersion = VerifierVersion
        };
    }
}

View File

@@ -0,0 +1,92 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.PatchVerification.Services;
/// <summary>
/// Produces deterministic evidence identifiers for patch verification results.
/// Identifiers are name-based UUIDs (version 5, SHA-1 per RFC 4122) so the same
/// inputs always map to the same id across verification runs.
/// </summary>
public static class EvidenceIdGenerator
{
    /// <summary>
    /// Fixed UUID5 namespace under which all patch-verification ids are minted.
    /// </summary>
    private static readonly Guid PatchVerificationNamespace =
        new("7d8f4a3c-2e1b-5c9d-8f6e-4a3b2c1d0e9f");

    /// <summary>
    /// Derives an evidence id from a CVE, a binary digest, and a scan id.
    /// </summary>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="binaryDigest">SHA256 digest of the binary.</param>
    /// <param name="scanId">Scan identifier.</param>
    /// <returns>Deterministic "pv:"-prefixed identifier.</returns>
    /// <exception cref="ArgumentException">Any input is null, empty, or whitespace.</exception>
    public static string Generate(string cveId, string binaryDigest, string scanId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        ArgumentException.ThrowIfNullOrWhiteSpace(binaryDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        return Format(CreateUuid5(PatchVerificationNamespace, $"{cveId}|{binaryDigest}|{scanId}"));
    }

    /// <summary>
    /// Derives an evidence id from the binary path when no digest is available.
    /// </summary>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="binaryPath">Path to the binary.</param>
    /// <param name="scanId">Scan identifier.</param>
    /// <returns>Deterministic "pv:"-prefixed identifier.</returns>
    /// <exception cref="ArgumentException">Any input is null, empty, or whitespace.</exception>
    public static string GenerateFromPath(string cveId, string binaryPath, string scanId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        ArgumentException.ThrowIfNullOrWhiteSpace(binaryPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        // Forward slashes keep ids stable between Windows and POSIX paths.
        var canonicalPath = binaryPath.Replace('\\', '/');
        return Format(CreateUuid5(PatchVerificationNamespace, $"{cveId}|path:{canonicalPath}|{scanId}"));
    }

    // All evidence ids share the "pv:" prefix followed by the 32-digit UUID.
    private static string Format(Guid id) => $"pv:{id:N}";

    /// <summary>
    /// Computes an RFC 4122 version-5 (name-based, SHA-1) UUID.
    /// </summary>
    private static Guid CreateUuid5(Guid namespaceId, string name)
    {
        // RFC 4122 hashes the namespace in network (big-endian) byte order,
        // immediately followed by the UTF-8 bytes of the name.
        var nsBytes = namespaceId.ToByteArray();
        SwapByteOrder(nsBytes);
        var nameBytes = Encoding.UTF8.GetBytes(name);
        var payload = new byte[nsBytes.Length + nameBytes.Length];
        nsBytes.CopyTo(payload, 0);
        nameBytes.CopyTo(payload, nsBytes.Length);
        var digest = SHA1.HashData(payload);
        // Stamp the version (5) and RFC 4122 variant bits into the hash.
        digest[6] = (byte)((digest[6] & 0x0F) | 0x50);
        digest[8] = (byte)((digest[8] & 0x3F) | 0x80);
        // Guid's constructor expects mixed-endian layout, so swap back.
        var guidBytes = digest[..16];
        SwapByteOrder(guidBytes);
        return new Guid(guidBytes);
    }

    /// <summary>
    /// Swaps the first eight bytes between the little-endian layout used by
    /// <see cref="Guid"/> and RFC 4122 network byte order.
    /// </summary>
    private static void SwapByteOrder(byte[] guid)
    {
        static void Swap(byte[] bytes, int i, int j) =>
            (bytes[i], bytes[j]) = (bytes[j], bytes[i]);
        Swap(guid, 0, 3);
        Swap(guid, 1, 2);
        Swap(guid, 4, 5);
        Swap(guid, 6, 7);
    }
}

View File

@@ -0,0 +1,122 @@
using StellaOps.Feedser.BinaryAnalysis.Models;
using StellaOps.Scanner.PatchVerification.Models;
namespace StellaOps.Scanner.PatchVerification.Services;
/// <summary>
/// Store for known-good patch signatures used to verify backported patches.
/// </summary>
/// <remarks>
/// NOTE(review): this contract does not state concurrency expectations; the
/// in-memory reference implementation is safe for concurrent use — confirm
/// whether persistent implementations must offer the same guarantee.
/// </remarks>
public interface IPatchSignatureStore
{
    /// <summary>
    /// Gets patch signatures for a specific CVE.
    /// </summary>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Patch signatures for the CVE, or empty if none available.</returns>
    Task<IReadOnlyList<PatchSignatureEntry>> GetByCveAsync(
        string cveId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets patch signatures for a CVE filtered by PURL pattern.
    /// </summary>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="purlPattern">PURL pattern to filter by (e.g., "pkg:rpm/openssl*").</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Matching patch signatures.</returns>
    Task<IReadOnlyList<PatchSignatureEntry>> GetByCveAndPurlAsync(
        string cveId,
        string purlPattern,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if patch data exists for a CVE.
    /// </summary>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if patch signatures exist.</returns>
    Task<bool> ExistsAsync(
        string cveId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Filters a list of CVEs to those with available patch data.
    /// </summary>
    /// <param name="cveIds">CVE identifiers to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>CVEs with patch data available.</returns>
    Task<IReadOnlyList<string>> FilterWithPatchDataAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores a patch signature entry.
    /// </summary>
    /// <param name="entry">Patch signature entry to store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task StoreAsync(
        PatchSignatureEntry entry,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// A patch signature entry containing fingerprints for a patched binary.
/// </summary>
/// <remarks>
/// Immutable record with init-only members, so instances are safe to share
/// across threads once constructed.
/// </remarks>
public sealed record PatchSignatureEntry
{
    /// <summary>
    /// Unique entry identifier.
    /// </summary>
    public required string EntryId { get; init; }

    /// <summary>
    /// CVE this patch fixes.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// PURL of the patched package.
    /// </summary>
    public required string Purl { get; init; }

    /// <summary>
    /// Binary path within the package this signature applies to.
    /// </summary>
    public required string BinaryPath { get; init; }

    /// <summary>
    /// Fingerprint of the patched (fixed) binary.
    /// </summary>
    public required BinaryFingerprint PatchedFingerprint { get; init; }

    /// <summary>
    /// Optional fingerprint of the vulnerable binary (for comparison).
    /// </summary>
    public BinaryFingerprint? VulnerableFingerprint { get; init; }

    /// <summary>
    /// DSSE attestation for this signature. Null when the entry is unattested.
    /// </summary>
    public DsseEnvelopeRef? Attestation { get; init; }

    /// <summary>
    /// Issuer of this signature (vendor, distro, community).
    /// </summary>
    public required string IssuerId { get; init; }

    /// <summary>
    /// When this signature was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// When this signature expires (null = never).
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>
    /// Additional metadata.
    /// </summary>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}

View File

@@ -0,0 +1,119 @@
using System.Collections.Concurrent;
namespace StellaOps.Scanner.PatchVerification.Services;
/// <summary>
/// In-memory implementation of patch signature store for development and testing.
/// For production, use a persistent store backed by PostgreSQL or distributed cache.
/// </summary>
public sealed class InMemoryPatchSignatureStore : IPatchSignatureStore
{
    // Entries keyed by CVE id. Lists are replaced wholesale on write (never
    // mutated in place), so readers can snapshot them without locking.
    private readonly ConcurrentDictionary<string, List<PatchSignatureEntry>> _store = new();

    /// <inheritdoc />
    public Task<IReadOnlyList<PatchSignatureEntry>> GetByCveAsync(
        string cveId,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        IReadOnlyList<PatchSignatureEntry> snapshot = _store.TryGetValue(cveId, out var entries)
            ? entries.ToList()
            : Array.Empty<PatchSignatureEntry>();
        return Task.FromResult(snapshot);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<PatchSignatureEntry>> GetByCveAndPurlAsync(
        string cveId,
        string purlPattern,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        IReadOnlyList<PatchSignatureEntry> matches = _store.TryGetValue(cveId, out var entries)
            ? entries.Where(e => MatchesPurlPattern(e.Purl, purlPattern)).ToList()
            : Array.Empty<PatchSignatureEntry>();
        return Task.FromResult(matches);
    }

    /// <inheritdoc />
    public Task<bool> ExistsAsync(
        string cveId,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(_store.ContainsKey(cveId));
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<string>> FilterWithPatchDataAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        IReadOnlyList<string> withData = cveIds.Where(_store.ContainsKey).ToList();
        return Task.FromResult(withData);
    }

    /// <inheritdoc />
    public Task StoreAsync(
        PatchSignatureEntry entry,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entry);
        cancellationToken.ThrowIfCancellationRequested();
        _store.AddOrUpdate(
            entry.CveId,
            static (_, newEntry) => [newEntry],
            static (_, current, newEntry) =>
            {
                // Upsert semantics: an entry with the same id replaces the old one.
                var replaced = current.Where(e => e.EntryId != newEntry.EntryId).ToList();
                replaced.Add(newEntry);
                return replaced;
            },
            entry);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Clears all stored entries. Useful for testing.
    /// </summary>
    public void Clear() => _store.Clear();

    /// <summary>
    /// Gets total count of stored entries.
    /// </summary>
    public int Count => _store.Values.Select(list => list.Count).Sum();

    // Supports exact match or a single trailing '*' wildcard (prefix match),
    // compared case-insensitively in both forms.
    private static bool MatchesPurlPattern(string purl, string pattern)
    {
        return pattern.EndsWith('*')
            ? purl.StartsWith(pattern[..^1], StringComparison.OrdinalIgnoreCase)
            : purl.Equals(pattern, StringComparison.OrdinalIgnoreCase);
    }
}

View File

@@ -0,0 +1,25 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Scanner.PatchVerification</RootNamespace>
<AssemblyName>StellaOps.Scanner.PatchVerification</AssemblyName>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../Feedser/StellaOps.Feedser.BinaryAnalysis/StellaOps.Feedser.BinaryAnalysis.csproj" />
<ProjectReference Include="../../../Feedser/StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
<ProjectReference Include="../../../VexLens/StellaOps.VexLens/StellaOps.VexLens.Core/StellaOps.VexLens.Core.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,510 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-004 - Binary Patch Verification Implementation
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.BinaryIndex.Decompiler;
using StellaOps.BinaryIndex.Ghidra;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Reachability.Stack;
namespace StellaOps.Scanner.Reachability.Binary;
/// <summary>
/// Verifies binary patches using Ghidra decompilation and AST comparison.
/// Bridges the existing decompiler infrastructure to L2 reachability analysis.
/// </summary>
public sealed class BinaryPatchVerifier : IBinaryPatchVerifier
{
    private readonly IGhidraService _ghidraService;
    private readonly IDecompilerService _decompilerService;
    private readonly ILogger<BinaryPatchVerifier> _logger;
    private readonly TimeProvider _timeProvider;

    // Supported binary formats; the empty entry admits extension-less binaries.
    private static readonly HashSet<string> SupportedExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".so", ".dll", ".exe", ".dylib", ".bin", ".elf", ""
    };

    /// <summary>
    /// Creates a verifier backed by the given Ghidra and decompiler services.
    /// </summary>
    /// <param name="ghidraService">Service used to analyze binaries.</param>
    /// <param name="decompilerService">Service used to decompile and compare functions.</param>
    /// <param name="logger">Logger for diagnostics.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to <see cref="TimeProvider.System"/>.</param>
    public BinaryPatchVerifier(
        IGhidraService ghidraService,
        IDecompilerService decompilerService,
        ILogger<BinaryPatchVerifier> logger,
        TimeProvider? timeProvider = null)
    {
        _ghidraService = ghidraService ?? throw new ArgumentNullException(nameof(ghidraService));
        _decompilerService = decompilerService ?? throw new ArgumentNullException(nameof(decompilerService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public bool IsSupported(string binaryPath)
    {
        if (string.IsNullOrWhiteSpace(binaryPath))
            return false;
        var extension = Path.GetExtension(binaryPath);
        return SupportedExtensions.Contains(extension);
    }

    /// <inheritdoc />
    public async Task<PatchVerificationResult> VerifyPatchAsync(
        PatchVerificationRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        // NOTE(review): startTime is not folded into the result (Duration is
        // stopwatch-based); kept so the injected clock stays referenced — wire
        // it into the result if a wall-clock timestamp is ever required.
        var startTime = _timeProvider.GetUtcNow();
        var sw = Stopwatch.StartNew();
        _logger.LogInformation(
            "Starting patch verification for CVE {CveId} with {SymbolCount} target symbols",
            request.CveId, request.TargetSymbols.Count);
        try
        {
            // Analyze both binaries in parallel
            var vulnerableAnalysisTask = AnalyzeBinaryAsync(
                request.VulnerableBinaryReference, true, ct);
            var targetAnalysisTask = AnalyzeBinaryAsync(
                request.TargetBinaryPath, true, ct);
            await Task.WhenAll(vulnerableAnalysisTask, targetAnalysisTask);
            var vulnerableAnalysis = await vulnerableAnalysisTask;
            var targetAnalysis = await targetAnalysisTask;
            if (vulnerableAnalysis is null)
            {
                return CreateFailedResult(sw.Elapsed,
                    "Failed to analyze vulnerable reference binary");
            }
            if (targetAnalysis is null)
            {
                return CreateFailedResult(sw.Elapsed,
                    "Failed to analyze target binary");
            }
            // Compare each target symbol with bounded parallelism.
            var results = new ConcurrentBag<FunctionPatchResult>();
            // FIX: SemaphoreSlim is IDisposable and was previously never disposed.
            // Disposing here is safe: Task.WhenAll below completes only after
            // every worker has released the semaphore.
            using var semaphore = new SemaphoreSlim(request.Options.MaxParallelism);
            var tasks = request.TargetSymbols.Select(async symbol =>
            {
                await semaphore.WaitAsync(ct);
                try
                {
                    var result = await CompareFunctionInAnalysesAsync(
                        vulnerableAnalysis,
                        targetAnalysis,
                        symbol,
                        request.Options,
                        ct);
                    results.Add(result);
                }
                finally
                {
                    semaphore.Release();
                }
            });
            await Task.WhenAll(tasks);
            var functionResults = results.ToList();
            var status = DeterminePatchStatus(functionResults);
            var layer2 = BuildLayer2(functionResults, status);
            var confidence = CalculateOverallConfidence(functionResults);
            sw.Stop();
            _logger.LogInformation(
                "Patch verification completed: {Status} with confidence {Confidence:P1} in {Duration}ms",
                status, confidence, sw.ElapsedMilliseconds);
            return new PatchVerificationResult
            {
                Success = true,
                Status = status,
                FunctionResults = functionResults,
                Layer2 = layer2,
                Confidence = confidence,
                Duration = sw.Elapsed
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Patch verification failed for CVE {CveId}", request.CveId);
            sw.Stop();
            return CreateFailedResult(sw.Elapsed, ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<FunctionPatchResult> CompareFunctionAsync(
        string vulnerableBinaryPath,
        string targetBinaryPath,
        string symbolName,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerableBinaryPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetBinaryPath);
        ArgumentException.ThrowIfNullOrWhiteSpace(symbolName);
        var vulnerableAnalysis = await AnalyzeBinaryAsync(vulnerableBinaryPath, true, ct);
        var targetAnalysis = await AnalyzeBinaryAsync(targetBinaryPath, true, ct);
        if (vulnerableAnalysis is null || targetAnalysis is null)
        {
            return new FunctionPatchResult
            {
                SymbolName = symbolName,
                Success = false,
                IsPatched = false,
                Similarity = 0,
                Confidence = 0,
                Error = "Failed to analyze one or both binaries"
            };
        }
        var symbol = new VulnerableSymbol { Name = symbolName };
        return await CompareFunctionInAnalysesAsync(
            vulnerableAnalysis,
            targetAnalysis,
            symbol,
            new PatchVerificationOptions(),
            ct);
    }

    // Runs full Ghidra analysis over a binary. Returns null on failure (logged);
    // callers treat null as "binary could not be analyzed".
    private async Task<GhidraAnalysisResult?> AnalyzeBinaryAsync(
        string binaryPath,
        bool includeDecompilation,
        CancellationToken ct)
    {
        try
        {
            _logger.LogDebug("Analyzing binary: {Path}", binaryPath);
            return await _ghidraService.AnalyzeAsync(
                binaryPath,
                new GhidraAnalysisOptions
                {
                    IncludeDecompilation = includeDecompilation,
                    ExtractDecompilation = includeDecompilation,
                    RunFullAnalysis = true,
                    GeneratePCodeHashes = true,
                    ExtractFunctions = true
                },
                ct);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to analyze binary: {Path}", binaryPath);
            return null;
        }
    }

    // Compares one symbol between the two analyses: fast path on identical
    // P-Code hashes, otherwise decompile both sides and diff the ASTs.
    private async Task<FunctionPatchResult> CompareFunctionInAnalysesAsync(
        GhidraAnalysisResult vulnerableAnalysis,
        GhidraAnalysisResult targetAnalysis,
        VulnerableSymbol symbol,
        PatchVerificationOptions options,
        CancellationToken ct)
    {
        try
        {
            // Find the function in both analyses
            var vulnerableFunc = FindFunction(vulnerableAnalysis, symbol);
            var targetFunc = FindFunction(targetAnalysis, symbol);
            if (vulnerableFunc is null)
            {
                _logger.LogWarning(
                    "Symbol {Symbol} not found in vulnerable binary",
                    symbol.Name);
                return new FunctionPatchResult
                {
                    SymbolName = symbol.Name,
                    Success = false,
                    IsPatched = false,
                    Similarity = 0,
                    Confidence = 0,
                    Error = "Symbol not found in vulnerable binary"
                };
            }
            if (targetFunc is null)
            {
                // Function removed from target - likely patched by removal
                _logger.LogDebug(
                    "Symbol {Symbol} not found in target binary - possibly removed",
                    symbol.Name);
                return new FunctionPatchResult
                {
                    SymbolName = symbol.Name,
                    Success = true,
                    IsPatched = true,
                    Similarity = 0,
                    Confidence = 0.7m,
                    Differences = ["Function removed from target binary"]
                };
            }
            // Quick check: if P-Code hashes match, functions are identical
            if (vulnerableFunc.PCodeHash is not null &&
                targetFunc.PCodeHash is not null &&
                vulnerableFunc.PCodeHash.SequenceEqual(targetFunc.PCodeHash))
            {
                _logger.LogDebug(
                    "Symbol {Symbol} has identical P-Code hash - not patched",
                    symbol.Name);
                return new FunctionPatchResult
                {
                    SymbolName = symbol.Name,
                    Success = true,
                    IsPatched = false,
                    Similarity = 1.0m,
                    StructuralSimilarity = 1.0m,
                    SemanticSimilarity = 1.0m,
                    Confidence = 0.99m,
                    VulnerableCode = options.IncludeDecompiledCode ? vulnerableFunc.DecompiledCode : null,
                    TargetCode = options.IncludeDecompiledCode ? targetFunc.DecompiledCode : null
                };
            }
            // Decompile and compare
            var vulnerableDecompiled = await _decompilerService.DecompileAsync(
                vulnerableFunc,
                new DecompileOptions { Timeout = options.FunctionTimeout },
                ct);
            var targetDecompiled = await _decompilerService.DecompileAsync(
                targetFunc,
                new DecompileOptions { Timeout = options.FunctionTimeout },
                ct);
            var comparison = await _decompilerService.CompareAsync(
                vulnerableDecompiled,
                targetDecompiled,
                new ComparisonOptions
                {
                    IgnoreVariableNames = true,
                    DetectOptimizations = true
                },
                ct);
            // Determine if patched based on similarity
            var isPatched = comparison.Similarity < options.PatchedThreshold;
            var isIdentical = comparison.Similarity >= options.IdenticalThreshold;
            // Build difference descriptions (cap at 10 to keep results compact)
            var differences = comparison.Differences
                .Take(10)
                .Select(d => $"{d.Type}: {d.Description}")
                .ToList();
            // Map comparison confidence to our confidence
            var confidence = MapConfidence(comparison.Confidence, comparison.Similarity);
            return new FunctionPatchResult
            {
                SymbolName = symbol.Name,
                Success = true,
                IsPatched = isPatched,
                Similarity = comparison.Similarity,
                StructuralSimilarity = comparison.StructuralSimilarity,
                SemanticSimilarity = comparison.SemanticSimilarity,
                Confidence = confidence,
                Differences = differences,
                VulnerableCode = options.IncludeDecompiledCode ? vulnerableDecompiled.Code : null,
                TargetCode = options.IncludeDecompiledCode ? targetDecompiled.Code : null
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to compare function {Symbol}", symbol.Name);
            return new FunctionPatchResult
            {
                SymbolName = symbol.Name,
                Success = false,
                IsPatched = false,
                Similarity = 0,
                Confidence = 0,
                Error = ex.Message
            };
        }
    }

    // Resolves a symbol to a function: exact name, then case-insensitive name,
    // then address (when supplied). Returns null if nothing matches.
    private static GhidraFunction? FindFunction(
        GhidraAnalysisResult analysis,
        VulnerableSymbol symbol)
    {
        // Try exact name match first
        var func = analysis.Functions.FirstOrDefault(f =>
            f.Name.Equals(symbol.Name, StringComparison.Ordinal));
        if (func is not null)
            return func;
        // Try case-insensitive match
        func = analysis.Functions.FirstOrDefault(f =>
            f.Name.Equals(symbol.Name, StringComparison.OrdinalIgnoreCase));
        if (func is not null)
            return func;
        // Try by address if specified
        if (symbol.VulnerableAddress.HasValue)
        {
            func = analysis.Functions.FirstOrDefault(f =>
                f.Address == symbol.VulnerableAddress.Value);
        }
        return func;
    }

    // Aggregates per-function verdicts into an overall status. Failed
    // comparisons are excluded from the tally.
    // (FIX: dropped the unused PatchVerificationOptions parameter.)
    private static PatchStatus DeterminePatchStatus(
        IReadOnlyList<FunctionPatchResult> results)
    {
        if (results.Count == 0)
            return PatchStatus.Unknown;
        var successfulResults = results.Where(r => r.Success).ToList();
        if (successfulResults.Count == 0)
            return PatchStatus.Unknown;
        var patchedCount = successfulResults.Count(r => r.IsPatched);
        var vulnerableCount = successfulResults.Count(r => !r.IsPatched);
        if (patchedCount == successfulResults.Count)
            return PatchStatus.Patched;
        if (vulnerableCount == successfulResults.Count)
            return PatchStatus.Vulnerable;
        if (patchedCount > 0 && vulnerableCount > 0)
            return PatchStatus.PartiallyPatched;
        return PatchStatus.Unknown;
    }

    // Projects the per-function results into the L2 reachability layer.
    // (FIX: dropped the unused cveId parameter.)
    private static ReachabilityLayer2 BuildLayer2(
        IReadOnlyList<FunctionPatchResult> results,
        PatchStatus status)
    {
        var successfulResults = results.Where(r => r.Success).ToList();
        var avgConfidence = successfulResults.Count > 0
            ? successfulResults.Average(r => r.Confidence)
            : 0m;
        var confidenceLevel = avgConfidence switch
        {
            >= 0.8m => ConfidenceLevel.High,
            >= 0.5m => ConfidenceLevel.Medium,
            _ => ConfidenceLevel.Low
        };
        // Determine if the vulnerable symbol is resolved (linked) in the binary
        var isResolved = status switch
        {
            PatchStatus.Vulnerable => true,
            PatchStatus.PartiallyPatched => true,
            PatchStatus.Patched => false, // Patched means different code
            PatchStatus.Unknown => true, // Assume resolved if unknown
            _ => true // Default: assume resolved
        };
        var reason = status switch
        {
            PatchStatus.Patched => $"All {results.Count} vulnerable function(s) have been patched",
            PatchStatus.Vulnerable => $"All {results.Count} vulnerable function(s) are identical to known-vulnerable version",
            PatchStatus.PartiallyPatched => $"{results.Count(r => r.IsPatched)} of {results.Count} functions patched",
            PatchStatus.Unknown => "Unable to determine patch status",
            _ => "Unknown status"
        };
        var symbols = successfulResults
            .Select(r => new ResolvedSymbol(
                Name: r.SymbolName,
                Address: 0, // We don't track this
                IsWeak: false,
                BindingType: SymbolBinding.Global))
            .ToImmutableArray();
        return new ReachabilityLayer2
        {
            IsResolved = isResolved,
            Confidence = confidenceLevel,
            Reason = reason,
            ResolvedSymbols = symbols
        };
    }

    // Mean confidence across successful comparisons; 0 when none succeeded.
    private static decimal CalculateOverallConfidence(
        IReadOnlyList<FunctionPatchResult> results)
    {
        var successfulResults = results.Where(r => r.Success).ToList();
        if (successfulResults.Count == 0)
            return 0;
        // Weighted by individual confidence
        return successfulResults.Average(r => r.Confidence);
    }

    // Maps the comparator's qualitative confidence to a decimal, boosting
    // verdicts with extreme (clearly identical / clearly different) similarity.
    private static decimal MapConfidence(
        ComparisonConfidence comparisonConfidence,
        decimal similarity)
    {
        var baseConfidence = comparisonConfidence switch
        {
            ComparisonConfidence.VeryHigh => 0.95m,
            ComparisonConfidence.High => 0.85m,
            ComparisonConfidence.Medium => 0.7m,
            ComparisonConfidence.Low => 0.5m,
            _ => 0.5m
        };
        // Adjust based on similarity - extreme values are more confident
        if (similarity > 0.95m || similarity < 0.3m)
        {
            baseConfidence = Math.Min(1.0m, baseConfidence + 0.1m);
        }
        return baseConfidence;
    }

    // Uniform failure envelope: Unknown status, low-confidence L2 that assumes
    // the symbol is resolved (fail-open for reachability).
    // (FIX: made static and dropped the unused request parameter.)
    private static PatchVerificationResult CreateFailedResult(
        TimeSpan duration,
        string error)
    {
        return new PatchVerificationResult
        {
            Success = false,
            Status = PatchStatus.Unknown,
            FunctionResults = [],
            Layer2 = new ReachabilityLayer2
            {
                IsResolved = true, // Assume resolved if we can't verify
                Confidence = ConfidenceLevel.Low,
                Reason = error
            },
            Confidence = 0,
            Error = error,
            Duration = duration
        };
    }
}

View File

@@ -0,0 +1,248 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-004 - Binary Patch Verification
using StellaOps.Scanner.Reachability.Stack;
namespace StellaOps.Scanner.Reachability.Binary;
/// <summary>
/// Verifies whether a vulnerable binary has been patched by comparing
/// decompiled functions between vulnerable and patched versions.
/// </summary>
/// <remarks>
/// NOTE(review): the current implementation does not gate the async methods on
/// <see cref="IsSupported"/> — confirm callers are expected to check it first.
/// </remarks>
public interface IBinaryPatchVerifier
{
    /// <summary>
    /// Verifies if specific vulnerable functions have been patched in a binary.
    /// </summary>
    /// <param name="request">Verification request containing binary references and target symbols.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Patch verification result for L2 reachability analysis.</returns>
    Task<PatchVerificationResult> VerifyPatchAsync(
        PatchVerificationRequest request,
        CancellationToken ct = default);

    /// <summary>
    /// Compares a single function between vulnerable and potentially patched versions.
    /// </summary>
    /// <param name="vulnerableBinaryPath">Path to known-vulnerable binary.</param>
    /// <param name="targetBinaryPath">Path to target binary to verify.</param>
    /// <param name="symbolName">Name of the vulnerable function.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Comparison result for the specific function.</returns>
    Task<FunctionPatchResult> CompareFunctionAsync(
        string vulnerableBinaryPath,
        string targetBinaryPath,
        string symbolName,
        CancellationToken ct = default);

    /// <summary>
    /// Checks if the verifier supports the given binary format.
    /// </summary>
    /// <param name="binaryPath">Path to the binary.</param>
    /// <returns>True if the binary format is supported.</returns>
    bool IsSupported(string binaryPath);
}
/// <summary>
/// Request for patch verification.
/// </summary>
public sealed record PatchVerificationRequest
{
    /// <summary>
    /// Path to a known-vulnerable reference binary (or registry key).
    /// </summary>
    public required string VulnerableBinaryReference { get; init; }

    /// <summary>
    /// Path to the target binary to verify.
    /// </summary>
    public required string TargetBinaryPath { get; init; }

    /// <summary>
    /// CVE identifier for context.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Target vulnerable symbols/functions to compare.
    /// </summary>
    public required IReadOnlyList<VulnerableSymbol> TargetSymbols { get; init; }

    /// <summary>
    /// Options for verification. Defaults to a new <see cref="PatchVerificationOptions"/>
    /// (see its property initializers for the default thresholds and limits).
    /// </summary>
    public PatchVerificationOptions Options { get; init; } = new();
}
/// <summary>
/// A vulnerable symbol to verify.
/// </summary>
public sealed record VulnerableSymbol
{
    /// <summary>
    /// Symbol name (function name).
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Optional address in the vulnerable binary. Used as a lookup fallback
    /// when the symbol cannot be found by name.
    /// </summary>
    public ulong? VulnerableAddress { get; init; }

    /// <summary>
    /// Expected signature change in patched version.
    /// </summary>
    /// <remarks>
    /// NOTE(review): this property is not consulted by the verifier in this
    /// sprint — confirm its intended consumer before relying on it.
    /// </remarks>
    public string? ExpectedPatchPattern { get; init; }
}
/// <summary>
/// Options for patch verification.
/// </summary>
/// <remarks>
/// A function with similarity below <see cref="PatchedThreshold"/> is treated
/// as patched; at or above <see cref="IdenticalThreshold"/> it is treated as
/// identical to the vulnerable version. Keep PatchedThreshold &lt; IdenticalThreshold.
/// </remarks>
public sealed record PatchVerificationOptions
{
    /// <summary>
    /// Similarity threshold below which functions are considered different (patched).
    /// </summary>
    public decimal PatchedThreshold { get; init; } = 0.85m;

    /// <summary>
    /// Similarity threshold above which functions are considered identical.
    /// </summary>
    public decimal IdenticalThreshold { get; init; } = 0.98m;

    /// <summary>
    /// Timeout for each function comparison.
    /// </summary>
    public TimeSpan FunctionTimeout { get; init; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Whether to include decompiled code in results.
    /// </summary>
    public bool IncludeDecompiledCode { get; init; } = false;

    /// <summary>
    /// Maximum number of functions to compare in parallel.
    /// </summary>
    public int MaxParallelism { get; init; } = 4;
}
/// <summary>
/// Result of patch verification.
/// </summary>
public sealed record PatchVerificationResult
{
    /// <summary>
    /// Whether verification completed successfully.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Overall patch status determination.
    /// </summary>
    public required PatchStatus Status { get; init; }

    /// <summary>
    /// Results for each verified function.
    /// </summary>
    public required IReadOnlyList<FunctionPatchResult> FunctionResults { get; init; }

    /// <summary>
    /// L2 reachability layer from verification.
    /// </summary>
    public required ReachabilityLayer2 Layer2 { get; init; }

    /// <summary>
    /// Overall confidence in the determination.
    /// </summary>
    public required decimal Confidence { get; init; }

    /// <summary>
    /// Error message if verification failed. Null on success.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Duration of verification. Not required; defaults to <see cref="TimeSpan.Zero"/>.
    /// </summary>
    public TimeSpan Duration { get; init; }
}
/// <summary>
/// Overall patch status.
/// </summary>
/// <remarks>
/// Aggregated from per-function comparisons; failed comparisons do not
/// contribute to the verdict.
/// </remarks>
public enum PatchStatus
{
    /// <summary>All vulnerable functions appear patched.</summary>
    Patched,
    /// <summary>All vulnerable functions appear identical to vulnerable version.</summary>
    Vulnerable,
    /// <summary>Some functions patched, some not.</summary>
    PartiallyPatched,
    /// <summary>Unable to determine patch status.</summary>
    Unknown
}
/// <summary>
/// Result for a single function comparison.
/// </summary>
public sealed record FunctionPatchResult
{
    /// <summary>
    /// Symbol name that was compared.
    /// </summary>
    public required string SymbolName { get; init; }

    /// <summary>
    /// Whether comparison was successful.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Whether the function appears to be patched.
    /// </summary>
    public required bool IsPatched { get; init; }

    /// <summary>
    /// Similarity score (0.0 to 1.0); 1.0 means byte-identical semantics.
    /// </summary>
    public required decimal Similarity { get; init; }

    /// <summary>
    /// Structural similarity.
    /// </summary>
    public decimal StructuralSimilarity { get; init; }

    /// <summary>
    /// Semantic similarity.
    /// </summary>
    public decimal SemanticSimilarity { get; init; }

    /// <summary>
    /// Confidence in the determination.
    /// </summary>
    public required decimal Confidence { get; init; }

    /// <summary>
    /// Description of differences found. Empty when none were recorded.
    /// </summary>
    public IReadOnlyList<string> Differences { get; init; } = [];

    /// <summary>
    /// Decompiled code from vulnerable binary (if requested).
    /// </summary>
    public string? VulnerableCode { get; init; }

    /// <summary>
    /// Decompiled code from target binary (if requested).
    /// </summary>
    public string? TargetCode { get; init; }

    /// <summary>
    /// Error message if comparison failed. Null on success.
    /// </summary>
    public string? Error { get; init; }
}

View File

@@ -0,0 +1,23 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-002 - Reachability Evidence Job Executor
using StellaOps.Scanner.Reachability.Stack;
namespace StellaOps.Scanner.Reachability.Jobs;
/// <summary>
/// Executor for reachability evidence jobs.
/// </summary>
/// <remarks>
/// NOTE(review): <c>ReachabilityEvidenceJobResult</c> is declared elsewhere;
/// confirm it carries the reachability stack as the doc below states.
/// </remarks>
public interface IReachabilityEvidenceJobExecutor
{
    /// <summary>
    /// Executes a reachability evidence job and returns the result.
    /// </summary>
    /// <param name="job">The job to execute.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The job result containing the reachability stack.</returns>
    Task<ReachabilityEvidenceJobResult> ExecuteAsync(
        ReachabilityEvidenceJob job,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,231 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-002 - Reachability Evidence Job
namespace StellaOps.Scanner.Reachability.Jobs;
/// <summary>
/// A job request for reachability evidence analysis.
/// </summary>
public sealed record ReachabilityEvidenceJob
{
    /// <summary>
    /// Unique job identifier. Deterministic from inputs hash if not specified
    /// (see <see cref="ComputeJobId"/>).
    /// </summary>
    public required string JobId { get; init; }

    /// <summary>
    /// Image digest to analyze (sha256:...).
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// CVE identifier to check reachability for.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Package URL of the affected component.
    /// </summary>
    public required string Purl { get; init; }

    /// <summary>
    /// Optional source commit for the image.
    /// </summary>
    public string? SourceCommit { get; init; }

    /// <summary>
    /// Job options controlling analysis behavior.
    /// </summary>
    public required ReachabilityJobOptions Options { get; init; }

    /// <summary>
    /// When the job was queued.
    /// </summary>
    public required DateTimeOffset QueuedAt { get; init; }

    /// <summary>
    /// Tenant ID for multi-tenant deployments.
    /// </summary>
    public string TenantId { get; init; } = "default";

    /// <summary>
    /// Priority of the job (lower = higher priority).
    /// </summary>
    public int Priority { get; init; } = 100;

    /// <summary>
    /// Creates a deterministic job ID from inputs: the first 32 lower-case hex
    /// characters of SHA-256("imageDigest:cveId:purl").
    /// </summary>
    public static string ComputeJobId(string imageDigest, string cveId, string purl)
    {
        var payload = System.Text.Encoding.UTF8.GetBytes($"{imageDigest}:{cveId}:{purl}");
        var digest = System.Security.Cryptography.SHA256.HashData(payload);
        return Convert.ToHexString(digest)[..32].ToLowerInvariant();
    }
}
/// <summary>
/// Tunable settings for reachability evidence analysis.
/// </summary>
public sealed record ReachabilityJobOptions
{
    /// <summary>Whether Layer 2 (binary resolution) analysis runs. Disabled by default.</summary>
    public bool IncludeL2 { get; init; }

    /// <summary>Whether Layer 3 (runtime observation) analysis runs when available. Disabled by default.</summary>
    public bool IncludeL3 { get; init; }

    /// <summary>Upper bound on call paths reported per sink.</summary>
    public int MaxPathsPerSink { get; init; } = 3;

    /// <summary>Upper bound on call paths reported across all sinks combined.</summary>
    public int MaxTotalPaths { get; init; } = 10;

    /// <summary>Deepest call-graph traversal allowed.</summary>
    public int MaxDepth { get; init; } = 256;

    /// <summary>Job timeout, in seconds.</summary>
    public int TimeoutSeconds { get; init; } = 300;

    /// <summary>Recompute the analysis even when a cached result exists.</summary>
    public bool ForceRecompute { get; init; }

    // --- Layer 2 (binary patch verification) inputs ---

    /// <summary>Reference binary known to be vulnerable, compared against during L2 analysis.</summary>
    public string? VulnerableBinaryPath { get; init; }

    /// <summary>Binary whose patch status L2 analysis should verify.</summary>
    public string? TargetBinaryPath { get; init; }

    // --- Layer 3 (runtime observation) inputs ---

    /// <summary>Container to observe during L3 runtime analysis.</summary>
    public string? ContainerId { get; init; }

    /// <summary>How long to observe at runtime; treated as five minutes when unset.</summary>
    public TimeSpan? RuntimeObservationDuration { get; init; }

    /// <summary>Prefer previously recorded runtime observations when present.</summary>
    public bool UseHistoricalRuntimeData { get; init; } = true;

    /// <summary>Baseline options: static analysis only, default limits.</summary>
    public static ReachabilityJobOptions Default => new ReachabilityJobOptions();

    /// <summary>Exhaustive options: every layer enabled with expanded limits.</summary>
    public static ReachabilityJobOptions Thorough => new ReachabilityJobOptions
    {
        IncludeL2 = true,
        IncludeL3 = true,
        MaxPathsPerSink = 5,
        MaxTotalPaths = 25,
        MaxDepth = 512,
        TimeoutSeconds = 600,
        RuntimeObservationDuration = TimeSpan.FromMinutes(5)
    };
}
/// <summary>
/// Result of a reachability evidence job.
/// </summary>
public sealed record ReachabilityEvidenceJobResult
{
    /// <summary>
    /// Identifier of the job that was executed.
    /// </summary>
    public required string JobId { get; init; }
    /// <summary>
    /// Terminal execution status of the job.
    /// </summary>
    public required JobStatus Status { get; init; }
    /// <summary>
    /// The reachability stack result. Null when the job failed or was cancelled.
    /// Note: a job can finish as Completed with an Unknown-verdict stack when no
    /// sink mappings or call graph were available for the image.
    /// </summary>
    public Stack.ReachabilityStack? Stack { get; init; }
    /// <summary>
    /// Evidence bundle ID where results are stored; null when nothing was persisted.
    /// </summary>
    public string? EvidenceBundleId { get; init; }
    /// <summary>
    /// Evidence bundle URI (stella:// scheme); null when nothing was persisted.
    /// </summary>
    public string? EvidenceUri { get; init; }
    /// <summary>
    /// Error message when Status is Failed or Cancelled.
    /// </summary>
    public string? Error { get; init; }
    /// <summary>
    /// When job execution started.
    /// </summary>
    public DateTimeOffset? StartedAt { get; init; }
    /// <summary>
    /// When job execution finished (successfully or not).
    /// </summary>
    public DateTimeOffset? CompletedAt { get; init; }
    /// <summary>
    /// Wall-clock execution duration in milliseconds.
    /// </summary>
    public long? DurationMs { get; init; }
}
/// <summary>
/// Status of a reachability job.
/// </summary>
/// <remarks>
/// Member order (and therefore the underlying integer values) must not be changed,
/// in case values have been persisted or serialized numerically.
/// </remarks>
public enum JobStatus
{
    /// <summary>Job is waiting in the queue and has not started.</summary>
    Queued,
    /// <summary>Job is currently executing.</summary>
    Running,
    /// <summary>Job finished; a result (possibly with an Unknown verdict) is available.</summary>
    Completed,
    /// <summary>Job terminated with an error; see the result's Error field.</summary>
    Failed,
    /// <summary>Job was cancelled before completion.</summary>
    Cancelled,
    /// <summary>Job exceeded its configured timeout.</summary>
    TimedOut
}

View File

@@ -0,0 +1,473 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-002 - Reachability Evidence Job Executor
using System.Collections.Immutable;
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Reachability.Binary;
using StellaOps.Scanner.Reachability.Runtime;
using StellaOps.Scanner.Reachability.Services;
using StellaOps.Scanner.Reachability.Stack;
// Aliases to disambiguate types with same name in different namespaces
using StackEntrypointType = StellaOps.Scanner.Reachability.Stack.EntrypointType;
using StackVulnerableSymbol = StellaOps.Scanner.Reachability.Stack.VulnerableSymbol;
using BinaryVulnerableSymbol = StellaOps.Scanner.Reachability.Binary.VulnerableSymbol;
using StackCallPath = StellaOps.Scanner.Reachability.Stack.CallPath;
namespace StellaOps.Scanner.Reachability.Jobs;
/// <summary>
/// Executes reachability evidence jobs by orchestrating call graph analysis.
/// </summary>
/// <remarks>
/// Per-job pipeline: resolve CVE sink mappings, load a call-graph snapshot, run
/// static Layer 1 reachability, optionally add Layer 2 (binary patch verification)
/// and Layer 3 (runtime observation) evidence, evaluate the combined stack, then
/// persist the evidence bundle. Failures and cancellation are reported through the
/// returned <see cref="ReachabilityEvidenceJobResult"/> rather than by throwing.
/// </remarks>
public sealed class ReachabilityEvidenceJobExecutor : IReachabilityEvidenceJobExecutor
{
    private readonly ICveSymbolMappingService _cveSymbolService;
    private readonly ICallGraphSnapshotProvider _callGraphProvider;
    private readonly IReachabilityStackEvaluator _stackEvaluator;
    private readonly IEvidenceStorageService _evidenceStorage;
    private readonly IBinaryPatchVerifier? _binaryPatchVerifier;
    private readonly IRuntimeReachabilityCollector? _runtimeCollector;
    private readonly ILogger<ReachabilityEvidenceJobExecutor> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the executor. <paramref name="binaryPatchVerifier"/> and
    /// <paramref name="runtimeCollector"/> are optional; when absent the
    /// corresponding layers degrade to "not performed" placeholder evidence.
    /// </summary>
    public ReachabilityEvidenceJobExecutor(
        ICveSymbolMappingService cveSymbolService,
        ICallGraphSnapshotProvider callGraphProvider,
        IReachabilityStackEvaluator stackEvaluator,
        IEvidenceStorageService evidenceStorage,
        ILogger<ReachabilityEvidenceJobExecutor> logger,
        IBinaryPatchVerifier? binaryPatchVerifier = null,
        IRuntimeReachabilityCollector? runtimeCollector = null,
        TimeProvider? timeProvider = null)
    {
        _cveSymbolService = cveSymbolService ?? throw new ArgumentNullException(nameof(cveSymbolService));
        _callGraphProvider = callGraphProvider ?? throw new ArgumentNullException(nameof(callGraphProvider));
        _stackEvaluator = stackEvaluator ?? throw new ArgumentNullException(nameof(stackEvaluator));
        _evidenceStorage = evidenceStorage ?? throw new ArgumentNullException(nameof(evidenceStorage));
        _binaryPatchVerifier = binaryPatchVerifier;
        _runtimeCollector = runtimeCollector;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async Task<ReachabilityEvidenceJobResult> ExecuteAsync(
        ReachabilityEvidenceJob job,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(job);
        var startedAt = _timeProvider.GetUtcNow();
        var sw = Stopwatch.StartNew();
        _logger.LogInformation(
            "Starting reachability evidence job: JobId={JobId} CVE={CveId} Image={ImageDigest}",
            job.JobId, job.CveId, job.ImageDigest);
        try
        {
            // 1. Get vulnerable sinks from CVE mapping. Without sinks there is
            // nothing to trace, so short-circuit to an Unknown-verdict result.
            var sinks = await _cveSymbolService.GetSinksForCveAsync(
                job.CveId, job.Purl, ct);
            if (sinks.Count == 0)
            {
                _logger.LogWarning(
                    "No sink mappings found for CVE {CveId} and PURL {Purl}",
                    job.CveId, job.Purl);
                return CreateUnknownResult(job, startedAt, sw,
                    $"No sink mappings found for CVE {job.CveId}");
            }
            _logger.LogDebug(
                "Found {SinkCount} sinks for CVE {CveId}: {Sinks}",
                sinks.Count, job.CveId,
                string.Join(", ", sinks.Select(s => s.SymbolName)));

            // 2. Get or compute call graph snapshot (respecting the force-recompute flag).
            var snapshot = await _callGraphProvider.GetOrComputeAsync(
                job.ImageDigest, job.Options.ForceRecompute, ct);
            if (snapshot is null || snapshot.Nodes.IsDefaultOrEmpty)
            {
                _logger.LogWarning(
                    "No call graph available for image {ImageDigest}",
                    job.ImageDigest);
                return CreateUnknownResult(job, startedAt, sw,
                    "No call graph available for image");
            }

            // 3. Run reachability analysis with explicit sinks, bounded by the job options.
            var sinkIds = sinks
                .Select(s => s.CanonicalId ?? s.SymbolName)
                .ToImmutableArray();
            var analysisOptions = new ReachabilityAnalysisOptions
            {
                ExplicitSinks = sinkIds,
                MaxPathsPerSink = job.Options.MaxPathsPerSink,
                MaxTotalPaths = job.Options.MaxTotalPaths,
                MaxDepth = job.Options.MaxDepth
            };
            // NOTE(review): options are passed both to the constructor and to
            // Analyze — looks redundant; confirm against ReachabilityAnalyzer's API.
            var analyzer = new ReachabilityAnalyzer(_timeProvider, analysisOptions);
            var analysisResult = analyzer.Analyze(snapshot, analysisOptions);
            _logger.LogInformation(
                "Reachability analysis complete: ReachableNodes={NodeCount} ReachableSinks={SinkCount} Paths={PathCount}",
                analysisResult.ReachableNodeIds.Length,
                analysisResult.ReachableSinkIds.Length,
                analysisResult.Paths.Length);

            // 4. Build Layer 1 from analysis result
            var layer1 = BuildLayer1(analysisResult);

            // 5. Build Layer 2 (binary patch verification via Ghidra)
            var layer2 = job.Options.IncludeL2
                ? await BuildLayer2Async(job, sinks, ct)
                : CreateUnknownLayer2();

            // 6. Build Layer 3 (runtime observation via eBPF)
            var layer3 = job.Options.IncludeL3
                ? await BuildLayer3Async(job, sinks, ct)
                : CreateUnknownLayer3();

            // 7. Evaluate the stack against the first mapped sink.
            var primarySink = sinks.First();
            var vulnerableSymbol = primarySink.ToVulnerableSymbol();
            var stack = _stackEvaluator.Evaluate(
                findingId: $"{job.CveId}:{job.Purl}",
                symbol: vulnerableSymbol,
                layer1: layer1,
                layer2: layer2,
                layer3: layer3,
                _timeProvider);
            _logger.LogInformation(
                "Reachability verdict: Verdict={Verdict} FindingId={FindingId}",
                stack.Verdict, stack.FindingId);

            // 8. Store evidence
            var (bundleId, evidenceUri) = await _evidenceStorage.StoreReachabilityStackAsync(
                stack, job, ct);
            sw.Stop();
            return new ReachabilityEvidenceJobResult
            {
                JobId = job.JobId,
                Status = JobStatus.Completed,
                Stack = stack,
                EvidenceBundleId = bundleId,
                EvidenceUri = evidenceUri,
                StartedAt = startedAt,
                CompletedAt = _timeProvider.GetUtcNow(),
                DurationMs = sw.ElapsedMilliseconds
            };
        }
        catch (OperationCanceledException)
        {
            _logger.LogWarning("Reachability job cancelled: JobId={JobId}", job.JobId);
            sw.Stop();
            return new ReachabilityEvidenceJobResult
            {
                JobId = job.JobId,
                Status = JobStatus.Cancelled,
                Error = "Job was cancelled",
                StartedAt = startedAt,
                CompletedAt = _timeProvider.GetUtcNow(),
                DurationMs = sw.ElapsedMilliseconds
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex,
                "Reachability job failed: JobId={JobId} Error={Error}",
                job.JobId, ex.Message);
            sw.Stop();
            return new ReachabilityEvidenceJobResult
            {
                JobId = job.JobId,
                Status = JobStatus.Failed,
                Error = ex.Message,
                StartedAt = startedAt,
                CompletedAt = _timeProvider.GetUtcNow(),
                DurationMs = sw.ElapsedMilliseconds
            };
        }
    }

    /// <summary>
    /// Maps the static analysis result into Layer 1 evidence (paths, entrypoints,
    /// confidence, and declared analysis limitations).
    /// </summary>
    private static ReachabilityLayer1 BuildLayer1(
        ReachabilityAnalysisResult analysisResult)
    {
        var isReachable = analysisResult.ReachableSinkIds.Length > 0;
        var paths = analysisResult.Paths
            .Select(p => new StackCallPath
            {
                Sites = p.NodeIds.Select((id, idx) => new CallSite(
                    MethodName: id,
                    ClassName: null,
                    FileName: null,
                    LineNumber: null,
                    Type: CallSiteType.Direct)).ToImmutableArray(),
                // NOTE(review): entrypoint type is hard-coded to HttpEndpoint —
                // confirm whether the analysis result carries a real type.
                Entrypoint = new Entrypoint(
                    Name: p.EntrypointId,
                    Type: StackEntrypointType.HttpEndpoint,
                    Location: null,
                    Description: null),
                Confidence = 1.0,
                HasConditionals = false
            })
            .ToImmutableArray();
        var reachingEntrypoints = analysisResult.Paths
            .Select(p => p.EntrypointId)
            .Distinct()
            .Select(ep => new Entrypoint(
                Name: ep,
                Type: StackEntrypointType.HttpEndpoint,
                Location: null,
                Description: null))
            .ToImmutableArray();
        var limitations = new List<string>
        {
            "Best-effort within analyzed call graph only",
            "Reflection and dynamic invocation not modeled",
            "Virtual dispatch expanded to known implementations only"
        };
        return new ReachabilityLayer1
        {
            IsReachable = isReachable,
            Confidence = isReachable ? ConfidenceLevel.High : ConfidenceLevel.Medium,
            Paths = paths,
            ReachingEntrypoints = reachingEntrypoints,
            AnalysisMethod = "BFS",
            Limitations = limitations.ToImmutableArray()
        };
    }

    /// <summary>Placeholder Layer 2 evidence for when binary analysis is skipped.</summary>
    private static ReachabilityLayer2 CreateUnknownLayer2()
    {
        return new ReachabilityLayer2
        {
            IsResolved = false,
            Confidence = ConfidenceLevel.Low,
            Reason = "Binary resolution analysis not performed"
        };
    }

    /// <summary>Placeholder Layer 3 evidence for when runtime analysis is skipped.</summary>
    private static ReachabilityLayer3 CreateUnknownLayer3()
    {
        return new ReachabilityLayer3
        {
            IsGated = false,
            Outcome = GatingOutcome.Unknown,
            Confidence = ConfidenceLevel.Low,
            Description = "Runtime gating analysis not performed"
        };
    }

    /// <summary>
    /// Runs Layer 2 binary patch verification when a verifier and both binary
    /// paths are available; otherwise, and on any failure, returns placeholder
    /// "unknown" evidence rather than failing the whole job.
    /// </summary>
    private async Task<ReachabilityLayer2> BuildLayer2Async(
        ReachabilityEvidenceJob job,
        IReadOnlyList<CveSinkMapping> sinks,
        CancellationToken ct)
    {
        if (_binaryPatchVerifier is null)
        {
            _logger.LogDebug("Binary patch verifier not available, skipping L2 analysis");
            return CreateUnknownLayer2();
        }
        // Check if we have binary paths to compare
        if (string.IsNullOrWhiteSpace(job.Options.VulnerableBinaryPath) ||
            string.IsNullOrWhiteSpace(job.Options.TargetBinaryPath))
        {
            _logger.LogDebug("Binary paths not provided, skipping L2 analysis");
            return CreateUnknownLayer2();
        }
        try
        {
            var targetSymbols = sinks.Select(s => new BinaryVulnerableSymbol
            {
                Name = s.SymbolName
            }).ToList();
            var request = new PatchVerificationRequest
            {
                VulnerableBinaryReference = job.Options.VulnerableBinaryPath,
                TargetBinaryPath = job.Options.TargetBinaryPath,
                CveId = job.CveId,
                TargetSymbols = targetSymbols,
                Options = new PatchVerificationOptions
                {
                    IncludeDecompiledCode = false,
                    PatchedThreshold = 0.85m
                }
            };
            var result = await _binaryPatchVerifier.VerifyPatchAsync(request, ct);
            if (!result.Success)
            {
                _logger.LogWarning("Binary patch verification failed: {Error}", result.Error);
                return CreateUnknownLayer2();
            }
            _logger.LogInformation(
                "Binary patch verification completed: Status={Status} Confidence={Confidence:P1}",
                result.Status, result.Confidence);
            return result.Layer2;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error during L2 binary analysis");
            return CreateUnknownLayer2();
        }
    }

    /// <summary>
    /// Runs Layer 3 runtime observation when a collector and a container ID are
    /// available; otherwise, and on any failure, returns placeholder "unknown"
    /// evidence rather than failing the whole job.
    /// </summary>
    private async Task<ReachabilityLayer3> BuildLayer3Async(
        ReachabilityEvidenceJob job,
        IReadOnlyList<CveSinkMapping> sinks,
        CancellationToken ct)
    {
        if (_runtimeCollector is null || !_runtimeCollector.IsAvailable)
        {
            _logger.LogDebug(
                "Runtime collector not available (available={Available}), skipping L3 analysis",
                _runtimeCollector?.IsAvailable ?? false);
            return CreateUnknownLayer3();
        }
        // Check if we have container info for runtime observation
        if (string.IsNullOrWhiteSpace(job.Options.ContainerId))
        {
            _logger.LogDebug("Container ID not provided, skipping L3 analysis");
            return CreateUnknownLayer3();
        }
        try
        {
            var targetSymbols = sinks
                .Select(s => s.SymbolName)
                .ToList();
            var request = new RuntimeObservationRequest
            {
                ContainerId = job.Options.ContainerId,
                ImageDigest = job.ImageDigest,
                TargetSymbols = targetSymbols,
                Duration = job.Options.RuntimeObservationDuration ?? TimeSpan.FromMinutes(5),
                // Fix: honor the job's UseHistoricalRuntimeData option instead of
                // hard-coding true, which silently ignored the caller's setting.
                UseHistoricalData = job.Options.UseHistoricalRuntimeData
            };
            var result = await _runtimeCollector.ObserveAsync(request, ct);
            if (!result.Success)
            {
                _logger.LogWarning("Runtime observation failed: {Error}", result.Error);
                return CreateUnknownLayer3();
            }
            _logger.LogInformation(
                "Runtime observation completed: Outcome={Outcome} Source={Source} ObservedSymbols={Count}",
                result.Layer3.Outcome,
                result.Source,
                result.Observations.Count(o => o.WasObserved));
            return result.Layer3;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error during L3 runtime analysis");
            return CreateUnknownLayer3();
        }
    }

    /// <summary>
    /// Builds a Completed result carrying a minimal Unknown-verdict stack, used
    /// when analysis inputs (sink mappings or call graph) are missing.
    /// </summary>
    private ReachabilityEvidenceJobResult CreateUnknownResult(
        ReachabilityEvidenceJob job,
        DateTimeOffset startedAt,
        Stopwatch sw,
        string reason)
    {
        sw.Stop();
        // Create a minimal stack with Unknown verdict
        var unknownStack = new ReachabilityStack
        {
            Id = Guid.NewGuid().ToString("N"),
            FindingId = $"{job.CveId}:{job.Purl}",
            Symbol = new StackVulnerableSymbol(
                Name: "unknown",
                Library: null,
                Version: null,
                VulnerabilityId: job.CveId,
                Type: SymbolType.Function),
            StaticCallGraph = new ReachabilityLayer1
            {
                IsReachable = false,
                Confidence = ConfidenceLevel.Low,
                AnalysisMethod = "none",
                Limitations = [reason]
            },
            BinaryResolution = CreateUnknownLayer2(),
            RuntimeGating = CreateUnknownLayer3(),
            Verdict = ReachabilityVerdict.Unknown,
            AnalyzedAt = _timeProvider.GetUtcNow(),
            Explanation = reason
        };
        return new ReachabilityEvidenceJobResult
        {
            JobId = job.JobId,
            Status = JobStatus.Completed,
            Stack = unknownStack,
            StartedAt = startedAt,
            CompletedAt = _timeProvider.GetUtcNow(),
            DurationMs = sw.ElapsedMilliseconds
        };
    }
}
/// <summary>
/// Provider for call graph snapshots.
/// </summary>
public interface ICallGraphSnapshotProvider
{
    /// <summary>
    /// Gets or computes a call graph snapshot for an image.
    /// </summary>
    /// <param name="imageDigest">Digest of the image to analyze (sha256:...).</param>
    /// <param name="forceRecompute">When true, recompute even if a cached snapshot exists.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The snapshot, or null when no call graph can be produced for the image.</returns>
    Task<CallGraphSnapshot?> GetOrComputeAsync(
        string imageDigest,
        bool forceRecompute,
        CancellationToken ct);
}
/// <summary>
/// Service for storing reachability evidence.
/// </summary>
public interface IEvidenceStorageService
{
    /// <summary>
    /// Stores a reachability stack as an evidence bundle.
    /// </summary>
    /// <param name="stack">The evaluated reachability stack to persist.</param>
    /// <param name="job">The originating job (supplies identity/tenant context).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Tuple of (bundleId, evidenceUri) — the evidenceUri uses the stella:// scheme.</returns>
    Task<(string BundleId, string EvidenceUri)> StoreReachabilityStackAsync(
        ReachabilityStack stack,
        ReachabilityEvidenceJob job,
        CancellationToken ct);
}

View File

@@ -0,0 +1,302 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-004 - eBPF Runtime Reachability Collector
using System.Collections.Immutable;
using System.Runtime.InteropServices;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Reachability.Stack;
using StellaOps.Signals.Ebpf.Schema;
using StellaOps.Signals.Ebpf.Services;
namespace StellaOps.Scanner.Reachability.Runtime;
/// <summary>
/// Runtime reachability collector using the existing eBPF signal infrastructure.
/// </summary>
/// <remarks>
/// Prefers previously persisted observations when the request allows it; otherwise
/// starts a live eBPF collection session for the requested duration. All failures
/// are folded into the returned result rather than thrown.
/// </remarks>
public sealed class EbpfRuntimeReachabilityCollector : IRuntimeReachabilityCollector
{
    private readonly IRuntimeSignalCollector _signalCollector;
    private readonly IRuntimeObservationStore _observationStore;
    private readonly ILogger<EbpfRuntimeReachabilityCollector> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>Creates the collector; all dependencies except the time provider are required.</summary>
    public EbpfRuntimeReachabilityCollector(
        IRuntimeSignalCollector signalCollector,
        IRuntimeObservationStore observationStore,
        ILogger<EbpfRuntimeReachabilityCollector> logger,
        TimeProvider? timeProvider = null)
    {
        _signalCollector = signalCollector ?? throw new ArgumentNullException(nameof(signalCollector));
        _observationStore = observationStore ?? throw new ArgumentNullException(nameof(observationStore));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public bool IsAvailable => _signalCollector.IsSupported() && RuntimeInformation.IsOSPlatform(OSPlatform.Linux);

    /// <inheritdoc />
    public string Platform => IsAvailable ? "linux/ebpf" : "unsupported";

    /// <inheritdoc />
    public async Task<RuntimeReachabilityResult> ObserveAsync(
        RuntimeObservationRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        var startTime = _timeProvider.GetUtcNow();
        _logger.LogInformation(
            "Starting runtime observation for container {ContainerId} with {SymbolCount} target symbols",
            request.ContainerId, request.TargetSymbols.Count);
        try
        {
            // Try historical data first if requested; avoids a live session entirely.
            if (request.UseHistoricalData)
            {
                var historical = await TryGetHistoricalObservationsAsync(request, ct);
                if (historical is not null)
                {
                    _logger.LogDebug(
                        "Using historical observation data for container {ContainerId}",
                        request.ContainerId);
                    return historical;
                }
            }
            // Fall back to live observation if available
            if (!IsAvailable)
            {
                return CreateUnavailableResult(request, startTime,
                    "eBPF runtime observation not available on this platform");
            }
            return await PerformLiveObservationAsync(request, startTime, ct);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex,
                "Runtime observation failed for container {ContainerId}",
                request.ContainerId);
            return CreateFailedResult(request, startTime, ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<SymbolObservation>> CheckObservationsAsync(
        string containerId,
        IReadOnlyList<string> symbols,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(containerId);
        ArgumentNullException.ThrowIfNull(symbols);
        return await _observationStore.GetObservationsAsync(containerId, symbols, ct);
    }

    /// <summary>
    /// Returns a result built from stored observations, or null when the store
    /// has no data for this container/symbol set (caller then falls back to live).
    /// </summary>
    private async Task<RuntimeReachabilityResult?> TryGetHistoricalObservationsAsync(
        RuntimeObservationRequest request,
        CancellationToken ct)
    {
        var observations = await _observationStore.GetObservationsAsync(
            request.ContainerId,
            request.TargetSymbols,
            ct);
        if (observations.Count == 0)
        {
            return null;
        }
        // Fix: removed unused local `anyObserved`; BuildLayer3FromObservations
        // derives the same information itself.
        var layer3 = BuildLayer3FromObservations(observations, ObservationSource.Historical);
        return new RuntimeReachabilityResult
        {
            Success = true,
            Layer3 = layer3,
            Observations = observations,
            ObservedAt = _timeProvider.GetUtcNow(),
            Duration = TimeSpan.Zero,
            Source = ObservationSource.Historical
        };
    }

    /// <summary>
    /// Runs a live eBPF collection session for the requested duration, then reads
    /// the resulting observations back from the store.
    /// </summary>
    private async Task<RuntimeReachabilityResult> PerformLiveObservationAsync(
        RuntimeObservationRequest request,
        DateTimeOffset startTime,
        CancellationToken ct)
    {
        var options = new RuntimeSignalOptions
        {
            TargetSymbols = request.TargetSymbols.ToList(),
            MaxDuration = request.Duration,
            SampleRate = request.SampleRate,
            ResolveSymbols = true,
            MaxEventsPerSecond = 5000
        };
        var handle = await _signalCollector.StartCollectionAsync(
            request.ContainerId, options, ct);
        _logger.LogDebug(
            "Started eBPF signal collection session {SessionId} for container {ContainerId}",
            handle.SessionId, request.ContainerId);
        try
        {
            // Wait for collection duration or cancellation
            using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(ct);
            linkedCts.CancelAfter(request.Duration);
            try
            {
                await Task.Delay(request.Duration, linkedCts.Token);
            }
            catch (OperationCanceledException) when (!ct.IsCancellationRequested)
            {
                // Duration elapsed, this is expected
            }
        }
        finally
        {
            // Fix: stop with CancellationToken.None so the eBPF session is torn
            // down even when the caller's token has already been cancelled —
            // passing `ct` here would throw immediately and leak the session.
            var summary = await _signalCollector.StopCollectionAsync(handle, CancellationToken.None);
            _logger.LogInformation(
                "Stopped eBPF signal collection: {TotalEvents} events, {UniqueSymbols} symbols observed",
                summary.TotalEvents, summary.ObservedSymbols.Count);
        }
        // Get final observations
        var observations = await _observationStore.GetObservationsAsync(
            request.ContainerId,
            request.TargetSymbols,
            ct);
        var layer3 = BuildLayer3FromObservations(observations, ObservationSource.Live);
        var duration = _timeProvider.GetUtcNow() - startTime;
        return new RuntimeReachabilityResult
        {
            Success = true,
            Layer3 = layer3,
            Observations = observations,
            ObservedAt = startTime,
            Duration = duration,
            Source = ObservationSource.Live
        };
    }

    /// <summary>
    /// Maps raw symbol observations into Layer 3 evidence: observed symbols mean
    /// "definitely not gated"; no observations mean "unknown" (absence of evidence
    /// is not evidence of gating).
    /// </summary>
    private ReachabilityLayer3 BuildLayer3FromObservations(
        IReadOnlyList<SymbolObservation> observations,
        ObservationSource source)
    {
        var observedSymbols = observations.Where(o => o.WasObserved).ToList();
        var anyTargetObserved = observedSymbols.Count > 0;
        if (!anyTargetObserved)
        {
            // No target symbols observed - potentially gated
            return new ReachabilityLayer3
            {
                IsGated = false, // We can't confirm it's gated, just not observed
                Outcome = GatingOutcome.Unknown,
                Confidence = ConfidenceLevel.Medium,
                Description = $"No target symbols observed during {source.ToString().ToLowerInvariant()} monitoring"
            };
        }
        // Target symbols were observed - definitely not gated
        var conditions = observedSymbols
            .Select(o => new GatingCondition(
                Type: GatingType.EnvironmentVariable, // Best approximation
                Description: $"Symbol '{o.Symbol}' was executed {o.ObservationCount} time(s)",
                ConfigKey: null,
                EnvVar: null,
                IsBlocking: false,
                Status: GatingStatus.Enabled))
            .ToImmutableArray();
        return new ReachabilityLayer3
        {
            IsGated = false,
            Outcome = GatingOutcome.NotGated,
            Confidence = ConfidenceLevel.High,
            Conditions = conditions,
            Description = $"{observedSymbols.Count} target symbol(s) observed executing at runtime"
        };
    }

    /// <summary>Result used when eBPF observation is unsupported on this platform.</summary>
    private RuntimeReachabilityResult CreateUnavailableResult(
        RuntimeObservationRequest request,
        DateTimeOffset startTime,
        string reason)
    {
        return new RuntimeReachabilityResult
        {
            Success = false,
            Layer3 = new ReachabilityLayer3
            {
                IsGated = false,
                Outcome = GatingOutcome.Unknown,
                Confidence = ConfidenceLevel.Low,
                Description = reason
            },
            ObservedAt = startTime,
            Duration = TimeSpan.Zero,
            Error = reason,
            Source = ObservationSource.None
        };
    }

    /// <summary>Result used when observation threw; carries the error text.</summary>
    private RuntimeReachabilityResult CreateFailedResult(
        RuntimeObservationRequest request,
        DateTimeOffset startTime,
        string error)
    {
        return new RuntimeReachabilityResult
        {
            Success = false,
            Layer3 = new ReachabilityLayer3
            {
                IsGated = false,
                Outcome = GatingOutcome.Unknown,
                Confidence = ConfidenceLevel.Low,
                Description = $"Runtime observation failed: {error}"
            },
            ObservedAt = startTime,
            Duration = _timeProvider.GetUtcNow() - startTime,
            Error = error,
            Source = ObservationSource.None
        };
    }
}
/// <summary>
/// Store for persisting and retrieving runtime observations.
/// </summary>
public interface IRuntimeObservationStore
{
    /// <summary>
    /// Gets observations for specific symbols in a container.
    /// </summary>
    /// <param name="containerId">Container whose recorded observations to query.</param>
    /// <param name="symbols">Symbol names to look up.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Observation records; may be empty when nothing has been recorded.</returns>
    Task<IReadOnlyList<SymbolObservation>> GetObservationsAsync(
        string containerId,
        IReadOnlyList<string> symbols,
        CancellationToken ct = default);
    /// <summary>
    /// Stores a symbol observation.
    /// </summary>
    /// <param name="containerId">Container the observation was captured in.</param>
    /// <param name="imageDigest">Digest of the image the container runs.</param>
    /// <param name="observation">The observation to persist.</param>
    /// <param name="ct">Cancellation token.</param>
    Task StoreObservationAsync(
        string containerId,
        string imageDigest,
        SymbolObservation observation,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,195 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-004 - Runtime Reachability Collection
using StellaOps.Scanner.Reachability.Stack;
namespace StellaOps.Scanner.Reachability.Runtime;
/// <summary>
/// Collects runtime reachability evidence by observing function calls.
/// Bridges the eBPF signal infrastructure with reachability Layer 3.
/// </summary>
public interface IRuntimeReachabilityCollector
{
    /// <summary>
    /// Checks if runtime collection is available on this system.
    /// Callers should consult this before invoking <see cref="ObserveAsync"/>.
    /// </summary>
    bool IsAvailable { get; }
    /// <summary>
    /// Gets the platform and collection method (e.g. "linux/ebpf", or "unsupported"
    /// when <see cref="IsAvailable"/> is false).
    /// </summary>
    string Platform { get; }
    /// <summary>
    /// Observes runtime execution and builds Layer 3 evidence.
    /// Implementations report errors through the result rather than throwing.
    /// </summary>
    /// <param name="request">Observation request with targets.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Layer 3 reachability result.</returns>
    Task<RuntimeReachabilityResult> ObserveAsync(
        RuntimeObservationRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Checks if specific symbols were observed in past runtime data.
    /// </summary>
    /// <param name="containerId">Container to check.</param>
    /// <param name="symbols">Symbols to look for.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Observation results for each symbol.</returns>
    Task<IReadOnlyList<SymbolObservation>> CheckObservationsAsync(
        string containerId,
        IReadOnlyList<string> symbols,
        CancellationToken ct = default);
}
/// <summary>
/// Request to observe runtime reachability.
/// </summary>
public sealed record RuntimeObservationRequest
{
    /// <summary>Identifier of the container to observe.</summary>
    public required string ContainerId { get; init; }

    /// <summary>Digest of the image the container is running.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>Vulnerable sink symbols to watch for during observation.</summary>
    public required IReadOnlyList<string> TargetSymbols { get; init; }

    /// <summary>How long to observe; five minutes by default.</summary>
    public TimeSpan Duration { get; init; } = TimeSpan.FromMinutes(5);

    /// <summary>Sampling rate, where 1 captures every call.</summary>
    public int SampleRate { get; init; } = 1;

    /// <summary>Prefer previously recorded data over a live observation session.</summary>
    public bool UseHistoricalData { get; init; } = true;

    /// <summary>
    /// Look-back window for historical data, one week by default.
    /// NOTE(review): not consumed by the eBPF collector visible in this file —
    /// confirm whether the observation store applies it.
    /// </summary>
    public TimeSpan HistoricalWindow { get; init; } = TimeSpan.FromDays(7);
}
/// <summary>
/// Result of runtime reachability observation.
/// </summary>
public sealed record RuntimeReachabilityResult
{
    /// <summary>
    /// Whether the observation was successful. When false, <see cref="Error"/>
    /// carries the reason and Layer3 holds placeholder "unknown" evidence.
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Layer 3 model populated from observations.
    /// </summary>
    public required ReachabilityLayer3 Layer3 { get; init; }
    /// <summary>
    /// Details about each observed target symbol; empty when nothing was recorded.
    /// </summary>
    public IReadOnlyList<SymbolObservation> Observations { get; init; } = [];
    /// <summary>
    /// When observation started.
    /// </summary>
    public DateTimeOffset ObservedAt { get; init; }
    /// <summary>
    /// Observation duration (zero for historical/unavailable results).
    /// </summary>
    public TimeSpan Duration { get; init; }
    /// <summary>
    /// Error message if observation failed.
    /// </summary>
    public string? Error { get; init; }
    /// <summary>
    /// Source of the data (live, historical, none).
    /// </summary>
    public ObservationSource Source { get; init; }
}
/// <summary>
/// Runtime observation outcome for a single watched symbol.
/// </summary>
public sealed record SymbolObservation
{
    /// <summary>The symbol name that was watched for.</summary>
    public required string Symbol { get; init; }

    /// <summary>True when the symbol was seen executing at least once.</summary>
    public required bool WasObserved { get; init; }

    /// <summary>Total number of recorded executions.</summary>
    public int ObservationCount { get; init; }

    /// <summary>Timestamp of the earliest recorded execution, if any.</summary>
    public DateTimeOffset? FirstObservedAt { get; init; }

    /// <summary>Timestamp of the most recent recorded execution, if any.</summary>
    public DateTimeOffset? LastObservedAt { get; init; }

    /// <summary>Call paths that led to this symbol; empty when none were captured.</summary>
    public IReadOnlyList<ObservedPath> Paths { get; init; } = Array.Empty<ObservedPath>();
}
/// <summary>
/// A call path observed at runtime.
/// </summary>
public sealed record ObservedPath
{
    /// <summary>Ordered symbols along the path, from entrypoint to target.</summary>
    public required IReadOnlyList<string> Symbols { get; init; }

    /// <summary>How many times this exact path was seen.</summary>
    public int Count { get; init; }
}
/// <summary>
/// Source of observation data.
/// </summary>
public enum ObservationSource
{
    /// <summary>Live observation via a fresh eBPF collection session.</summary>
    Live,
    /// <summary>Historical data retrieved from previously stored observations.</summary>
    Historical,
    /// <summary>No observation data available (platform unsupported or observation failed).</summary>
    None
}

View File

@@ -0,0 +1,142 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001 - Evidence Pipeline DI Registration
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Reachability.Binary;
using StellaOps.Scanner.Reachability.Jobs;
using StellaOps.Scanner.Reachability.Runtime;
using StellaOps.Scanner.Reachability.Services;
using StellaOps.Scanner.Reachability.Stack;
using StellaOps.Scanner.Reachability.Vex;
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Extension methods for registering reachability evidence services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds reachability evidence pipeline services.
/// </summary>
/// <remarks>
/// All registrations use TryAdd*, so callers may register their own implementations
/// before calling this method to override any of the defaults.
/// </remarks>
/// <param name="services">Service collection.</param>
/// <param name="connectionString">PostgreSQL connection string for CVE-symbol mappings.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddReachabilityEvidence(
    this IServiceCollection services,
    string connectionString)
{
    ArgumentNullException.ThrowIfNull(services);
    ArgumentException.ThrowIfNullOrWhiteSpace(connectionString);
    // CVE-Symbol Mapping Service (Postgres-backed; logger resolved from the container)
    services.TryAddSingleton<ICveSymbolMappingService>(sp =>
        new PostgresCveSymbolMappingRepository(
            connectionString,
            sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<PostgresCveSymbolMappingRepository>>()));
    // Stack Evaluator (already exists, ensure registered)
    services.TryAddSingleton<IReachabilityStackEvaluator, ReachabilityStackEvaluator>();
    // VEX Integration
    services.TryAddSingleton<IVexStatusDeterminer, VexStatusDeterminer>();
    // Job Executor (scoped: one per job-processing scope)
    services.TryAddScoped<IReachabilityEvidenceJobExecutor, ReachabilityEvidenceJobExecutor>();
    // Runtime Collection (optional - requires eBPF infrastructure)
    services.TryAddSingleton<IRuntimeReachabilityCollector, EbpfRuntimeReachabilityCollector>();
    // Binary Patch Verification (requires Ghidra infrastructure)
    services.TryAddSingleton<IBinaryPatchVerifier, BinaryPatchVerifier>();
    return services;
}
/// <summary>
/// Adds reachability evidence pipeline services with custom CVE mapping service.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="cveSymbolMappingFactory">Factory for CVE mapping service.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddReachabilityEvidence(
this IServiceCollection services,
Func<IServiceProvider, ICveSymbolMappingService> cveSymbolMappingFactory)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(cveSymbolMappingFactory);
services.TryAddSingleton(cveSymbolMappingFactory);
services.TryAddSingleton<IReachabilityStackEvaluator, ReachabilityStackEvaluator>();
services.TryAddSingleton<IVexStatusDeterminer, VexStatusDeterminer>();
services.TryAddScoped<IReachabilityEvidenceJobExecutor, ReachabilityEvidenceJobExecutor>();
services.TryAddSingleton<IRuntimeReachabilityCollector, EbpfRuntimeReachabilityCollector>();
services.TryAddSingleton<IBinaryPatchVerifier, BinaryPatchVerifier>();
return services;
}
/// <summary>
/// Adds reachability evidence pipeline services with all dependencies.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configure">Configuration callback.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddReachabilityEvidence(
this IServiceCollection services,
Action<ReachabilityEvidenceOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
var options = new ReachabilityEvidenceOptions();
configure(options);
if (string.IsNullOrWhiteSpace(options.ConnectionString))
{
throw new InvalidOperationException("ConnectionString is required");
}
services.AddReachabilityEvidence(options.ConnectionString);
if (options.TimeProvider is not null)
{
services.AddSingleton(options.TimeProvider);
}
return services;
}
}
/// <summary>
/// Options for configuring reachability evidence services.
/// </summary>
public sealed class ReachabilityEvidenceOptions
{
    /// <summary>
    /// PostgreSQL connection string used for the CVE-symbol mapping store.
    /// Defaults to empty; must be set by the configuration callback.
    /// </summary>
    public string ConnectionString { get; set; } = string.Empty;

    /// <summary>
    /// Optional time provider override (useful for testing); null means the
    /// ambient clock is used.
    /// </summary>
    public TimeProvider? TimeProvider { get; set; }

    /// <summary>
    /// Whether runtime observation via eBPF is enabled. Off by default.
    /// </summary>
    public bool EnableRuntimeObservation { get; set; }

    /// <summary>
    /// Whether binary patch verification is enabled. Off by default.
    /// </summary>
    public bool EnableBinaryPatchVerification { get; set; }

    /// <summary>
    /// Upper bound, in seconds, for a single evidence job. Defaults to 300.
    /// </summary>
    public int MaxJobTimeoutSeconds { get; set; } = 300;
}

View File

@@ -0,0 +1,215 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-001 - CVE-Symbol Mapping Service
using StellaOps.Scanner.Reachability.Stack;
namespace StellaOps.Scanner.Reachability.Services;
/// <summary>
/// Service for looking up vulnerable symbols (sinks) associated with CVEs.
/// Used to target reachability analysis at specific vulnerable code paths.
/// </summary>
public interface ICveSymbolMappingService
{
    /// <summary>
    /// Gets vulnerable symbols (sinks) for a given CVE and package.
    /// </summary>
    /// <param name="cveId">The CVE identifier (e.g., "CVE-2021-44228").</param>
    /// <param name="purl">Package URL of the affected component.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of vulnerable symbols that should be treated as sinks.</returns>
    /// <remarks>
    /// The PostgreSQL implementation matches the CVE case-insensitively, matches the
    /// purl exactly, and returns only mappings typed as sink (or both source and sink).
    /// </remarks>
    Task<IReadOnlyList<CveSinkMapping>> GetSinksForCveAsync(
        string cveId,
        string purl,
        CancellationToken ct = default);

    /// <summary>
    /// Checks if any sink mappings exist for a CVE.
    /// </summary>
    /// <param name="cveId">The CVE identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if mappings exist.</returns>
    Task<bool> HasMappingAsync(string cveId, CancellationToken ct = default);

    /// <summary>
    /// Gets all sink mappings for a CVE regardless of package.
    /// </summary>
    /// <param name="cveId">The CVE identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>All sink mappings for the CVE.</returns>
    Task<IReadOnlyList<CveSinkMapping>> GetAllMappingsForCveAsync(
        string cveId,
        CancellationToken ct = default);

    /// <summary>
    /// Adds or updates a CVE-to-sink mapping.
    /// </summary>
    /// <param name="mapping">The mapping to upsert.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The stored mapping with generated ID.</returns>
    Task<CveSinkMapping> UpsertMappingAsync(
        CveSinkMapping mapping,
        CancellationToken ct = default);

    /// <summary>
    /// Gets the total count of mappings in the database.
    /// </summary>
    Task<int> GetMappingCountAsync(CancellationToken ct = default);
}
/// <summary>
/// A mapping from a CVE to a vulnerable symbol (sink).
/// </summary>
public sealed record CveSinkMapping
{
    /// <summary>Unique mapping ID.</summary>
    public Guid? MappingId { get; init; }

    /// <summary>CVE identifier (e.g., "CVE-2021-44228").</summary>
    public required string CveId { get; init; }

    /// <summary>Package URL of the affected component.</summary>
    public required string Purl { get; init; }

    /// <summary>Name of the vulnerable symbol/function.</summary>
    public required string SymbolName { get; init; }

    /// <summary>Canonical symbol ID for cross-language matching.</summary>
    public string? CanonicalId { get; init; }

    /// <summary>File path where the symbol is defined.</summary>
    public string? FilePath { get; init; }

    /// <summary>Start line number in the file.</summary>
    public int? StartLine { get; init; }

    /// <summary>End line number in the file.</summary>
    public int? EndLine { get; init; }

    /// <summary>Source of this mapping.</summary>
    public required MappingSource Source { get; init; }

    /// <summary>Type of vulnerability (source, sink, gadget).</summary>
    public required VulnerabilityType VulnType { get; init; }

    /// <summary>Confidence score (0.0 to 1.0).</summary>
    public required decimal Confidence { get; init; }

    /// <summary>URI to evidence supporting this mapping.</summary>
    public string? EvidenceUri { get; init; }

    /// <summary>URL to the source commit (if from patch analysis).</summary>
    public string? SourceCommitUrl { get; init; }

    /// <summary>URL to the patch.</summary>
    public string? PatchUrl { get; init; }

    /// <summary>Affected version ranges.</summary>
    public IReadOnlyList<string>? AffectedVersions { get; init; }

    /// <summary>Fixed versions.</summary>
    public IReadOnlyList<string>? FixedVersions { get; init; }

    /// <summary>When this mapping was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>When this mapping was last updated.</summary>
    public DateTimeOffset UpdatedAt { get; init; }

    // Characters that terminate the core "type/namespace/name@version" portion of a
    // purl: qualifiers start with '?' and subpath with '#'.
    private static readonly char[] PurlTailDelimiters = ['?', '#'];

    /// <summary>
    /// Converts this mapping to a VulnerableSymbol for use in reachability analysis.
    /// </summary>
    /// <returns>
    /// A <see cref="VulnerableSymbol"/> whose name prefers <see cref="CanonicalId"/>
    /// (cross-language) over the raw <see cref="SymbolName"/>.
    /// </returns>
    public VulnerableSymbol ToVulnerableSymbol()
    {
        var symbolType = InferSymbolType(Purl);
        return new VulnerableSymbol(
            Name: CanonicalId ?? SymbolName,
            Library: ExtractLibraryFromPurl(Purl),
            Version: ExtractVersionFromPurl(Purl),
            VulnerabilityId: CveId,
            Type: symbolType);
    }

    /// <summary>
    /// Infers the symbol type from the purl's package-ecosystem prefix.
    /// Falls back to a generic function type for unrecognized ecosystems.
    /// </summary>
    private static SymbolType InferSymbolType(string purl)
    {
        if (purl.StartsWith("pkg:maven/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.JavaMethod;
        if (purl.StartsWith("pkg:npm/", StringComparison.OrdinalIgnoreCase) ||
            purl.StartsWith("pkg:deno/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.JsFunction;
        if (purl.StartsWith("pkg:pypi/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.PyFunction;
        if (purl.StartsWith("pkg:nuget/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.Method;
        if (purl.StartsWith("pkg:golang/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.GoFunction;
        if (purl.StartsWith("pkg:cargo/", StringComparison.OrdinalIgnoreCase))
            return SymbolType.RustFunction;
        return SymbolType.Function;
    }

    /// <summary>
    /// Extracts the package name from a purl, e.g. "log4j-core" from
    /// "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1". Returns null when
    /// no name segment can be isolated.
    /// </summary>
    private static string? ExtractLibraryFromPurl(string purl)
    {
        // Strip qualifiers/subpath first so "...name?type=jar" does not leak into the name.
        var core = StripPurlTail(purl);
        var atIndex = core.IndexOf('@');
        var path = atIndex > 0 ? core[..atIndex] : core;
        var lastSlash = path.LastIndexOf('/');
        return lastSlash > 0 ? path[(lastSlash + 1)..] : null;
    }

    /// <summary>
    /// Extracts the version from a purl, or null when no version is present.
    /// </summary>
    private static string? ExtractVersionFromPurl(string purl)
    {
        // Fix: previously "pkg:maven/a/b@1.0?type=jar" yielded "1.0?type=jar" — per the
        // purl spec, qualifiers ('?') and subpath ('#') are not part of the version.
        var core = StripPurlTail(purl);
        var atIndex = core.IndexOf('@');
        return atIndex > 0 ? core[(atIndex + 1)..] : null;
    }

    /// <summary>Removes the qualifiers/subpath tail from a purl, if any.</summary>
    private static string StripPurlTail(string purl)
    {
        var end = purl.IndexOfAny(PurlTailDelimiters);
        return end >= 0 ? purl[..end] : purl;
    }
}
/// <summary>
/// Source of a CVE-symbol mapping.
/// </summary>
/// <remarks>
/// Persisted as snake_case labels of the reachability.mapping_source PostgreSQL
/// enum (see PostgresCveSymbolMappingRepository.MapSourceToString); keep the two
/// in sync when adding members.
/// </remarks>
public enum MappingSource
{
    /// <summary>Derived from patch/commit analysis.</summary>
    PatchAnalysis,

    /// <summary>From OSV advisory.</summary>
    OsvAdvisory,

    /// <summary>From NVD CPE data.</summary>
    NvdCpe,

    /// <summary>Manually curated.</summary>
    ManualCuration,

    /// <summary>From fuzzing corpus.</summary>
    FuzzingCorpus,

    /// <summary>From exploit database.</summary>
    ExploitDatabase,

    /// <summary>Unknown source.</summary>
    Unknown
}
/// <summary>
/// Type of vulnerability for taint analysis.
/// </summary>
/// <remarks>
/// Persisted as snake_case labels of the reachability.vulnerability_type PostgreSQL
/// enum (see PostgresCveSymbolMappingRepository.MapVulnTypeToString); keep the two
/// in sync when adding members.
/// </remarks>
public enum VulnerabilityType
{
    /// <summary>Taint source (user input).</summary>
    Source,

    /// <summary>Dangerous sink (vulnerable function).</summary>
    Sink,

    /// <summary>Gadget in an exploit chain.</summary>
    Gadget,

    /// <summary>Both source and sink.</summary>
    BothSourceAndSink,

    /// <summary>Unknown type.</summary>
    Unknown
}

View File

@@ -0,0 +1,277 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// Sprint: EVID-001-001 - CVE-Symbol Mapping Repository
using System.Data;
using Microsoft.Extensions.Logging;
using Npgsql;
namespace StellaOps.Scanner.Reachability.Services;
/// <summary>
/// PostgreSQL implementation of <see cref="ICveSymbolMappingService"/>.
/// Uses the reachability.cve_symbol_mappings schema.
/// </summary>
/// <remarks>
/// Opens a fresh connection per call; this relies on Npgsql's built-in connection
/// pooling (enabled by default) to keep that cheap.
/// </remarks>
public sealed class PostgresCveSymbolMappingRepository : ICveSymbolMappingService
{
    private readonly string _connectionString;
    private readonly ILogger<PostgresCveSymbolMappingRepository> _logger;

    /// <summary>
    /// Creates a repository bound to the given PostgreSQL connection string.
    /// </summary>
    /// <param name="connectionString">PostgreSQL connection string.</param>
    /// <param name="logger">Logger for diagnostics.</param>
    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    public PostgresCveSymbolMappingRepository(
        string connectionString,
        ILogger<PostgresCveSymbolMappingRepository> logger)
    {
        _connectionString = connectionString ?? throw new ArgumentNullException(nameof(connectionString));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<CveSinkMapping>> GetSinksForCveAsync(
        string cveId,
        string purl,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        ArgumentException.ThrowIfNullOrWhiteSpace(purl);

        // CVE IDs are matched case-insensitively; the purl is matched exactly.
        // Only sink-typed mappings are returned, highest confidence first.
        // Column order here must stay in sync with MapFromReader's ordinals.
        const string sql = """
            SELECT
                mapping_id, cve_id, purl, symbol_name, canonical_id,
                file_path, start_line, end_line, source, vulnerability_type,
                confidence, evidence_uri, source_commit_url, patch_url,
                affected_versions, fixed_versions, created_at, updated_at
            FROM reachability.cve_symbol_mappings
            WHERE UPPER(cve_id) = UPPER(@cveId)
              AND purl = @purl
              AND vulnerability_type IN ('sink', 'both_source_and_sink')
            ORDER BY confidence DESC, symbol_name ASC
            """;

        await using var conn = await OpenConnectionAsync(ct);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@cveId", cveId);
        cmd.Parameters.AddWithValue("@purl", purl);

        var results = new List<CveSinkMapping>();
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        while (await reader.ReadAsync(ct))
        {
            results.Add(MapFromReader(reader));
        }

        _logger.LogDebug(
            "Found {Count} sink mappings for CVE {CveId} and PURL {Purl}",
            results.Count, cveId, purl);
        return results;
    }

    /// <inheritdoc />
    public async Task<bool> HasMappingAsync(string cveId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);

        const string sql = """
            SELECT EXISTS(
                SELECT 1 FROM reachability.cve_symbol_mappings
                WHERE UPPER(cve_id) = UPPER(@cveId)
            )
            """;

        await using var conn = await OpenConnectionAsync(ct);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@cveId", cveId);

        // EXISTS yields a single non-null boolean scalar, so a pattern match suffices.
        var result = await cmd.ExecuteScalarAsync(ct);
        return result is true;
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<CveSinkMapping>> GetAllMappingsForCveAsync(
        string cveId,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);

        // Unlike GetSinksForCveAsync, this does not filter by purl or vulnerability type.
        const string sql = """
            SELECT
                mapping_id, cve_id, purl, symbol_name, canonical_id,
                file_path, start_line, end_line, source, vulnerability_type,
                confidence, evidence_uri, source_commit_url, patch_url,
                affected_versions, fixed_versions, created_at, updated_at
            FROM reachability.cve_symbol_mappings
            WHERE UPPER(cve_id) = UPPER(@cveId)
            ORDER BY purl, confidence DESC, symbol_name ASC
            """;

        await using var conn = await OpenConnectionAsync(ct);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@cveId", cveId);

        var results = new List<CveSinkMapping>();
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        while (await reader.ReadAsync(ct))
        {
            results.Add(MapFromReader(reader));
        }
        return results;
    }

    /// <inheritdoc />
    public async Task<CveSinkMapping> UpsertMappingAsync(
        CveSinkMapping mapping,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(mapping);

        // Enum-valued columns are sent as text and cast server-side to the schema's
        // PostgreSQL enum types. NOTE(review): the ON CONFLICT target references
        // cve_id_normalized, which is not an inserted column — presumably a generated
        // column with a unique index; confirm against the schema migration.
        const string sql = """
            INSERT INTO reachability.cve_symbol_mappings (
                cve_id, purl, symbol_name, canonical_id, file_path,
                start_line, end_line, source, vulnerability_type, confidence,
                evidence_uri, source_commit_url, patch_url,
                affected_versions, fixed_versions
            ) VALUES (
                @cveId, @purl, @symbolName, @canonicalId, @filePath,
                @startLine, @endLine, @source::reachability.mapping_source,
                @vulnType::reachability.vulnerability_type, @confidence,
                @evidenceUri, @sourceCommitUrl, @patchUrl,
                @affectedVersions, @fixedVersions
            )
            ON CONFLICT (cve_id_normalized, purl, symbol_name)
            DO UPDATE SET
                canonical_id = EXCLUDED.canonical_id,
                file_path = EXCLUDED.file_path,
                start_line = EXCLUDED.start_line,
                end_line = EXCLUDED.end_line,
                source = EXCLUDED.source,
                vulnerability_type = EXCLUDED.vulnerability_type,
                confidence = EXCLUDED.confidence,
                evidence_uri = EXCLUDED.evidence_uri,
                source_commit_url = EXCLUDED.source_commit_url,
                patch_url = EXCLUDED.patch_url,
                affected_versions = EXCLUDED.affected_versions,
                fixed_versions = EXCLUDED.fixed_versions,
                updated_at = NOW()
            RETURNING mapping_id, created_at, updated_at
            """;

        await using var conn = await OpenConnectionAsync(ct);
        await using var cmd = new NpgsqlCommand(sql, conn);
        // Optional fields are sent as DBNull so the columns are set to SQL NULL.
        cmd.Parameters.AddWithValue("@cveId", mapping.CveId);
        cmd.Parameters.AddWithValue("@purl", mapping.Purl);
        cmd.Parameters.AddWithValue("@symbolName", mapping.SymbolName);
        cmd.Parameters.AddWithValue("@canonicalId", (object?)mapping.CanonicalId ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@filePath", (object?)mapping.FilePath ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@startLine", (object?)mapping.StartLine ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@endLine", (object?)mapping.EndLine ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@source", MapSourceToString(mapping.Source));
        cmd.Parameters.AddWithValue("@vulnType", MapVulnTypeToString(mapping.VulnType));
        cmd.Parameters.AddWithValue("@confidence", mapping.Confidence);
        cmd.Parameters.AddWithValue("@evidenceUri", (object?)mapping.EvidenceUri ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@sourceCommitUrl", (object?)mapping.SourceCommitUrl ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@patchUrl", (object?)mapping.PatchUrl ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@affectedVersions", (object?)mapping.AffectedVersions?.ToArray() ?? DBNull.Value);
        cmd.Parameters.AddWithValue("@fixedVersions", (object?)mapping.FixedVersions?.ToArray() ?? DBNull.Value);

        await using var reader = await cmd.ExecuteReaderAsync(ct);
        if (await reader.ReadAsync(ct))
        {
            // Fold the server-assigned identity and timestamps back into the input record.
            return mapping with
            {
                MappingId = reader.GetGuid(0),
                CreatedAt = reader.GetDateTime(1),
                UpdatedAt = reader.GetDateTime(2)
            };
        }

        throw new InvalidOperationException("Upsert did not return a result");
    }

    /// <inheritdoc />
    public async Task<int> GetMappingCountAsync(CancellationToken ct = default)
    {
        const string sql = "SELECT COUNT(*) FROM reachability.cve_symbol_mappings";
        await using var conn = await OpenConnectionAsync(ct);
        await using var cmd = new NpgsqlCommand(sql, conn);
        // COUNT(*) comes back as bigint; Convert.ToInt32 narrows it (overflows past int.MaxValue rows).
        var result = await cmd.ExecuteScalarAsync(ct);
        return Convert.ToInt32(result);
    }

    // Opens a new (pooled) connection; caller is responsible for disposal.
    private async Task<NpgsqlConnection> OpenConnectionAsync(CancellationToken ct)
    {
        var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        return conn;
    }

    // Ordinals must match the SELECT column order used by the query methods above.
    private static CveSinkMapping MapFromReader(NpgsqlDataReader reader)
    {
        return new CveSinkMapping
        {
            MappingId = reader.GetGuid(0),
            CveId = reader.GetString(1),
            Purl = reader.GetString(2),
            SymbolName = reader.GetString(3),
            CanonicalId = reader.IsDBNull(4) ? null : reader.GetString(4),
            FilePath = reader.IsDBNull(5) ? null : reader.GetString(5),
            StartLine = reader.IsDBNull(6) ? null : reader.GetInt32(6),
            EndLine = reader.IsDBNull(7) ? null : reader.GetInt32(7),
            Source = MapStringToSource(reader.GetString(8)),
            VulnType = MapStringToVulnType(reader.GetString(9)),
            Confidence = reader.GetDecimal(10),
            EvidenceUri = reader.IsDBNull(11) ? null : reader.GetString(11),
            SourceCommitUrl = reader.IsDBNull(12) ? null : reader.GetString(12),
            PatchUrl = reader.IsDBNull(13) ? null : reader.GetString(13),
            AffectedVersions = reader.IsDBNull(14) ? null : (string[])reader.GetValue(14),
            FixedVersions = reader.IsDBNull(15) ? null : (string[])reader.GetValue(15),
            CreatedAt = reader.GetDateTime(16),
            UpdatedAt = reader.GetDateTime(17)
        };
    }

    // String forms below must stay in sync with the reachability.mapping_source
    // and reachability.vulnerability_type PostgreSQL enum labels.
    private static string MapSourceToString(MappingSource source) => source switch
    {
        MappingSource.PatchAnalysis => "patch_analysis",
        MappingSource.OsvAdvisory => "osv_advisory",
        MappingSource.NvdCpe => "nvd_cpe",
        MappingSource.ManualCuration => "manual_curation",
        MappingSource.FuzzingCorpus => "fuzzing_corpus",
        MappingSource.ExploitDatabase => "exploit_database",
        _ => "unknown"
    };

    private static MappingSource MapStringToSource(string source) => source switch
    {
        "patch_analysis" => MappingSource.PatchAnalysis,
        "osv_advisory" => MappingSource.OsvAdvisory,
        "nvd_cpe" => MappingSource.NvdCpe,
        "manual_curation" => MappingSource.ManualCuration,
        "fuzzing_corpus" => MappingSource.FuzzingCorpus,
        "exploit_database" => MappingSource.ExploitDatabase,
        _ => MappingSource.Unknown
    };

    private static string MapVulnTypeToString(VulnerabilityType vulnType) => vulnType switch
    {
        VulnerabilityType.Source => "source",
        VulnerabilityType.Sink => "sink",
        VulnerabilityType.Gadget => "gadget",
        VulnerabilityType.BothSourceAndSink => "both_source_and_sink",
        _ => "unknown"
    };

    private static VulnerabilityType MapStringToVulnType(string vulnType) => vulnType switch
    {
        "source" => VulnerabilityType.Source,
        "sink" => VulnerabilityType.Sink,
        "gadget" => VulnerabilityType.Gadget,
        "both_source_and_sink" => VulnerabilityType.BothSourceAndSink,
        _ => VulnerabilityType.Unknown
    };
}

View File

@@ -1,202 +1,6 @@
namespace StellaOps.Scanner.Reachability;
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
/// <summary>
/// Security-relevant sink categories for reachability analysis.
/// </summary>
/// <remarks>
/// Serialized as the SCREAMING_SNAKE_CASE wire names declared via
/// <c>JsonStringEnumMemberName</c> (e.g. <c>CMD_EXEC</c>); changing a wire name
/// is a breaking change for persisted/emitted JSON.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<SinkCategory>))]
public enum SinkCategory
{
    /// <summary>Command/process execution (e.g., Runtime.exec, Process.Start)</summary>
    [JsonStringEnumMemberName("CMD_EXEC")]
    CmdExec,

    /// <summary>Unsafe deserialization (e.g., BinaryFormatter, pickle.loads)</summary>
    [JsonStringEnumMemberName("UNSAFE_DESER")]
    UnsafeDeser,

    /// <summary>Raw SQL execution (e.g., SqlCommand with string concat)</summary>
    [JsonStringEnumMemberName("SQL_RAW")]
    SqlRaw,

    /// <summary>SQL injection (e.g., unparameterized queries with user input)</summary>
    [JsonStringEnumMemberName("SQL_INJECTION")]
    SqlInjection,

    /// <summary>Server-side request forgery (e.g., HttpClient with user input)</summary>
    [JsonStringEnumMemberName("SSRF")]
    Ssrf,

    /// <summary>Arbitrary file write (e.g., File.WriteAllBytes with user path)</summary>
    [JsonStringEnumMemberName("FILE_WRITE")]
    FileWrite,

    /// <summary>Path traversal (e.g., Path.Combine with ../)</summary>
    [JsonStringEnumMemberName("PATH_TRAVERSAL")]
    PathTraversal,

    /// <summary>Template/expression injection (e.g., Razor, JEXL)</summary>
    [JsonStringEnumMemberName("TEMPLATE_INJECTION")]
    TemplateInjection,

    /// <summary>Weak cryptography (e.g., MD5, DES, ECB mode)</summary>
    [JsonStringEnumMemberName("CRYPTO_WEAK")]
    CryptoWeak,

    /// <summary>Authorization bypass (e.g., JWT none alg, missing authz check)</summary>
    [JsonStringEnumMemberName("AUTHZ_BYPASS")]
    AuthzBypass,

    /// <summary>LDAP injection (e.g., DirContext.search with user input)</summary>
    [JsonStringEnumMemberName("LDAP_INJECTION")]
    LdapInjection,

    /// <summary>XPath injection (e.g., XPath.evaluate with user input)</summary>
    [JsonStringEnumMemberName("XPATH_INJECTION")]
    XPathInjection,

    /// <summary>XML External Entity injection (XXE)</summary>
    [JsonStringEnumMemberName("XXE")]
    XxeInjection,

    /// <summary>Code/expression injection (e.g., eval, ScriptEngine)</summary>
    [JsonStringEnumMemberName("CODE_INJECTION")]
    CodeInjection,

    /// <summary>Log injection (e.g., unvalidated user input in logs)</summary>
    [JsonStringEnumMemberName("LOG_INJECTION")]
    LogInjection,

    /// <summary>Reflection-based attacks (e.g., Class.forName with user input)</summary>
    [JsonStringEnumMemberName("REFLECTION")]
    Reflection,

    /// <summary>Open redirect (e.g., sendRedirect with user-controlled URL)</summary>
    [JsonStringEnumMemberName("OPEN_REDIRECT")]
    OpenRedirect
}
/// <summary>
/// A known dangerous sink with its metadata.
/// </summary>
/// <param name="Category">The security category of the sink.</param>
/// <param name="SymbolPattern">Substring matched (case-insensitively, by SinkRegistry.MatchSink) against observed symbol names.</param>
/// <param name="Language">Lower-case language key (e.g. "dotnet", "java", "node", "python").</param>
/// <param name="Framework">Optional framework the sink belongs to (e.g. "Spring"); null for core/library sinks.</param>
/// <param name="Description">Optional human-readable description; not populated by the built-in registry.</param>
/// <param name="CweId">Associated CWE identifier (e.g. "CWE-78"), when known.</param>
/// <param name="SeverityWeight">Relative severity weight; defaults to 1.0.</param>
public sealed record SinkDefinition(
    SinkCategory Category,
    string SymbolPattern,
    string Language,
    string? Framework = null,
    string? Description = null,
    string? CweId = null,
    double SeverityWeight = 1.0);
/// <summary>
/// Registry of known dangerous sinks per language.
/// </summary>
public static class SinkRegistry
{
    // Immutable lookup built once at type initialization; keys are lower-case language names.
    private static readonly FrozenDictionary<string, ImmutableArray<SinkDefinition>> KnownSinks = CreateKnownSinks();

    private static FrozenDictionary<string, ImmutableArray<SinkDefinition>> CreateKnownSinks()
    {
        var byLanguage = new Dictionary<string, List<SinkDefinition>>(StringComparer.Ordinal);

        // Appends one sink definition to its language bucket, creating the bucket on demand.
        void Register(string language, SinkCategory category, string symbolPattern, string? cweId = null, string? framework = null)
        {
            if (!byLanguage.TryGetValue(language, out var bucket))
            {
                bucket = [];
                byLanguage[language] = bucket;
            }

            bucket.Add(new SinkDefinition(
                Category: category,
                SymbolPattern: symbolPattern,
                Language: language,
                Framework: framework,
                CweId: cweId));
        }

        // .NET sinks
        Register("dotnet", SinkCategory.CmdExec, "System.Diagnostics.Process.Start", cweId: "CWE-78");
        Register("dotnet", SinkCategory.CmdExec, "System.Diagnostics.ProcessStartInfo", cweId: "CWE-78");
        Register("dotnet", SinkCategory.UnsafeDeser, "System.Runtime.Serialization.Formatters.Binary.BinaryFormatter.Deserialize", cweId: "CWE-502");
        Register("dotnet", SinkCategory.UnsafeDeser, "Newtonsoft.Json.JsonConvert.DeserializeObject", cweId: "CWE-502", framework: "Newtonsoft.Json");
        Register("dotnet", SinkCategory.SqlRaw, "System.Data.SqlClient.SqlCommand.ExecuteReader", cweId: "CWE-89");
        Register("dotnet", SinkCategory.SqlRaw, "Microsoft.EntityFrameworkCore.RelationalQueryableExtensions.FromSqlRaw", cweId: "CWE-89", framework: "EFCore");
        Register("dotnet", SinkCategory.Ssrf, "System.Net.Http.HttpClient.GetAsync", cweId: "CWE-918");
        Register("dotnet", SinkCategory.FileWrite, "System.IO.File.WriteAllBytes", cweId: "CWE-73");
        Register("dotnet", SinkCategory.PathTraversal, "System.IO.Path.Combine", cweId: "CWE-22");
        Register("dotnet", SinkCategory.CryptoWeak, "System.Security.Cryptography.MD5.Create", cweId: "CWE-327");
        Register("dotnet", SinkCategory.CryptoWeak, "System.Security.Cryptography.DES.Create", cweId: "CWE-327");

        // Java sinks
        Register("java", SinkCategory.CmdExec, "java.lang.Runtime.exec", cweId: "CWE-78");
        Register("java", SinkCategory.CmdExec, "java.lang.ProcessBuilder.start", cweId: "CWE-78");
        Register("java", SinkCategory.UnsafeDeser, "java.io.ObjectInputStream.readObject", cweId: "CWE-502");
        Register("java", SinkCategory.SqlRaw, "java.sql.Statement.executeQuery", cweId: "CWE-89");
        Register("java", SinkCategory.Ssrf, "java.net.URL.openConnection", cweId: "CWE-918");
        Register("java", SinkCategory.TemplateInjection, "org.springframework.expression.ExpressionParser.parseExpression", cweId: "CWE-917", framework: "Spring");

        // Node.js sinks
        Register("node", SinkCategory.CmdExec, "child_process.exec", cweId: "CWE-78");
        Register("node", SinkCategory.CmdExec, "child_process.spawn", cweId: "CWE-78");
        Register("node", SinkCategory.UnsafeDeser, "node-serialize.unserialize", cweId: "CWE-502");
        Register("node", SinkCategory.SqlRaw, "mysql.query", cweId: "CWE-89");
        Register("node", SinkCategory.PathTraversal, "path.join", cweId: "CWE-22");
        Register("node", SinkCategory.TemplateInjection, "eval", cweId: "CWE-94");

        // Python sinks
        Register("python", SinkCategory.CmdExec, "os.system", cweId: "CWE-78");
        Register("python", SinkCategory.CmdExec, "subprocess.call", cweId: "CWE-78");
        Register("python", SinkCategory.UnsafeDeser, "pickle.loads", cweId: "CWE-502");
        Register("python", SinkCategory.UnsafeDeser, "yaml.load", cweId: "CWE-502");
        Register("python", SinkCategory.SqlRaw, "sqlite3.Cursor.execute", cweId: "CWE-89");
        Register("python", SinkCategory.TemplateInjection, "jinja2.Template.render", cweId: "CWE-1336", framework: "Jinja2");

        return byLanguage.ToFrozenDictionary(
            pair => pair.Key,
            pair => pair.Value.ToImmutableArray(),
            StringComparer.Ordinal);
    }

    /// <summary>
    /// Gets all sink definitions for a language; empty for blank or unknown languages.
    /// </summary>
    public static ImmutableArray<SinkDefinition> GetSinksForLanguage(string language)
    {
        if (string.IsNullOrWhiteSpace(language))
        {
            return ImmutableArray<SinkDefinition>.Empty;
        }

        // Registry keys are lower-case, so normalize the caller's spelling first.
        var key = language.Trim().ToLowerInvariant();
        return KnownSinks.TryGetValue(key, out var sinks) ? sinks : ImmutableArray<SinkDefinition>.Empty;
    }

    /// <summary>
    /// Gets all registered languages.
    /// </summary>
    public static IEnumerable<string> GetRegisteredLanguages() => KnownSinks.Keys;

    /// <summary>
    /// Checks if a symbol matches any known sink; returns the first (registration-order)
    /// definition whose pattern is a case-insensitive substring of the symbol, or null.
    /// </summary>
    public static SinkDefinition? MatchSink(string language, string symbol)
    {
        if (string.IsNullOrWhiteSpace(language) || string.IsNullOrWhiteSpace(symbol))
        {
            return null;
        }

        foreach (var sink in GetSinksForLanguage(language))
        {
            if (symbol.Contains(sink.SymbolPattern, StringComparison.OrdinalIgnoreCase))
            {
                return sink;
            }
        }

        return null;
    }
}
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
// SinkCategory, SinkDefinition, and SinkRegistry are now defined in StellaOps.Scanner.Contracts.
// This file is kept for backward compatibility - all types are imported via global using.
global using StellaOps.Scanner.Contracts;

View File

@@ -193,6 +193,34 @@ public sealed record ReachabilityLayer2
/// <summary>Alternative symbols that could be loaded instead</summary>
public ImmutableArray<string> Alternatives { get; init; } = [];
/// <summary>Resolved symbols from binary analysis</summary>
public ImmutableArray<ResolvedSymbol> ResolvedSymbols { get; init; } = [];
}
/// <summary>
/// A symbol that was resolved during binary analysis.
/// </summary>
/// <param name="Name">Symbol name as recorded in the binary's symbol table.</param>
/// <param name="Address">Resolved address of the symbol; semantics of 0 depend on the producer — confirm before relying on it.</param>
/// <param name="IsWeak">Whether the symbol has weak binding (may be overridden at link/load time).</param>
/// <param name="BindingType">Binding classification of the symbol (see <see cref="SymbolBinding"/>).</param>
public sealed record ResolvedSymbol(
    string Name,
    ulong Address,
    bool IsWeak,
    SymbolBinding BindingType
);
/// <summary>
/// Symbol binding type in ELF/PE binaries.
/// </summary>
/// <remarks>
/// Member names mirror the ELF STB_* binding constants; how PE symbols are mapped
/// onto these values is producer-specific — confirm with the binary analyzer.
/// </remarks>
public enum SymbolBinding
{
    /// <summary>Local symbol (STB_LOCAL)</summary>
    Local,

    /// <summary>Global symbol (STB_GLOBAL)</summary>
    Global,

    /// <summary>Weak symbol (STB_WEAK)</summary>
    Weak
}
/// <summary>

View File

@@ -11,12 +11,14 @@
<PackageReference Include="Npgsql" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Contracts\StellaOps.Scanner.Contracts.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Explainability\StellaOps.Scanner.Explainability.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.SmartDiff\StellaOps.Scanner.SmartDiff.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.CallGraph\StellaOps.Scanner.CallGraph.csproj" />
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
@@ -25,5 +27,7 @@
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\..\..\Signals\__Libraries\StellaOps.Signals.Ebpf\StellaOps.Signals.Ebpf.csproj" />
<ProjectReference Include="..\..\..\BinaryIndex\__Libraries\StellaOps.BinaryIndex.Ghidra\StellaOps.BinaryIndex.Ghidra.csproj" />
<ProjectReference Include="..\..\..\BinaryIndex\__Libraries\StellaOps.BinaryIndex.Decompiler\StellaOps.BinaryIndex.Decompiler.csproj" />
</ItemGroup>
</Project>

Some files were not shown because too many files have changed in this diff Show More