feat: add Attestation Chain and Triage Evidence API clients and models
- Implemented Attestation Chain API client with methods for verifying, fetching, and managing attestation chains.
- Created models for Attestation Chain, including DSSE envelope structures and verification results.
- Developed Triage Evidence API client for fetching finding evidence, including methods for evidence retrieval by CVE and component.
- Added models for Triage Evidence, encapsulating evidence responses, entry points, boundary proofs, and VEX evidence.
- Introduced mock implementations for both API clients to facilitate testing and development.
This commit is contained in:
@@ -0,0 +1,65 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
/// <summary>
/// One line of the Build-ID index NDJSON file: a single Build-ID to PURL mapping.
/// Each line of the index is one JSON object in this shape.
/// </summary>
public sealed class BuildIdIndexEntry
{
    /// <summary>
    /// Prefixed Build-ID (e.g. "gnu-build-id:abc123", "pe-cv:guid-age", "macho-uuid:xyz").
    /// </summary>
    [JsonPropertyName("build_id")]
    public required string BuildId { get; init; }

    /// <summary>Package URL identifying the binary.</summary>
    [JsonPropertyName("purl")]
    public required string Purl { get; init; }

    /// <summary>Package version (may be absent when derivable from the PURL).</summary>
    [JsonPropertyName("version")]
    public string? Version { get; init; }

    /// <summary>Source distribution name (debian, ubuntu, alpine, fedora, ...).</summary>
    [JsonPropertyName("distro")]
    public string? Distro { get; init; }

    /// <summary>Match confidence string: "exact", "inferred", or "heuristic". Defaults to "exact".</summary>
    [JsonPropertyName("confidence")]
    public string Confidence { get; init; } = "exact";

    /// <summary>Timestamp (ISO-8601) at which this entry was indexed.</summary>
    [JsonPropertyName("indexed_at")]
    public DateTimeOffset? IndexedAt { get; init; }

    /// <summary>
    /// Projects this raw NDJSON entry into a strongly-typed lookup result.
    /// A missing timestamp maps to DateTimeOffset.MinValue.
    /// </summary>
    public BuildIdLookupResult ToLookupResult()
        => new(BuildId, Purl, Version, Distro, ParseConfidence(Confidence), IndexedAt ?? DateTimeOffset.MinValue);

    // Unknown or null confidence strings fall back to the weakest level (Heuristic).
    private static BuildIdConfidence ParseConfidence(string? value)
    {
        switch (value?.ToLowerInvariant())
        {
            case "exact":
                return BuildIdConfidence.Exact;
            case "inferred":
                return BuildIdConfidence.Inferred;
            default:
                return BuildIdConfidence.Heuristic;
        }
    }
}
|
||||
@@ -0,0 +1,38 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
/// <summary>
/// Settings that control how the offline Build-ID index is located, verified,
/// and cached in memory.
/// </summary>
public sealed class BuildIdIndexOptions
{
    /// <summary>Filesystem path of the offline NDJSON index file.</summary>
    public string? IndexPath { get; set; }

    /// <summary>Filesystem path of the DSSE signature file covering the index.</summary>
    public string? SignaturePath { get; set; }

    /// <summary>
    /// When true, the index must pass DSSE signature verification before use.
    /// Production deployments should keep this enabled (the default).
    /// </summary>
    public bool RequireSignature { get; set; } = true;

    /// <summary>Age beyond which the index is considered stale for freshness checks.</summary>
    public TimeSpan MaxIndexAge { get; set; } = TimeSpan.FromDays(30);

    /// <summary>Enables the in-memory cache of index entries.</summary>
    public bool EnableCache { get; set; } = true;

    /// <summary>Upper bound on the number of entries cached in memory.</summary>
    public int MaxCacheEntries { get; set; } = 100_000;
}
|
||||
@@ -0,0 +1,39 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
/// <summary>
/// Confidence level for Build-ID to PURL mappings.
/// Later members indicate weaker provenance for the mapping.
/// </summary>
public enum BuildIdConfidence
{
    /// <summary>
    /// Exact match from official distro metadata or verified source.
    /// </summary>
    Exact,

    /// <summary>
    /// Inferred from package metadata with high confidence.
    /// </summary>
    Inferred,

    /// <summary>
    /// Best-guess heuristic (version pattern matching, etc.).
    /// Also the fallback when a confidence string is missing or unrecognized.
    /// </summary>
    Heuristic
}
|
||||
|
||||
/// <summary>
/// Result of a Build-ID lookup.
/// </summary>
/// <param name="BuildId">The queried Build-ID (ELF build-id, PE GUID+Age, Mach-O UUID).</param>
/// <param name="Purl">Package URL for the binary.</param>
/// <param name="Version">Package version if known.</param>
/// <param name="SourceDistro">Source distribution (debian, alpine, fedora, etc.).</param>
/// <param name="Confidence">Confidence level of the match.</param>
/// <param name="IndexedAt">When this mapping was indexed; DateTimeOffset.MinValue when the source entry carried no timestamp.</param>
public sealed record BuildIdLookupResult(
    string BuildId,
    string Purl,
    string? Version,
    string? SourceDistro,
    BuildIdConfidence Confidence,
    DateTimeOffset IndexedAt);
|
||||
@@ -0,0 +1,42 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
/// <summary>
/// Interface for Build-ID to PURL index lookups.
/// Enables binary identification in distroless/scratch images where no
/// package manager metadata is available.
/// </summary>
public interface IBuildIdIndex
{
    /// <summary>
    /// Look up a single Build-ID.
    /// </summary>
    /// <param name="buildId">The Build-ID to look up (e.g., "gnu-build-id:abc123", "pe-cv:guid-age", "macho-uuid:xyz").</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Lookup result if found; null otherwise.</returns>
    Task<BuildIdLookupResult?> LookupAsync(string buildId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Look up multiple Build-IDs efficiently.
    /// </summary>
    /// <param name="buildIds">Build-IDs to look up.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Found results only; IDs with no match are omitted.</returns>
    Task<IReadOnlyList<BuildIdLookupResult>> BatchLookupAsync(
        IEnumerable<string> buildIds,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the number of entries in the index.
    /// </summary>
    int Count { get; }

    /// <summary>
    /// Gets whether the index has been loaded (LoadAsync has completed at least once).
    /// </summary>
    bool IsLoaded { get; }

    /// <summary>
    /// Load or reload the index from the configured source.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task LoadAsync(CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,207 @@
|
||||
using System.Collections.Frozen;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
/// <summary>
/// Offline Build-ID index that loads Build-ID to PURL mappings from an NDJSON file.
/// Enables binary identification in distroless/scratch images.
/// Lookups are served from an immutable <see cref="FrozenDictionary{TKey,TValue}"/>
/// that is replaced wholesale by <see cref="LoadAsync"/>.
/// </summary>
public sealed class OfflineBuildIdIndex : IBuildIdIndex
{
    private readonly BuildIdIndexOptions _options;
    private readonly ILogger<OfflineBuildIdIndex> _logger;

    // Swapped atomically on load; readers never observe a partially-built index.
    private FrozenDictionary<string, BuildIdLookupResult> _index = FrozenDictionary<string, BuildIdLookupResult>.Empty;
    private bool _isLoaded;

    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    /// <summary>
    /// Creates a new offline Build-ID index.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="options"/> or <paramref name="logger"/> is null.</exception>
    public OfflineBuildIdIndex(IOptions<BuildIdIndexOptions> options, ILogger<OfflineBuildIdIndex> logger)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(logger);

        _options = options.Value;
        _logger = logger;
    }

    /// <inheritdoc />
    public int Count => _index.Count;

    /// <inheritdoc />
    public bool IsLoaded => _isLoaded;

    /// <inheritdoc />
    public Task<BuildIdLookupResult?> LookupAsync(string buildId, CancellationToken cancellationToken = default)
    {
        // FIX: the cancellation token was previously accepted but never observed.
        if (cancellationToken.IsCancellationRequested)
        {
            return Task.FromCanceled<BuildIdLookupResult?>(cancellationToken);
        }

        if (string.IsNullOrWhiteSpace(buildId))
        {
            return Task.FromResult<BuildIdLookupResult?>(null);
        }

        // Normalize Build-ID (lowercase, trim, ensure a format prefix).
        var normalized = NormalizeBuildId(buildId);
        var result = _index.TryGetValue(normalized, out var entry) ? entry : null;

        return Task.FromResult(result);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<BuildIdLookupResult>> BatchLookupAsync(
        IEnumerable<string> buildIds,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(buildIds);

        var results = new List<BuildIdLookupResult>();

        foreach (var buildId in buildIds)
        {
            // FIX: honor cancellation between lookups (previously ignored).
            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled<IReadOnlyList<BuildIdLookupResult>>(cancellationToken);
            }

            if (string.IsNullOrWhiteSpace(buildId))
            {
                continue;
            }

            var normalized = NormalizeBuildId(buildId);
            if (_index.TryGetValue(normalized, out var entry))
            {
                results.Add(entry);
            }
        }

        return Task.FromResult<IReadOnlyList<BuildIdLookupResult>>(results);
    }

    /// <inheritdoc />
    public async Task LoadAsync(CancellationToken cancellationToken = default)
    {
        // No path configured: degrade to an empty (but "loaded") index so
        // lookups simply miss instead of throwing.
        if (string.IsNullOrWhiteSpace(_options.IndexPath))
        {
            _logger.LogWarning("No Build-ID index path configured; index will be empty");
            _index = FrozenDictionary<string, BuildIdLookupResult>.Empty;
            _isLoaded = true;
            return;
        }

        if (!File.Exists(_options.IndexPath))
        {
            _logger.LogWarning("Build-ID index file not found at {IndexPath}; index will be empty", _options.IndexPath);
            _index = FrozenDictionary<string, BuildIdLookupResult>.Empty;
            _isLoaded = true;
            return;
        }

        // TODO: BID-006 - Verify DSSE signature if RequireSignature is true

        var entries = new Dictionary<string, BuildIdLookupResult>(StringComparer.OrdinalIgnoreCase);
        var lineNumber = 0;
        var errorCount = 0;

        await using var stream = File.OpenRead(_options.IndexPath);
        using var reader = new StreamReader(stream);

        // NDJSON: one JSON object per line.
        while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line)
        {
            lineNumber++;

            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            // Skip comment lines (used for manifest headers).
            if (line.StartsWith('#') || line.StartsWith("//", StringComparison.Ordinal))
            {
                continue;
            }

            try
            {
                var entry = JsonSerializer.Deserialize<BuildIdIndexEntry>(line, JsonOptions);
                if (entry is null || string.IsNullOrWhiteSpace(entry.BuildId) || string.IsNullOrWhiteSpace(entry.Purl))
                {
                    errorCount++;
                    continue;
                }

                // Duplicate Build-IDs: the last line wins.
                var normalized = NormalizeBuildId(entry.BuildId);
                entries[normalized] = entry.ToLookupResult();
            }
            catch (JsonException ex)
            {
                errorCount++;
                // Cap per-line log noise; a summary warning is emitted below.
                if (errorCount <= 10)
                {
                    _logger.LogWarning(ex, "Failed to parse Build-ID index line {LineNumber}", lineNumber);
                }
            }
        }

        if (errorCount > 0)
        {
            _logger.LogWarning("Build-ID index had {ErrorCount} parse errors out of {TotalLines} lines", errorCount, lineNumber);
        }

        _index = entries.ToFrozenDictionary(StringComparer.OrdinalIgnoreCase);
        _isLoaded = true;

        _logger.LogInformation("Loaded Build-ID index with {EntryCount} entries from {IndexPath}", _index.Count, _options.IndexPath);

        // Freshness check: warn when even the newest entry exceeds MaxIndexAge.
        if (_options.MaxIndexAge > TimeSpan.Zero)
        {
            var oldestAllowed = DateTimeOffset.UtcNow - _options.MaxIndexAge;
            var latestEntry = entries.Values.MaxBy(e => e.IndexedAt);
            if (latestEntry is not null && latestEntry.IndexedAt < oldestAllowed)
            {
                _logger.LogWarning(
                    "Build-ID index may be stale. Latest entry from {LatestDate}, max age is {MaxAge}",
                    latestEntry.IndexedAt,
                    _options.MaxIndexAge);
            }
        }
    }

    /// <summary>
    /// Normalize a Build-ID for consistent lookup: trim, lowercase, and add a
    /// format prefix when the raw value looks like a bare hex digest.
    /// </summary>
    private static string NormalizeBuildId(string buildId)
    {
        var normalized = buildId.Trim().ToLowerInvariant();

        // Already-prefixed forms pass through unchanged:
        //   ELF:    "gnu-build-id:<hex>"
        //   PE:     "pe-cv:<guid>-<age>"
        //   Mach-O: "macho-uuid:<hex>"
        if (!normalized.Contains(':'))
        {
            if (normalized.Length == 32 && IsHex(normalized))
            {
                // 32 hex chars could be a Mach-O UUID (128 bits) or a short ELF
                // build-id, so use the neutral "build-id" prefix.
                normalized = $"build-id:{normalized}";
            }
            else if (normalized.Length == 40 && IsHex(normalized))
            {
                // 40 hex chars: ELF SHA-1 build-id.
                normalized = $"gnu-build-id:{normalized}";
            }
        }

        return normalized;
    }

    private static bool IsHex(string s) => s.All(char.IsAsciiHexDigit);
}
|
||||
@@ -0,0 +1,16 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Code signature information extracted from an LC_CODE_SIGNATURE load command.
/// </summary>
/// <param name="TeamId">Team identifier (10-character Apple team ID), when present.</param>
/// <param name="SigningId">Signing identifier (usually the bundle ID), when present.</param>
/// <param name="CdHash">Code Directory hash (SHA-256 over the CodeDirectory blob, lowercase hex).</param>
/// <param name="HasHardenedRuntime">Whether the hardened-runtime flag is set in the CodeDirectory.</param>
/// <param name="Entitlements">Entitlement keys only (values are omitted for privacy).</param>
public sealed record MachOCodeSignature(
    string? TeamId,
    string? SigningId,
    string? CdHash,
    bool HasHardenedRuntime,
    IReadOnlyList<string> Entitlements);
|
||||
@@ -0,0 +1,24 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Full identity information extracted from a Mach-O file (one per slice for
/// fat/universal binaries).
/// </summary>
/// <param name="CpuType">CPU type name (x86_64, arm64, etc.); null when unrecognized.</param>
/// <param name="CpuSubtype">Raw CPU subtype value, kept for variant detection.</param>
/// <param name="Uuid">LC_UUID in lowercase hex (no dashes); null when the command is absent.</param>
/// <param name="IsFatBinary">Whether this identity came from a slice of a fat/universal binary.</param>
/// <param name="Platform">Platform from LC_BUILD_VERSION (or inferred from LC_VERSION_MIN_*).</param>
/// <param name="MinOsVersion">Minimum OS version from LC_VERSION_MIN_* or LC_BUILD_VERSION.</param>
/// <param name="SdkVersion">SDK version from LC_BUILD_VERSION.</param>
/// <param name="CodeSignature">Code signature information; null when unsigned.</param>
/// <param name="Exports">Exported symbols from LC_DYLD_INFO_ONLY or LC_DYLD_EXPORTS_TRIE.</param>
public sealed record MachOIdentity(
    string? CpuType,
    uint CpuSubtype,
    string? Uuid,
    bool IsFatBinary,
    MachOPlatform Platform,
    string? MinOsVersion,
    string? SdkVersion,
    MachOCodeSignature? CodeSignature,
    IReadOnlyList<string> Exports);
|
||||
@@ -0,0 +1,46 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Mach-O platform values from LC_BUILD_VERSION.
/// Numeric values mirror Apple's PLATFORM_* constants as read from the load command.
/// </summary>
public enum MachOPlatform : uint
{
    /// <summary>Unknown platform.</summary>
    Unknown = 0,

    /// <summary>macOS.</summary>
    MacOS = 1,

    /// <summary>iOS.</summary>
    iOS = 2,

    /// <summary>tvOS.</summary>
    TvOS = 3,

    /// <summary>watchOS.</summary>
    WatchOS = 4,

    /// <summary>BridgeOS.</summary>
    BridgeOS = 5,

    /// <summary>Mac Catalyst (iPad apps on Mac).</summary>
    MacCatalyst = 6,

    /// <summary>iOS Simulator.</summary>
    iOSSimulator = 7,

    /// <summary>tvOS Simulator.</summary>
    TvOSSimulator = 8,

    /// <summary>watchOS Simulator.</summary>
    WatchOSSimulator = 9,

    /// <summary>DriverKit.</summary>
    DriverKit = 10,

    /// <summary>visionOS.</summary>
    VisionOS = 11,

    /// <summary>visionOS Simulator.</summary>
    VisionOSSimulator = 12
}
|
||||
640
src/Scanner/StellaOps.Scanner.Analyzers.Native/MachOReader.cs
Normal file
640
src/Scanner/StellaOps.Scanner.Analyzers.Native/MachOReader.cs
Normal file
@@ -0,0 +1,640 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Result from parsing a Mach-O file.
/// </summary>
/// <param name="Path">File path the binary was read from.</param>
/// <param name="LayerDigest">Container layer digest if applicable; otherwise null.</param>
/// <param name="Identities">List of identities (one per slice in a fat binary; exactly one for thin binaries).</param>
public sealed record MachOParseResult(
    string Path,
    string? LayerDigest,
    IReadOnlyList<MachOIdentity> Identities);
|
||||
|
||||
/// <summary>
|
||||
/// Full Mach-O file reader with identity extraction.
|
||||
/// Handles both single-arch and fat (universal) binaries.
|
||||
/// </summary>
|
||||
public static class MachOReader
|
||||
{
|
||||
// Mach-O magic numbers (first 4 bytes of a thin Mach-O image).
private const uint MH_MAGIC = 0xFEEDFACE; // 32-bit, native endian
private const uint MH_CIGAM = 0xCEFAEDFE; // 32-bit, reversed endian
private const uint MH_MAGIC_64 = 0xFEEDFACF; // 64-bit, native endian
private const uint MH_CIGAM_64 = 0xCFFAEDFE; // 64-bit, reversed endian

// Fat (universal) binary magic numbers.
private const uint FAT_MAGIC = 0xCAFEBABE; // Big-endian
private const uint FAT_CIGAM = 0xBEBAFECA; // Little-endian

// Load command types (values match mach-o/loader.h; the 0x80000000 bit is LC_REQ_DYLD).
private const uint LC_UUID = 0x1B;
private const uint LC_CODE_SIGNATURE = 0x1D;
private const uint LC_VERSION_MIN_MACOSX = 0x24;
private const uint LC_VERSION_MIN_IPHONEOS = 0x25;
private const uint LC_VERSION_MIN_WATCHOS = 0x30;
private const uint LC_VERSION_MIN_TVOS = 0x2F;
private const uint LC_BUILD_VERSION = 0x32;
private const uint LC_DYLD_INFO = 0x22;
private const uint LC_DYLD_INFO_ONLY = 0x80000022;
private const uint LC_DYLD_EXPORTS_TRIE = 0x80000033;

// Code signature blob magics (values match cs_blobs.h).
private const uint CSMAGIC_CODEDIRECTORY = 0xFADE0C02;
private const uint CSMAGIC_EMBEDDED_SIGNATURE = 0xFADE0CC0;
private const uint CSMAGIC_EMBEDDED_ENTITLEMENTS = 0xFADE7171;

// CPU types; the 0x01000000 bit is the 64-bit ABI flag (CPU_ARCH_ABI64).
private const int CPU_TYPE_X86 = 7;
private const int CPU_TYPE_X86_64 = CPU_TYPE_X86 | 0x01000000;
private const int CPU_TYPE_ARM = 12;
private const int CPU_TYPE_ARM64 = CPU_TYPE_ARM | 0x01000000;
|
||||
|
||||
/// <summary>
/// Parse a Mach-O file and extract full identity information.
/// Fat (universal) binaries yield one identity per architecture slice.
/// Returns null when the stream does not start with a recognizable magic.
/// </summary>
public static MachOParseResult? Parse(Stream stream, string path, string? layerDigest = null)
{
    if (!TryReadBytes(stream, 4, out var magicBytes))
    {
        return null;
    }

    // Rewind so the slice/thin parsers see the header from the beginning.
    stream.Position = 0;
    var magic = BinaryPrimitives.ReadUInt32BigEndian(magicBytes);

    if (magic == FAT_MAGIC || magic == FAT_CIGAM)
    {
        var sliceIdentities = ParseFatBinary(stream);
        if (sliceIdentities.Count == 0)
        {
            return null;
        }

        return new MachOParseResult(path, layerDigest, sliceIdentities);
    }

    // Single-architecture (thin) binary.
    var single = ParseSingleMachO(stream);
    if (single is null)
    {
        return null;
    }

    return new MachOParseResult(path, layerDigest, [single]);
}
|
||||
|
||||
/// <summary>
/// Try to extract just the identity without building a full parse result.
/// For fat binaries only the first slice's identity is returned.
/// </summary>
public static bool TryExtractIdentity(Stream stream, out MachOIdentity? identity)
{
    identity = null;

    if (!TryReadBytes(stream, 4, out var magicBytes))
    {
        return false;
    }

    stream.Position = 0;
    var magic = BinaryPrimitives.ReadUInt32BigEndian(magicBytes);

    if (magic == FAT_MAGIC || magic == FAT_CIGAM)
    {
        // No fast path for fat binaries yet: parse the slices and take the first.
        var slices = ParseFatBinary(stream);
        if (slices.Count > 0)
        {
            identity = slices[0];
        }
    }
    else
    {
        identity = ParseSingleMachO(stream);
    }

    return identity is not null;
}
|
||||
|
||||
/// <summary>
/// Parse a fat (universal) binary and return the identities of all slices.
/// Returns an empty list when the header cannot be read or looks implausible.
/// NOTE(review): slice offsets are absolute file offsets, so this assumes the
/// stream's origin is the start of the fat file — confirm with callers.
/// </summary>
public static IReadOnlyList<MachOIdentity> ParseFatBinary(Stream stream)
{
    var identities = new List<MachOIdentity>();

    // fat_header: magic(4), nfat_arch(4).
    if (!TryReadBytes(stream, 8, out var headerBytes))
    {
        return identities;
    }

    var magic = BinaryPrimitives.ReadUInt32BigEndian(headerBytes);
    var swapBytes = magic == FAT_CIGAM;
    var nfatArch = swapBytes
        ? BinaryPrimitives.ReadUInt32LittleEndian(headerBytes.AsSpan(4))
        : BinaryPrimitives.ReadUInt32BigEndian(headerBytes.AsSpan(4));

    if (nfatArch > 100)
    {
        // Sanity check: real universal binaries have a handful of slices;
        // a huge count indicates a corrupt or hostile header.
        return identities;
    }

    for (var i = 0; i < nfatArch; i++)
    {
        // Each fat_arch entry is 20 bytes.
        if (!TryReadBytes(stream, 20, out var archBytes))
        {
            break;
        }

        // Fat arch structure is always big-endian (unless FAT_CIGAM)
        uint offset, size;
        if (swapBytes)
        {
            // cputype(4), cpusubtype(4), offset(4), size(4), align(4)
            offset = BinaryPrimitives.ReadUInt32LittleEndian(archBytes.AsSpan(8));
            size = BinaryPrimitives.ReadUInt32LittleEndian(archBytes.AsSpan(12));
        }
        else
        {
            offset = BinaryPrimitives.ReadUInt32BigEndian(archBytes.AsSpan(8));
            size = BinaryPrimitives.ReadUInt32BigEndian(archBytes.AsSpan(12));
        }

        // Save position and parse the embedded Mach-O; `size` is read for
        // layout clarity but not currently enforced.
        var currentPos = stream.Position;
        stream.Position = offset;

        var sliceIdentity = ParseSingleMachO(stream, isFatSlice: true);
        if (sliceIdentity is not null)
        {
            identities.Add(sliceIdentity);
        }

        // Restore position so the next fat_arch entry is read correctly.
        stream.Position = currentPos;
    }

    return identities;
}
|
||||
|
||||
/// <summary>
/// Parse a single (thin) Mach-O image starting at the stream's current position.
/// Returns null when the bytes at the current position are not a Mach-O header.
/// </summary>
/// <param name="stream">Stream positioned at the start of the Mach-O header.</param>
/// <param name="isFatSlice">True when this image is one slice of a fat binary.</param>
private static MachOIdentity? ParseSingleMachO(Stream stream, bool isFatSlice = false)
{
    var startOffset = stream.Position;

    if (!TryReadBytes(stream, 4, out var magicBytes))
    {
        return null;
    }

    var magic = BinaryPrimitives.ReadUInt32LittleEndian(magicBytes);
    bool is64Bit;
    bool swapBytes;

    switch (magic)
    {
        case MH_MAGIC:
            is64Bit = false;
            swapBytes = false;
            break;
        case MH_CIGAM:
            is64Bit = false;
            swapBytes = true;
            break;
        case MH_MAGIC_64:
            is64Bit = true;
            swapBytes = false;
            break;
        case MH_CIGAM_64:
            is64Bit = true;
            swapBytes = true;
            break;
        default:
            return null;
    }

    // Re-read the full Mach header (mach_header is 28 bytes, mach_header_64 is 32).
    var headerSize = is64Bit ? 32 : 28;
    stream.Position = startOffset;

    if (!TryReadBytes(stream, headerSize, out var headerBytes))
    {
        return null;
    }

    // Header layout: magic(0), cputype(4), cpusubtype(8), filetype(12),
    // ncmds(16), sizeofcmds(20), flags(24)[, reserved(28)].
    var cpuType = ReadInt32(headerBytes, 4, swapBytes);
    var cpuSubtype = ReadUInt32(headerBytes, 8, swapBytes);
    var ncmds = ReadUInt32(headerBytes, 16, swapBytes);
    var sizeofcmds = ReadUInt32(headerBytes, 20, swapBytes);

    var cpuTypeName = GetCpuTypeName(cpuType);

    // Identity fields filled in while walking the load commands.
    string? uuid = null;
    var platform = MachOPlatform.Unknown;
    string? minOsVersion = null;
    string? sdkVersion = null;
    MachOCodeSignature? codeSignature = null;
    var exports = new List<string>();

    var loadCommandsStart = stream.Position;
    var loadCommandsEnd = loadCommandsStart + sizeofcmds;

    for (uint cmd = 0; cmd < ncmds && stream.Position < loadCommandsEnd; cmd++)
    {
        // Remember where this command begins so we can seek to the next one
        // no matter how many payload bytes the handler below consumes.
        var cmdStart = stream.Position;

        if (!TryReadBytes(stream, 8, out var cmdHeader))
        {
            break;
        }

        var cmdType = ReadUInt32(cmdHeader, 0, swapBytes);
        var cmdSize = ReadUInt32(cmdHeader, 4, swapBytes);

        if (cmdSize < 8)
        {
            // Malformed command size; bail out rather than loop forever.
            break;
        }

        var cmdDataSize = (int)cmdSize - 8;

        switch (cmdType)
        {
            case LC_UUID when cmdDataSize >= 16:
                if (TryReadBytes(stream, 16, out var uuidBytes))
                {
                    uuid = Convert.ToHexStringLower(uuidBytes);
                }

                break;

            case LC_BUILD_VERSION when cmdDataSize >= 16:
                if (TryReadBytes(stream, cmdDataSize, out var buildVersionBytes))
                {
                    // Payload: platform(4), minos(4), sdk(4), ntools(4), tools...
                    var platformValue = ReadUInt32(buildVersionBytes, 0, swapBytes);
                    platform = (MachOPlatform)platformValue;

                    var minos = ReadUInt32(buildVersionBytes, 4, swapBytes);
                    minOsVersion = FormatVersion(minos);

                    var sdk = ReadUInt32(buildVersionBytes, 8, swapBytes);
                    sdkVersion = FormatVersion(sdk);
                }

                break;

            case LC_VERSION_MIN_MACOSX:
            case LC_VERSION_MIN_IPHONEOS:
            case LC_VERSION_MIN_WATCHOS:
            case LC_VERSION_MIN_TVOS:
                if (TryReadBytes(stream, cmdDataSize, out var versionMinBytes))
                {
                    // LC_BUILD_VERSION (if seen first) takes precedence.
                    if (platform == MachOPlatform.Unknown)
                    {
                        platform = cmdType switch
                        {
                            LC_VERSION_MIN_MACOSX => MachOPlatform.MacOS,
                            LC_VERSION_MIN_IPHONEOS => MachOPlatform.iOS,
                            LC_VERSION_MIN_WATCHOS => MachOPlatform.WatchOS,
                            LC_VERSION_MIN_TVOS => MachOPlatform.TvOS,
                            _ => MachOPlatform.Unknown
                        };
                    }

                    if (versionMinBytes.Length >= 8)
                    {
                        // Payload: version(4), sdk(4).
                        var version = ReadUInt32(versionMinBytes, 0, swapBytes);
                        if (minOsVersion is null)
                        {
                            minOsVersion = FormatVersion(version);
                        }

                        var sdk = ReadUInt32(versionMinBytes, 4, swapBytes);
                        if (sdkVersion is null)
                        {
                            sdkVersion = FormatVersion(sdk);
                        }
                    }
                }

                break;

            case LC_CODE_SIGNATURE:
                if (TryReadBytes(stream, cmdDataSize, out var codeSignBytes) && codeSignBytes.Length >= 8)
                {
                    // linkedit_data_command payload: dataoff(4), datasize(4),
                    // where dataoff is relative to this image's start.
                    var dataOff = ReadUInt32(codeSignBytes, 0, swapBytes);
                    var dataSize = ReadUInt32(codeSignBytes, 4, swapBytes);

                    var currentPos = stream.Position;
                    stream.Position = startOffset + dataOff;

                    codeSignature = ParseCodeSignature(stream, (int)dataSize);

                    stream.Position = currentPos;
                }

                break;
        }

        // BUGFIX: the previous implementation computed the bytes left to skip
        // from loadCommandsStart, which is only correct for the FIRST command;
        // every later unhandled command was skipped by the wrong amount and
        // desynchronized the parser. Seeking to cmdStart + cmdSize is correct
        // for every command regardless of how much of it was consumed above.
        stream.Position = cmdStart + cmdSize;
    }

    return new MachOIdentity(
        cpuTypeName,
        cpuSubtype,
        uuid,
        isFatSlice,
        platform,
        minOsVersion,
        sdkVersion,
        codeSignature,
        exports);
}
|
||||
|
||||
/// <summary>
/// Parse the embedded code signature SuperBlob and extract signing facts.
/// Returns null when the blob is not a valid embedded signature or contains
/// no CodeDirectory-derived data. All code-signature fields are big-endian.
/// </summary>
private static MachOCodeSignature? ParseCodeSignature(Stream stream, int size)
{
    // SuperBlob header: magic(4), length(4).
    if (!TryReadBytes(stream, 8, out var superBlobHeader))
    {
        return null;
    }

    var magic = BinaryPrimitives.ReadUInt32BigEndian(superBlobHeader);
    if (magic != CSMAGIC_EMBEDDED_SIGNATURE)
    {
        return null;
    }

    // Declared length must fit inside the LC_CODE_SIGNATURE region and be at
    // least the 12-byte SuperBlob header (magic + length + count).
    var length = BinaryPrimitives.ReadUInt32BigEndian(superBlobHeader.AsSpan(4));
    if (length > size || length < 12)
    {
        return null;
    }

    if (!TryReadBytes(stream, 4, out var countBytes))
    {
        return null;
    }

    var count = BinaryPrimitives.ReadUInt32BigEndian(countBytes);
    if (count > 100)
    {
        // Sanity cap on index entries for corrupt/hostile blobs.
        return null;
    }

    // Blob offsets in the index are relative to the SuperBlob start,
    // which is 12 bytes behind the current position.
    var blobStart = stream.Position - 12;

    // Read blob index entries: type(4), offset(4) each.
    var blobs = new List<(uint type, uint offset)>();
    for (uint i = 0; i < count; i++)
    {
        if (!TryReadBytes(stream, 8, out var indexEntry))
        {
            break;
        }

        var blobType = BinaryPrimitives.ReadUInt32BigEndian(indexEntry);
        var blobOffset = BinaryPrimitives.ReadUInt32BigEndian(indexEntry.AsSpan(4));
        blobs.Add((blobType, blobOffset));
    }

    string? teamId = null;
    string? signingId = null;
    string? cdHash = null;
    var hasHardenedRuntime = false;
    var entitlements = new List<string>();

    // Visit each sub-blob; dispatch on the sub-blob's own magic
    // (the index slot type is read but not used for dispatch).
    foreach (var (blobType, blobOffset) in blobs)
    {
        stream.Position = blobStart + blobOffset;

        if (!TryReadBytes(stream, 8, out var blobHeader))
        {
            continue;
        }

        var blobMagic = BinaryPrimitives.ReadUInt32BigEndian(blobHeader);
        var blobLength = BinaryPrimitives.ReadUInt32BigEndian(blobHeader.AsSpan(4));

        switch (blobMagic)
        {
            case CSMAGIC_CODEDIRECTORY:
                (teamId, signingId, cdHash, hasHardenedRuntime) = ParseCodeDirectory(stream, blobStart + blobOffset, (int)blobLength);
                break;

            case CSMAGIC_EMBEDDED_ENTITLEMENTS:
                // blobLength includes the 8-byte blob header; pass only the payload size.
                entitlements = ParseEntitlements(stream, (int)blobLength - 8);
                break;
        }
    }

    // No CodeDirectory facts extracted: treat as unsigned/unusable.
    if (teamId is null && signingId is null && cdHash is null)
    {
        return null;
    }

    return new MachOCodeSignature(teamId, signingId, cdHash, hasHardenedRuntime, entitlements);
}
|
||||
|
||||
/// <summary>
/// Parse the CodeDirectory blob and extract the team ID, signing identifier,
/// CDHash, and the hardened-runtime flag. All fields are big-endian.
/// </summary>
private static (string? TeamId, string? SigningId, string? CdHash, bool HasHardenedRuntime) ParseCodeDirectory(
    Stream stream, long blobStart, int length)
{
    stream.Position = blobStart;

    // Fixed-size prefix of the CodeDirectory (per Apple's cs_blobs.h):
    // +0:  magic           +4:  length
    // +8:  version         +12: flags
    // +16: hashOffset      +20: identOffset
    // +24: nSpecialSlots   +28: nCodeSlots
    // +32: codeLimit       +36: hashSize(1) hashType(1) platform(1) pageSize(1)
    // +40: spare2
    // +44: scatterOffset (version >= 0x20100)
    // +48: teamOffset    (version >= 0x20200)
    if (!TryReadBytes(stream, Math.Min(length, 52), out var cdBytes))
    {
        return (null, null, null, false);
    }

    // Guard truncated blobs before slicing fixed offsets below.
    if (cdBytes.Length < 24)
    {
        return (null, null, null, false);
    }

    var version = BinaryPrimitives.ReadUInt32BigEndian(cdBytes.AsSpan(8));
    var flags = BinaryPrimitives.ReadUInt32BigEndian(cdBytes.AsSpan(12));
    var identOffset = BinaryPrimitives.ReadUInt32BigEndian(cdBytes.AsSpan(20));

    // CS_RUNTIME (0x10000) marks the hardened runtime.
    var hasHardenedRuntime = (flags & 0x10000) != 0;

    // Signing identifier: NUL-terminated string at identOffset from the blob start.
    string? signingId = null;
    if (identOffset > 0 && identOffset < length)
    {
        stream.Position = blobStart + identOffset;
        signingId = ReadNullTerminatedString(stream, 256);
    }

    // Team ID (version 0x20200 and later).
    // BUGFIX: teamOffset lives at byte 48 of the CodeDirectory (cs_blobs.h),
    // not 52, and the old guard demanded 56 buffered bytes while only 52 were
    // ever read — so the team ID could never be extracted.
    string? teamId = null;
    if (version >= 0x20200 && cdBytes.Length >= 52)
    {
        var teamOffset = BinaryPrimitives.ReadUInt32BigEndian(cdBytes.AsSpan(48));
        if (teamOffset > 0 && teamOffset < length)
        {
            stream.Position = blobStart + teamOffset;
            teamId = ReadNullTerminatedString(stream, 20);
        }
    }

    // CDHash: SHA-256 over the entire CodeDirectory blob, lowercase hex.
    stream.Position = blobStart;
    if (TryReadBytes(stream, length, out var fullCdBytes))
    {
        var hash = SHA256.HashData(fullCdBytes);
        var cdHash = Convert.ToHexStringLower(hash);
        return (teamId, signingId, cdHash, hasHardenedRuntime);
    }

    return (teamId, signingId, null, hasHardenedRuntime);
}
|
||||
|
||||
/// <summary>
/// Extract entitlement keys from an embedded-entitlements plist blob.
/// Assumes the stream is already positioned at the plist payload (just past
/// the blob header). Performs a naive textual scan for &lt;key&gt;…&lt;/key&gt;
/// pairs rather than full plist parsing.
/// </summary>
/// <param name="stream">Stream positioned at the plist payload.</param>
/// <param name="length">Number of payload bytes to read.</param>
/// <returns>List of non-blank key names; empty when the payload is unreadable.</returns>
private static List<string> ParseEntitlements(Stream stream, int length)
{
    var result = new List<string>();

    if (!TryReadBytes(stream, length, out var raw))
    {
        return result;
    }

    const string OpenTag = "<key>";
    const string CloseTag = "</key>";
    var xml = Encoding.UTF8.GetString(raw);

    var cursor = xml.IndexOf(OpenTag, StringComparison.Ordinal);
    while (cursor >= 0)
    {
        var valueStart = cursor + OpenTag.Length;
        var valueEnd = xml.IndexOf(CloseTag, valueStart, StringComparison.Ordinal);

        // Missing or empty closing tag: stop scanning entirely (matches the
        // original behavior of breaking out rather than skipping).
        if (valueEnd <= valueStart)
        {
            break;
        }

        var candidate = xml[valueStart..valueEnd];
        if (!string.IsNullOrWhiteSpace(candidate))
        {
            result.Add(candidate);
        }

        cursor = xml.IndexOf(OpenTag, valueEnd + CloseTag.Length, StringComparison.Ordinal);
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Get CPU type name from CPU type value.
/// </summary>
/// <param name="cpuType">Raw Mach-O cputype value from the header.</param>
/// <returns>
/// Conventional architecture name for known types; a synthesized
/// "cpu_&lt;value&gt;" string for unknown values. Despite the nullable
/// return type, this never actually returns null.
/// </returns>
private static string? GetCpuTypeName(int cpuType) => cpuType switch
{
    CPU_TYPE_X86 => "i386",
    CPU_TYPE_X86_64 => "x86_64",
    CPU_TYPE_ARM => "arm",
    CPU_TYPE_ARM64 => "arm64",
    _ => $"cpu_{cpuType}"   // fall back to a synthesized name rather than null
};
|
||||
|
||||
/// <summary>
/// Format a packed version number: major in the high 16 bits, minor and patch
/// in the next two bytes. The ".patch" component is omitted when it is zero.
/// </summary>
/// <param name="version">Packed uint32 version value.</param>
/// <returns>"major.minor" or "major.minor.patch".</returns>
private static string FormatVersion(uint version)
{
    var (major, minor, patch) = ((version >> 16) & 0xFFFF, (version >> 8) & 0xFF, version & 0xFF);

    if (patch != 0)
    {
        return $"{major}.{minor}.{patch}";
    }

    return $"{major}.{minor}";
}
|
||||
|
||||
/// <summary>
/// Read a NUL-terminated UTF-8 string from the stream, up to maxLength bytes.
/// Stops at the first NUL byte or at end-of-stream.
/// </summary>
/// <param name="stream">Stream to read from, positioned at the string start.</param>
/// <param name="maxLength">Maximum number of bytes to consume.</param>
/// <returns>The decoded string, or null when no bytes precede the terminator.</returns>
private static string? ReadNullTerminatedString(Stream stream, int maxLength)
{
    var buffer = new byte[maxLength];
    var written = 0;

    for (; written < maxLength; written++)
    {
        // ReadByte returns -1 at EOF and 0 is the terminator: stop on both.
        var next = stream.ReadByte();
        if (next <= 0)
        {
            break;
        }

        buffer[written] = (byte)next;
    }

    return written == 0 ? null : Encoding.UTF8.GetString(buffer, 0, written);
}
|
||||
|
||||
/// <summary>
/// Try to read exactly <paramref name="count"/> bytes from the stream.
/// </summary>
/// <param name="stream">Stream to read from.</param>
/// <param name="count">Exact number of bytes required.</param>
/// <param name="bytes">Buffer of size count; only fully valid when true is returned.</param>
/// <returns>False when the stream ends before count bytes were read.</returns>
private static bool TryReadBytes(Stream stream, int count, out byte[] bytes)
{
    bytes = new byte[count];

    for (var filled = 0; filled < count; )
    {
        var chunk = stream.Read(bytes, filled, count - filled);
        if (chunk == 0)
        {
            // End of stream before the requested amount was available.
            return false;
        }

        filled += chunk;
    }

    return true;
}
|
||||
|
||||
/// <summary>
/// Read a signed 32-bit value from the buffer: big-endian when
/// <paramref name="swap"/> is set, little-endian otherwise.
/// </summary>
private static int ReadInt32(byte[] data, int offset, bool swap)
{
    var window = data.AsSpan(offset);

    if (swap)
    {
        return BinaryPrimitives.ReadInt32BigEndian(window);
    }

    return BinaryPrimitives.ReadInt32LittleEndian(window);
}
|
||||
|
||||
/// <summary>
/// Read an unsigned 32-bit value from the buffer: big-endian when
/// <paramref name="swap"/> is set, little-endian otherwise.
/// </summary>
private static uint ReadUInt32(byte[] data, int offset, bool swap)
{
    var window = data.AsSpan(offset);

    if (swap)
    {
        return BinaryPrimitives.ReadUInt32BigEndian(window);
    }

    return BinaryPrimitives.ReadUInt32LittleEndian(window);
}
|
||||
|
||||
/// <summary>
/// Calculate the file offset of the next load command.
/// cmdSize includes the 8-byte command header, which is subtracted back out —
/// presumably because the caller has already consumed that header before
/// calling this (confirm at the call site). currentCmd and totalCmds are
/// accepted for call-site symmetry but not used.
/// </summary>
private static long GetNextCmdOffset(uint currentCmd, uint totalCmds, long currentOffset, uint cmdSize)
{
    const long CommandHeaderSize = 8;
    return currentOffset + cmdSize - CommandHeaderSize;
}
|
||||
}
|
||||
@@ -1,5 +1,23 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
|
||||
/// Identity information extracted from a native binary (ELF, PE, Mach-O).
|
||||
/// </summary>
|
||||
/// <param name="Format">Binary format (ELF, PE, Mach-O).</param>
|
||||
/// <param name="CpuArchitecture">CPU architecture (x86, x86_64, arm64, etc.).</param>
|
||||
/// <param name="OperatingSystem">Target OS (linux, windows, darwin, etc.).</param>
|
||||
/// <param name="Endianness">Byte order (le, be).</param>
|
||||
/// <param name="BuildId">ELF GNU Build-ID (hex string).</param>
|
||||
/// <param name="Uuid">Mach-O LC_UUID (hex string).</param>
|
||||
/// <param name="InterpreterPath">ELF interpreter path (e.g., /lib64/ld-linux-x86-64.so.2).</param>
|
||||
/// <param name="CodeViewGuid">PE CodeView GUID (lowercase hex, no dashes).</param>
|
||||
/// <param name="CodeViewAge">PE CodeView Age (increments on rebuild).</param>
|
||||
/// <param name="ProductVersion">PE version resource ProductVersion.</param>
|
||||
/// <param name="MachOPlatform">Mach-O platform (macOS, iOS, etc.).</param>
|
||||
/// <param name="MachOMinOsVersion">Mach-O minimum OS version.</param>
|
||||
/// <param name="MachOSdkVersion">Mach-O SDK version.</param>
|
||||
/// <param name="MachOCdHash">Mach-O CodeDirectory hash (SHA-256).</param>
|
||||
/// <param name="MachOTeamId">Mach-O code signing Team ID.</param>
|
||||
public sealed record NativeBinaryIdentity(
|
||||
NativeFormat Format,
|
||||
string? CpuArchitecture,
|
||||
@@ -7,4 +25,13 @@ public sealed record NativeBinaryIdentity(
|
||||
string? Endianness,
|
||||
string? BuildId,
|
||||
string? Uuid,
|
||||
string? InterpreterPath);
|
||||
string? InterpreterPath,
|
||||
string? CodeViewGuid = null,
|
||||
int? CodeViewAge = null,
|
||||
string? ProductVersion = null,
|
||||
MachOPlatform? MachOPlatform = null,
|
||||
string? MachOMinOsVersion = null,
|
||||
string? MachOSdkVersion = null,
|
||||
string? MachOCdHash = null,
|
||||
string? MachOTeamId = null);
|
||||
|
||||
|
||||
@@ -180,6 +180,24 @@ public static class NativeFormatDetector
|
||||
return false;
|
||||
}
|
||||
|
||||
// Try full PE parsing for CodeView GUID and other identity info
|
||||
if (PeReader.TryExtractIdentity(span, out var peIdentity) && peIdentity is not null)
|
||||
{
|
||||
identity = new NativeBinaryIdentity(
|
||||
NativeFormat.Pe,
|
||||
peIdentity.Machine,
|
||||
"windows",
|
||||
Endianness: "le",
|
||||
BuildId: null,
|
||||
Uuid: null,
|
||||
InterpreterPath: null,
|
||||
CodeViewGuid: peIdentity.CodeViewGuid,
|
||||
CodeViewAge: peIdentity.CodeViewAge,
|
||||
ProductVersion: peIdentity.ProductVersion);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Fallback to basic parsing
|
||||
var machine = BinaryPrimitives.ReadUInt16LittleEndian(span.Slice(peHeaderOffset + 4, 2));
|
||||
var arch = MapPeMachine(machine);
|
||||
|
||||
@@ -205,6 +223,30 @@ public static class NativeFormatDetector
|
||||
return false;
|
||||
}
|
||||
|
||||
// Try full parsing with MachOReader
|
||||
using var stream = new MemoryStream(span.ToArray());
|
||||
if (MachOReader.TryExtractIdentity(stream, out var machOIdentity) && machOIdentity is not null)
|
||||
{
|
||||
var endianness = magic is 0xCAFEBABE or 0xFEEDFACE or 0xFEEDFACF ? "be" : "le";
|
||||
var prefixedUuid = machOIdentity.Uuid is not null ? $"macho-uuid:{machOIdentity.Uuid}" : null;
|
||||
|
||||
identity = new NativeBinaryIdentity(
|
||||
NativeFormat.MachO,
|
||||
machOIdentity.CpuType,
|
||||
"darwin",
|
||||
Endianness: endianness,
|
||||
BuildId: prefixedUuid,
|
||||
Uuid: prefixedUuid,
|
||||
InterpreterPath: null,
|
||||
MachOPlatform: machOIdentity.Platform,
|
||||
MachOMinOsVersion: machOIdentity.MinOsVersion,
|
||||
MachOSdkVersion: machOIdentity.SdkVersion,
|
||||
MachOCdHash: machOIdentity.CodeSignature?.CdHash,
|
||||
MachOTeamId: machOIdentity.CodeSignature?.TeamId);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Fallback to basic parsing
|
||||
bool bigEndian = magic is 0xCAFEBABE or 0xFEEDFACE or 0xFEEDFACF;
|
||||
|
||||
uint cputype;
|
||||
@@ -229,7 +271,7 @@ public static class NativeFormatDetector
|
||||
}
|
||||
|
||||
var arch = MapMachCpuType(cputype);
|
||||
var endianness = bigEndian ? "be" : "le";
|
||||
var fallbackEndianness = bigEndian ? "be" : "le";
|
||||
|
||||
string? uuid = null;
|
||||
if (!isFat)
|
||||
@@ -269,7 +311,7 @@ public static class NativeFormatDetector
|
||||
}
|
||||
|
||||
// Store Mach-O UUID in BuildId field (prefixed) and also in Uuid for backwards compatibility
|
||||
identity = new NativeBinaryIdentity(NativeFormat.MachO, arch, "darwin", Endianness: endianness, BuildId: uuid, Uuid: uuid, InterpreterPath: null);
|
||||
identity = new NativeBinaryIdentity(NativeFormat.MachO, arch, "darwin", Endianness: fallbackEndianness, BuildId: uuid, Uuid: uuid, InterpreterPath: null);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Compiler/linker hint extracted from one decoded PE Rich Header entry.
/// </summary>
/// <param name="ToolId">Low 16 bits of the entry's first (XOR-decoded) dword.
/// NOTE(review): the usual Rich-header convention places the product id in the
/// high 16 bits and the build number in the low 16 — the ToolId/ToolVersion
/// naming here may be reversed; confirm against a known binary.</param>
/// <param name="ToolVersion">High 16 bits of the entry's first (XOR-decoded) dword.</param>
/// <param name="UseCount">Number of times this tool was used (the entry's second dword).</param>
public sealed record PeCompilerHint(
    ushort ToolId,
    ushort ToolVersion,
    int UseCount);
|
||||
34
src/Scanner/StellaOps.Scanner.Analyzers.Native/PeIdentity.cs
Normal file
34
src/Scanner/StellaOps.Scanner.Analyzers.Native/PeIdentity.cs
Normal file
@@ -0,0 +1,34 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
/// Full identity information extracted from a PE (Portable Executable) file.
/// All members are optional/best-effort: fields stay null (or empty for lists)
/// when the corresponding structure is absent or unparseable.
/// </summary>
/// <param name="Machine">Machine type (x86, x86_64, ARM64, etc.).</param>
/// <param name="Is64Bit">Whether this is a 64-bit PE (PE32+, optional-header magic 0x20b).</param>
/// <param name="Subsystem">PE subsystem (Console, GUI, Native, etc.).</param>
/// <param name="CodeViewGuid">CodeView PDB70 GUID in lowercase hex (no dashes).</param>
/// <param name="CodeViewAge">CodeView Age field (increments on rebuild).</param>
/// <param name="PdbPath">Original PDB path from the debug directory's RSDS record.</param>
/// <param name="ProductVersion">Product version from version resource.</param>
/// <param name="FileVersion">File version from version resource.</param>
/// <param name="CompanyName">Company name from version resource.</param>
/// <param name="ProductName">Product name from version resource.</param>
/// <param name="OriginalFilename">Original filename from version resource.</param>
/// <param name="RichHeaderHash">The Rich header XOR key (the dword following the
/// "Rich" marker), usable as a toolchain fingerprint. It is not recomputed from
/// the entries, just read from the file.</param>
/// <param name="CompilerHints">Decoded compiler hints from the Rich header.</param>
/// <param name="Exports">Exported symbol names from the export directory.</param>
public sealed record PeIdentity(
    string? Machine,
    bool Is64Bit,
    PeSubsystem Subsystem,
    string? CodeViewGuid,
    int? CodeViewAge,
    string? PdbPath,
    string? ProductVersion,
    string? FileVersion,
    string? CompanyName,
    string? ProductName,
    string? OriginalFilename,
    uint? RichHeaderHash,
    IReadOnlyList<PeCompilerHint> CompilerHints,
    IReadOnlyList<string> Exports);
|
||||
757
src/Scanner/StellaOps.Scanner.Analyzers.Native/PeReader.cs
Normal file
757
src/Scanner/StellaOps.Scanner.Analyzers.Native/PeReader.cs
Normal file
@@ -0,0 +1,757 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native;
|
||||
|
||||
/// <summary>
|
||||
/// Full PE file reader with identity extraction including CodeView GUID, Rich header, and version resources.
|
||||
/// </summary>
|
||||
public static class PeReader
|
||||
{
|
||||
// PE Data Directory indices (slots in IMAGE_OPTIONAL_HEADER.DataDirectory).
private const int IMAGE_DIRECTORY_ENTRY_EXPORT = 0;
private const int IMAGE_DIRECTORY_ENTRY_DEBUG = 6;
private const int IMAGE_DIRECTORY_ENTRY_RESOURCE = 2;

// Debug directory entry types (IMAGE_DEBUG_DIRECTORY.Type).
private const uint IMAGE_DEBUG_TYPE_CODEVIEW = 2;

// CodeView signature: "RSDS" in little-endian, marking a PDB 7.0 record.
private const uint RSDS_SIGNATURE = 0x53445352; // "RSDS" in little-endian

// Rich header markers: "Rich" terminates the block and is followed by the XOR
// key; "DanS" (stored XOR-encoded) begins it.
private const uint RICH_MARKER = 0x68636952; // "Rich" in little-endian
private const uint DANS_MARKER = 0x536E6144; // "DanS" in little-endian
|
||||
|
||||
/// <summary>
/// Parse result containing identity and any parsing metadata.
/// </summary>
/// <param name="Identity">Extracted PE identity.</param>
/// <param name="ParseWarning">Non-fatal warning produced during parsing;
/// currently always null (reserved for future diagnostics).</param>
public sealed record PeParseResult(
    PeIdentity Identity,
    string? ParseWarning);
|
||||
|
||||
/// <summary>
/// Parse a PE file and extract full identity information. Buffers the entire
/// stream into memory before parsing.
/// </summary>
/// <param name="stream">Stream containing PE file data.</param>
/// <param name="path">File path for context (not accessed).</param>
/// <param name="layerDigest">Optional container layer digest (not accessed).</param>
/// <returns>Parse result, or null if not a valid PE file.</returns>
public static PeParseResult? Parse(Stream stream, string path, string? layerDigest = null)
{
    ArgumentNullException.ThrowIfNull(stream);

    // Materialize the full input so identity extraction can operate on a span.
    byte[] contents;
    using (var scratch = new MemoryStream())
    {
        stream.CopyTo(scratch);
        contents = scratch.ToArray();
    }

    return TryExtractIdentity(contents, out var identity) && identity is not null
        ? new PeParseResult(identity, null)
        : null;
}
|
||||
|
||||
/// <summary>
/// Try to extract identity from PE file data.
/// Structural headers (DOS, PE signature, COFF, optional header) are hard
/// requirements — any failure returns false. Everything after that (Rich
/// header, CodeView, version resource, exports) is best-effort: the sub-parsers
/// fail soft, leaving their outputs null or empty.
/// </summary>
/// <param name="data">PE file bytes.</param>
/// <param name="identity">Extracted identity if successful; null otherwise.</param>
/// <returns>True if valid PE file, false otherwise.</returns>
public static bool TryExtractIdentity(ReadOnlySpan<byte> data, out PeIdentity? identity)
{
    identity = null;

    // Validate DOS header
    if (!ValidateDosHeader(data, out var peHeaderOffset))
    {
        return false;
    }

    // Validate PE signature
    if (!ValidatePeSignature(data, peHeaderOffset))
    {
        return false;
    }

    // Parse COFF header
    if (!ParseCoffHeader(data, peHeaderOffset, out var machine, out var numberOfSections, out var sizeOfOptionalHeader))
    {
        return false;
    }

    // Parse Optional header
    if (!ParseOptionalHeader(data, peHeaderOffset, sizeOfOptionalHeader,
        out var is64Bit, out var subsystem, out var numberOfRvaAndSizes, out var dataDirectoryOffset))
    {
        return false;
    }

    var machineStr = MapPeMachine(machine);

    // Parse section headers for RVA-to-file-offset translation.
    // Sections start right after the optional header (PE offset + 24-byte
    // signature/COFF prefix + optional header size).
    var sectionHeadersOffset = peHeaderOffset + 24 + sizeOfOptionalHeader;
    var sections = ParseSectionHeaders(data, sectionHeadersOffset, numberOfSections);

    // Extract Rich header (before PE header in DOS stub)
    uint? richHeaderHash = null;
    var compilerHints = new List<PeCompilerHint>();
    ParseRichHeader(data, peHeaderOffset, out richHeaderHash, compilerHints);

    // Extract CodeView debug info.
    // Each directory is only consulted when NumberOfRvaAndSizes says its slot exists.
    string? codeViewGuid = null;
    int? codeViewAge = null;
    string? pdbPath = null;
    if (numberOfRvaAndSizes > IMAGE_DIRECTORY_ENTRY_DEBUG)
    {
        ParseDebugDirectory(data, dataDirectoryOffset, numberOfRvaAndSizes, sections,
            out codeViewGuid, out codeViewAge, out pdbPath);
    }

    // Extract version resources
    string? productVersion = null;
    string? fileVersion = null;
    string? companyName = null;
    string? productName = null;
    string? originalFilename = null;
    if (numberOfRvaAndSizes > IMAGE_DIRECTORY_ENTRY_RESOURCE)
    {
        ParseVersionResource(data, dataDirectoryOffset, sections, is64Bit,
            out productVersion, out fileVersion, out companyName, out productName, out originalFilename);
    }

    // Extract exports
    var exports = new List<string>();
    if (numberOfRvaAndSizes > IMAGE_DIRECTORY_ENTRY_EXPORT)
    {
        ParseExportDirectory(data, dataDirectoryOffset, sections, exports);
    }

    identity = new PeIdentity(
        Machine: machineStr,
        Is64Bit: is64Bit,
        Subsystem: subsystem,
        CodeViewGuid: codeViewGuid,
        CodeViewAge: codeViewAge,
        PdbPath: pdbPath,
        ProductVersion: productVersion,
        FileVersion: fileVersion,
        CompanyName: companyName,
        ProductName: productName,
        OriginalFilename: originalFilename,
        RichHeaderHash: richHeaderHash,
        CompilerHints: compilerHints,
        Exports: exports
    );

    return true;
}
|
||||
|
||||
/// <summary>
/// Validate the DOS header ("MZ") and extract the PE header offset (e_lfanew).
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="peHeaderOffset">File offset of the "PE\0\0" signature when valid.</param>
/// <returns>True when the DOS header is well-formed and e_lfanew is in bounds.</returns>
private static bool ValidateDosHeader(ReadOnlySpan<byte> data, out int peHeaderOffset)
{
    peHeaderOffset = 0;

    // The DOS header is 0x40 bytes; e_lfanew lives at its tail.
    if (data.Length < 0x40)
    {
        return false;
    }

    // Check MZ signature
    if (data[0] != 'M' || data[1] != 'Z')
    {
        return false;
    }

    // Read e_lfanew (offset to PE header) at offset 0x3C
    peHeaderOffset = BinaryPrimitives.ReadInt32LittleEndian(data.Slice(0x3C, 4));

    // Bounds check rearranged so a hostile e_lfanew near int.MaxValue cannot
    // overflow: the previous form (peHeaderOffset + 24 > data.Length) wrapped
    // negative and accepted the garbage offset. 24 = 4-byte PE signature +
    // 20-byte COFF header that must fit after the offset.
    if (peHeaderOffset < 0 || peHeaderOffset > data.Length - 24)
    {
        return false;
    }

    return true;
}
|
||||
|
||||
/// <summary>
/// Check for the "PE\0\0" signature at the given offset.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="peHeaderOffset">Offset where the signature is expected.</param>
/// <returns>True when the four signature bytes match.</returns>
private static bool ValidatePeSignature(ReadOnlySpan<byte> data, int peHeaderOffset)
{
    if (peHeaderOffset + 4 > data.Length)
    {
        return false;
    }

    var signature = data.Slice(peHeaderOffset, 4);
    return signature[0] == 'P'
        && signature[1] == 'E'
        && signature[2] == 0
        && signature[3] == 0;
}
|
||||
|
||||
/// <summary>
/// Parse the 20-byte COFF header that immediately follows the "PE\0\0" signature.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="peHeaderOffset">Offset of the PE signature.</param>
/// <param name="machine">COFF machine type.</param>
/// <param name="numberOfSections">Section count.</param>
/// <param name="sizeOfOptionalHeader">Optional header size; zero is rejected.</param>
/// <returns>True when the header is in bounds and an optional header is present.</returns>
private static bool ParseCoffHeader(ReadOnlySpan<byte> data, int peHeaderOffset,
    out ushort machine, out ushort numberOfSections, out ushort sizeOfOptionalHeader)
{
    machine = 0;
    numberOfSections = 0;
    sizeOfOptionalHeader = 0;

    // COFF header sits 4 bytes past the PE signature.
    var coffOffset = peHeaderOffset + 4;
    if (coffOffset + 20 > data.Length)
    {
        return false;
    }

    var coff = data.Slice(coffOffset, 20);
    machine = BinaryPrimitives.ReadUInt16LittleEndian(coff);
    numberOfSections = BinaryPrimitives.ReadUInt16LittleEndian(coff.Slice(2, 2));
    sizeOfOptionalHeader = BinaryPrimitives.ReadUInt16LittleEndian(coff.Slice(16, 2));

    // A zero-sized optional header is rejected — this reader only handles images.
    return sizeOfOptionalHeader > 0;
}
|
||||
|
||||
/// <summary>
/// Parse the Optional header: determines PE32 vs PE32+, reads the subsystem
/// and the number of data-directory entries, and computes where the data
/// directories begin. Subsystem and NumberOfRvaAndSizes are best-effort —
/// if truncated they keep their defaults while the method still succeeds.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="peHeaderOffset">Offset of the PE signature.</param>
/// <param name="sizeOfOptionalHeader">Optional header size from the COFF header.</param>
/// <param name="is64Bit">True when the magic is 0x20b (PE32+).</param>
/// <param name="subsystem">Subsystem value, or Unknown when unreadable.</param>
/// <param name="numberOfRvaAndSizes">Count of data-directory slots, or 0 when unreadable.</param>
/// <param name="dataDirectoryOffset">File offset of the first data-directory entry.</param>
/// <returns>False when the header is out of bounds or the magic is unrecognized.</returns>
private static bool ParseOptionalHeader(ReadOnlySpan<byte> data, int peHeaderOffset, ushort sizeOfOptionalHeader,
    out bool is64Bit, out PeSubsystem subsystem, out uint numberOfRvaAndSizes, out int dataDirectoryOffset)
{
    is64Bit = false;
    subsystem = PeSubsystem.Unknown;
    numberOfRvaAndSizes = 0;
    dataDirectoryOffset = 0;

    // Optional header follows the 4-byte PE signature + 20-byte COFF header.
    var optionalHeaderOffset = peHeaderOffset + 24;
    if (optionalHeaderOffset + sizeOfOptionalHeader > data.Length)
    {
        return false;
    }

    var magic = BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(optionalHeaderOffset, 2));
    is64Bit = magic == 0x20b; // PE32+

    if (magic != 0x10b && magic != 0x20b) // PE32 or PE32+
    {
        return false;
    }

    // Subsystem offset: 68 for both PE32 and PE32+
    var subsystemOffset = optionalHeaderOffset + 68;
    if (subsystemOffset + 2 <= data.Length)
    {
        subsystem = (PeSubsystem)BinaryPrimitives.ReadUInt16LittleEndian(data.Slice(subsystemOffset, 2));
    }

    // NumberOfRvaAndSizes — at +92 for PE32, +108 for PE32+ (the 32-bit
    // ImageBase and four size fields widen in PE32+).
    var rvaAndSizesOffset = optionalHeaderOffset + (is64Bit ? 108 : 92);
    if (rvaAndSizesOffset + 4 <= data.Length)
    {
        numberOfRvaAndSizes = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(rvaAndSizesOffset, 4));
    }

    // Data directories start after the numberOfRvaAndSizes field
    dataDirectoryOffset = optionalHeaderOffset + (is64Bit ? 112 : 96);

    return true;
}
|
||||
|
||||
/// <summary>
/// Parse section headers for RVA-to-file-offset translation.
/// Stops early (without error) when a header would run past the end of the data.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="offset">File offset of the first section header.</param>
/// <param name="numberOfSections">Section count from the COFF header.</param>
private static List<SectionHeader> ParseSectionHeaders(ReadOnlySpan<byte> data, int offset, ushort numberOfSections)
{
    const int SECTION_HEADER_SIZE = 40;
    var parsed = new List<SectionHeader>(numberOfSections);

    for (var index = 0; index < numberOfSections; index++)
    {
        var start = offset + index * SECTION_HEADER_SIZE;
        if (start + SECTION_HEADER_SIZE > data.Length)
        {
            break;
        }

        var header = data.Slice(start, SECTION_HEADER_SIZE);
        parsed.Add(new SectionHeader(
            BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(12, 4)),    // VirtualAddress
            BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(8, 4)),     // VirtualSize
            BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(20, 4)),    // RawDataPointer
            BinaryPrimitives.ReadUInt32LittleEndian(header.Slice(16, 4)))); // RawDataSize
    }

    return parsed;
}
|
||||
|
||||
/// <summary>
/// Convert a relative virtual address to a file offset using the section table.
/// </summary>
/// <param name="rva">Relative virtual address to translate.</param>
/// <param name="sections">Parsed section headers.</param>
/// <param name="fileOffset">Translated file offset when true is returned.</param>
/// <returns>True when the RVA falls inside some section's virtual range.</returns>
private static bool TryRvaToFileOffset(uint rva, List<SectionHeader> sections, out uint fileOffset)
{
    fileOffset = 0;

    foreach (var section in sections)
    {
        // Compare via the delta instead of (VirtualAddress + VirtualSize):
        // the sum can wrap in uint for hostile headers, which previously
        // caused valid RVAs in such a section to be rejected.
        if (rva >= section.VirtualAddress && rva - section.VirtualAddress < section.VirtualSize)
        {
            fileOffset = rva - section.VirtualAddress + section.RawDataPointer;
            return true;
        }
    }

    return false;
}
|
||||
|
||||
/// <summary>
/// Parse the Rich header from the DOS stub: locate the trailing "Rich" marker,
/// recover the XOR key stored after it, walk back to the XOR-encoded "DanS"
/// start marker, then decode the (toolId, useCount) entry pairs in between.
/// Fails soft: outputs are simply left empty/null when no Rich header exists.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="peHeaderOffset">Offset of the PE signature (upper search bound).</param>
/// <param name="richHeaderHash">The XOR key found after the "Rich" marker, or null.</param>
/// <param name="compilerHints">Receives one hint per decoded non-zero entry.</param>
private static void ParseRichHeader(ReadOnlySpan<byte> data, int peHeaderOffset,
    out uint? richHeaderHash, List<PeCompilerHint> compilerHints)
{
    richHeaderHash = null;

    // Search for "Rich" marker backwards from PE header.
    // The lower bound 0x40 skips the fixed-size DOS header.
    var searchEnd = Math.Min(peHeaderOffset, data.Length);
    var richOffset = -1;

    for (var i = searchEnd - 4; i >= 0x40; i--)
    {
        var marker = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(i, 4));
        if (marker == RICH_MARKER)
        {
            richOffset = i;
            break;
        }
    }

    if (richOffset < 0 || richOffset + 8 > data.Length)
    {
        return;
    }

    // XOR key follows "Rich" marker; it is also surfaced as the "hash".
    var xorKey = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(richOffset + 4, 4));
    richHeaderHash = xorKey;

    // Search backwards for "DanS" marker (stored XOR'd with the key),
    // stepping in 4-byte units since the header is dword-aligned.
    var dansOffset = -1;
    for (var i = richOffset - 4; i >= 0x40; i -= 4)
    {
        if (i + 4 > data.Length)
        {
            continue;
        }

        var value = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(i, 4));
        if ((value ^ xorKey) == DANS_MARKER)
        {
            dansOffset = i;
            break;
        }
    }

    if (dansOffset < 0)
    {
        return;
    }

    // Parse entries between DanS and Rich (skip first 16 bytes after DanS which are padding).
    // Each entry is two XOR-encoded dwords: comp.id then use count.
    var entriesStart = dansOffset + 16;
    for (var i = entriesStart; i < richOffset; i += 8)
    {
        if (i + 8 > data.Length)
        {
            break;
        }

        var compId = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(i, 4)) ^ xorKey;
        var useCount = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(i + 4, 4)) ^ xorKey;

        // All-zero pairs are padding, not real entries.
        if (compId == 0 && useCount == 0)
        {
            continue;
        }

        var toolId = (ushort)(compId & 0xFFFF);
        var toolVersion = (ushort)((compId >> 16) & 0xFFFF);

        compilerHints.Add(new PeCompilerHint(toolId, toolVersion, (int)useCount));
    }
}
|
||||
|
||||
/// <summary>
/// Parse the debug directory looking for a CodeView (RSDS / PDB70) record and
/// extract its GUID, Age, and original PDB path. Fails soft: outputs stay null
/// when the directory is absent, truncated, or contains no RSDS record.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="dataDirectoryOffset">File offset of the data-directory table.</param>
/// <param name="numberOfRvaAndSizes">Data-directory slot count.</param>
/// <param name="sections">Section table for RVA translation.</param>
/// <param name="codeViewGuid">PDB70 GUID as lowercase hex (no dashes), or null.</param>
/// <param name="codeViewAge">PDB Age field, or null.</param>
/// <param name="pdbPath">Original PDB path, or null.</param>
private static void ParseDebugDirectory(ReadOnlySpan<byte> data, int dataDirectoryOffset, uint numberOfRvaAndSizes,
    List<SectionHeader> sections, out string? codeViewGuid, out int? codeViewAge, out string? pdbPath)
{
    codeViewGuid = null;
    codeViewAge = null;
    pdbPath = null;

    if (numberOfRvaAndSizes <= IMAGE_DIRECTORY_ENTRY_DEBUG)
    {
        return;
    }

    var debugDirOffset = dataDirectoryOffset + IMAGE_DIRECTORY_ENTRY_DEBUG * 8;
    if (debugDirOffset + 8 > data.Length)
    {
        return;
    }

    var debugRva = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(debugDirOffset, 4));
    var debugSize = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(debugDirOffset + 4, 4));

    if (debugRva == 0 || debugSize == 0)
    {
        return;
    }

    if (!TryRvaToFileOffset(debugRva, sections, out var debugFileOffset))
    {
        return;
    }

    // Each debug directory entry is 28 bytes
    const int DEBUG_ENTRY_SIZE = 28;
    var numEntries = debugSize / DEBUG_ENTRY_SIZE;

    for (var i = 0; i < numEntries; i++)
    {
        // Use long arithmetic: the previous (int)debugFileOffset cast could go
        // negative for offsets above 2 GiB and crash the subsequent Slice.
        var entryOffsetLong = (long)debugFileOffset + (long)i * DEBUG_ENTRY_SIZE;
        if (entryOffsetLong + DEBUG_ENTRY_SIZE > data.Length)
        {
            break;
        }

        var entryOffset = (int)entryOffsetLong;

        var debugType = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(entryOffset + 12, 4));
        if (debugType != IMAGE_DEBUG_TYPE_CODEVIEW)
        {
            continue;
        }

        var sizeOfData = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(entryOffset + 16, 4));
        var pointerToRawData = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(entryOffset + 24, 4));

        // RSDS layout needs at least signature(4) + GUID(16) + Age(4) = 24 bytes.
        if (pointerToRawData == 0 || sizeOfData < 24)
        {
            continue;
        }

        // Widen to long before adding: pointerToRawData + sizeOfData could wrap
        // around in uint and slip past the bounds check.
        if ((long)pointerToRawData + sizeOfData > data.Length)
        {
            continue;
        }

        var cvSpan = data.Slice((int)pointerToRawData, (int)sizeOfData);

        // Check for RSDS signature (PDB70)
        var signature = BinaryPrimitives.ReadUInt32LittleEndian(cvSpan);
        if (signature != RSDS_SIGNATURE)
        {
            continue;
        }

        // GUID is 16 bytes at offset 4
        var guidBytes = cvSpan.Slice(4, 16);
        codeViewGuid = FormatGuidAsLowercaseHex(guidBytes);

        // Age is 4 bytes at offset 20
        codeViewAge = (int)BinaryPrimitives.ReadUInt32LittleEndian(cvSpan.Slice(20, 4));

        // PDB path is null-terminated string starting at offset 24
        var pdbPathSpan = cvSpan[24..];
        var nullTerminator = pdbPathSpan.IndexOf((byte)0);
        var pathLength = nullTerminator >= 0 ? nullTerminator : pdbPathSpan.Length;
        if (pathLength > 0)
        {
            pdbPath = Encoding.UTF8.GetString(pdbPathSpan[..pathLength]);
        }

        break; // Found CodeView, done
    }
}
|
||||
|
||||
/// <summary>
/// Format GUID bytes as lowercase hex without dashes.
/// GUID field layout: Data1 (4 bytes LE), Data2 (2 bytes LE), Data3 (2 bytes LE),
/// Data4 (8 bytes emitted in storage order).
/// </summary>
/// <param name="guidBytes">16 raw GUID bytes as stored in the RSDS record.</param>
private static string FormatGuidAsLowercaseHex(ReadOnlySpan<byte> guidBytes)
{
    var data1 = BinaryPrimitives.ReadUInt32LittleEndian(guidBytes);
    var data2 = BinaryPrimitives.ReadUInt16LittleEndian(guidBytes.Slice(4, 2));
    var data3 = BinaryPrimitives.ReadUInt16LittleEndian(guidBytes.Slice(6, 2));

    var builder = new StringBuilder(32)
        .Append(data1.ToString("x8"))
        .Append(data2.ToString("x4"))
        .Append(data3.ToString("x4"));

    // Data4 has no endianness — append each byte as stored.
    foreach (var b in guidBytes.Slice(8, 8))
    {
        builder.Append(b.ToString("x2"));
    }

    return builder.ToString();
}
|
||||
|
||||
/// <summary>
/// Extract version strings (ProductVersion, FileVersion, CompanyName,
/// ProductName, OriginalFilename) from the resource section. Uses a simplified
/// signature scan for VS_VERSION_INFO rather than walking the resource tree.
/// Fails soft: all outputs stay null when the resource is absent or unreadable.
/// </summary>
/// <param name="data">Full PE file bytes.</param>
/// <param name="dataDirectoryOffset">File offset of the data-directory table.</param>
/// <param name="sections">Section table for RVA translation.</param>
/// <param name="is64Bit">Accepted for call-site symmetry; not currently used.</param>
/// <param name="productVersion">ProductVersion string, or null.</param>
/// <param name="fileVersion">FileVersion string, or null.</param>
/// <param name="companyName">CompanyName string, or null.</param>
/// <param name="productName">ProductName string, or null.</param>
/// <param name="originalFilename">OriginalFilename string, or null.</param>
private static void ParseVersionResource(ReadOnlySpan<byte> data, int dataDirectoryOffset,
    List<SectionHeader> sections, bool is64Bit,
    out string? productVersion, out string? fileVersion,
    out string? companyName, out string? productName, out string? originalFilename)
{
    productVersion = null;
    fileVersion = null;
    companyName = null;
    productName = null;
    originalFilename = null;

    var resourceDirOffset = dataDirectoryOffset + IMAGE_DIRECTORY_ENTRY_RESOURCE * 8;
    if (resourceDirOffset + 8 > data.Length)
    {
        return;
    }

    var resourceRva = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(resourceDirOffset, 4));
    var resourceSize = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(resourceDirOffset + 4, 4));

    if (resourceRva == 0 || resourceSize == 0)
    {
        return;
    }

    if (!TryRvaToFileOffset(resourceRva, sections, out var resourceFileOffset))
    {
        return;
    }

    // Guard against a translated offset at/past the end of the file: without
    // this, the Math.Min below produces a negative slice length and Slice throws.
    if (resourceFileOffset >= data.Length)
    {
        return;
    }

    // Search for VS_VERSION_INFO signature in resources.
    // This is a simplified approach — scanning the resource section directly
    // instead of walking the resource directory tree.
    var searchSpan = data.Slice((int)resourceFileOffset, (int)Math.Min(resourceSize, data.Length - resourceFileOffset));

    // Look for "VS_VERSION_INFO" signature (UTF-16LE wide string)
    var vsVersionInfo = Encoding.Unicode.GetBytes("VS_VERSION_INFO");
    var vsInfoOffset = IndexOf(searchSpan, vsVersionInfo);

    if (vsInfoOffset < 0)
    {
        return;
    }

    // Parse StringFileInfo to extract version strings
    var versionInfoStart = (int)resourceFileOffset + vsInfoOffset;
    ParseVersionStrings(data, versionInfoStart, searchSpan.Length - vsInfoOffset,
        ref productVersion, ref fileVersion, ref companyName, ref productName, ref originalFilename);
}
|
||||
|
||||
/// <summary>
/// Extract well-known version strings (ProductVersion, FileVersion, CompanyName,
/// ProductName, OriginalFilename) from a VS_VERSION_INFO blob.
/// Heuristic: scans for each UTF-16 key and reads the null-terminated value that
/// follows it, rather than fully parsing the StringFileInfo structure tree.
/// Already-populated ref parameters are overwritten only when a non-blank value
/// is found for their key.
/// </summary>
private static void ParseVersionStrings(ReadOnlySpan<byte> data, int offset, int maxLength,
    ref string? productVersion, ref string? fileVersion,
    ref string? companyName, ref string? productName, ref string? originalFilename)
{
    // Search for common version string keys
    var keys = new[] { "ProductVersion", "FileVersion", "CompanyName", "ProductName", "OriginalFilename" };

    var searchSpan = data.Slice(offset, Math.Min(maxLength, data.Length - offset));

    foreach (var key in keys)
    {
        var keyBytes = Encoding.Unicode.GetBytes(key);
        var keyOffset = IndexOf(searchSpan, keyBytes);

        if (keyOffset < 0)
        {
            continue;
        }

        // Value follows the key, aligned to 4-byte boundary
        var valueStart = keyOffset + keyBytes.Length + 2; // +2 for UTF-16 null terminator
        // Align to 4-byte boundary
        valueStart = (valueStart + 3) & ~3;

        // Fix: bound against searchSpan (what is sliced below), not data.
        // The original check (offset + valueStart >= data.Length) allowed
        // valueStart to exceed searchSpan.Length whenever maxLength was the
        // limiting term, and searchSpan[valueStart..] then threw.
        if (valueStart >= searchSpan.Length)
        {
            continue;
        }

        // Read null-terminated wide string value
        var valueSpan = searchSpan[valueStart..];
        var nullTerm = -1;
        for (var i = 0; i < valueSpan.Length - 1; i += 2)
        {
            if (valueSpan[i] == 0 && valueSpan[i + 1] == 0)
            {
                nullTerm = i;
                break;
            }
        }

        if (nullTerm > 0)
        {
            var value = Encoding.Unicode.GetString(valueSpan[..nullTerm]);
            if (!string.IsNullOrWhiteSpace(value))
            {
                switch (key)
                {
                    case "ProductVersion":
                        productVersion = value;
                        break;
                    case "FileVersion":
                        fileVersion = value;
                        break;
                    case "CompanyName":
                        companyName = value;
                        break;
                    case "ProductName":
                        productName = value;
                        break;
                    case "OriginalFilename":
                        originalFilename = value;
                        break;
                }
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Parse export directory for exported symbols.
/// Reads the export name table and appends each exported symbol name to
/// <paramref name="exports"/>. Returns silently on any malformed or
/// truncated structure; individual unreadable names are skipped.
/// </summary>
private static void ParseExportDirectory(ReadOnlySpan<byte> data, int dataDirectoryOffset,
    List<SectionHeader> sections, List<string> exports)
{
    // Safety cap so a corrupted name count cannot cause unbounded work/allocation.
    const int MAX_EXPORTS = 10000;

    // Each data-directory entry is 8 bytes: RVA (4) + size (4).
    var exportDirOffset = dataDirectoryOffset + IMAGE_DIRECTORY_ENTRY_EXPORT * 8;
    if (exportDirOffset + 8 > data.Length)
    {
        return;
    }

    var exportRva = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(exportDirOffset, 4));
    var exportSize = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(exportDirOffset + 4, 4));

    // An absent export table is encoded as a zero RVA/size.
    if (exportRva == 0 || exportSize == 0)
    {
        return;
    }

    if (!TryRvaToFileOffset(exportRva, sections, out var exportFileOffset))
    {
        return;
    }

    // 40 bytes = size of the fixed export directory header read below.
    if (exportFileOffset + 40 > data.Length)
    {
        return;
    }

    var exportSpan = data.Slice((int)exportFileOffset, 40);

    // Header offsets 24 and 32 hold the name count and the RVA of the
    // name-pointer table, respectively (matching the variable names).
    var numberOfNames = BinaryPrimitives.ReadUInt32LittleEndian(exportSpan.Slice(24, 4));
    var addressOfNames = BinaryPrimitives.ReadUInt32LittleEndian(exportSpan.Slice(32, 4));

    if (numberOfNames == 0 || addressOfNames == 0)
    {
        return;
    }

    if (!TryRvaToFileOffset(addressOfNames, sections, out var namesFileOffset))
    {
        return;
    }

    var count = Math.Min((int)numberOfNames, MAX_EXPORTS);

    for (var i = 0; i < count; i++)
    {
        // The name-pointer table is an array of 4-byte RVAs, one per name.
        var nameRvaOffset = (int)namesFileOffset + i * 4;
        if (nameRvaOffset + 4 > data.Length)
        {
            break;
        }

        var nameRva = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(nameRvaOffset, 4));
        if (!TryRvaToFileOffset(nameRva, sections, out var nameFileOffset))
        {
            continue;
        }

        if (nameFileOffset >= data.Length)
        {
            continue;
        }

        // Names are null-terminated ASCII; if no terminator is found before
        // the end of the buffer, cap the read at 256 bytes.
        var nameSpan = data[(int)nameFileOffset..];
        var nullTerm = nameSpan.IndexOf((byte)0);
        var nameLength = nullTerm >= 0 ? nullTerm : Math.Min(256, nameSpan.Length);

        if (nameLength > 0)
        {
            var name = Encoding.ASCII.GetString(nameSpan[..nameLength]);
            if (!string.IsNullOrWhiteSpace(name))
            {
                exports.Add(name);
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Find the first occurrence of <paramref name="needle"/> in
/// <paramref name="haystack"/>.
/// </summary>
/// <returns>
/// Zero-based offset of the first match, or -1 when not found.
/// An empty needle matches at offset 0, as in the original scan.
/// </returns>
private static int IndexOf(ReadOnlySpan<byte> haystack, ReadOnlySpan<byte> needle)
{
    // Delegate to the framework's (vectorized) span search instead of the
    // hand-rolled byte-by-byte O(n*m) scan; semantics are identical.
    return haystack.IndexOf(needle);
}
|
||||
|
||||
/// <summary>
/// Translate a PE machine-type code into a canonical architecture name.
/// </summary>
/// <param name="machine">Raw machine value from the COFF file header.</param>
/// <returns>Architecture string, or null for an unrecognized code.</returns>
private static string? MapPeMachine(ushort machine)
{
    switch (machine)
    {
        case 0x014c: return "x86";
        case 0x0200: return "ia64";
        case 0x8664: return "x86_64";
        case 0x01c0: return "arm";
        case 0x01c2: return "thumb";
        case 0x01c4: return "armnt";
        case 0xaa64: return "arm64";
        case 0x5032: return "riscv32";
        case 0x5064: return "riscv64";
        case 0x5128: return "riscv128";
        default: return null;
    }
}
|
||||
|
||||
/// <summary>
/// Section header for RVA translation.
/// Minimal subset of a PE section header needed to map a relative virtual
/// address (RVA) to a raw file offset.
/// </summary>
/// <param name="VirtualAddress">RVA at which the section is mapped.</param>
/// <param name="VirtualSize">Size of the section in memory.</param>
/// <param name="RawDataPointer">File offset of the section's raw data.</param>
/// <param name="RawDataSize">Size of the raw data on disk.</param>
private sealed record SectionHeader(
    uint VirtualAddress,
    uint VirtualSize,
    uint RawDataPointer,
    uint RawDataSize);
|
||||
}
|
||||
@@ -0,0 +1,451 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// FindingEvidenceContracts.cs
|
||||
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
|
||||
// Description: Unified evidence API response contracts for findings.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
/// <summary>
/// Unified evidence response for a finding, combining reachability, boundary,
/// VEX evidence, and score explanation.
/// </summary>
/// <remarks>
/// Wire names are snake_case via the <c>JsonPropertyName</c> attributes below.
/// All evidence sections are optional and serialize as null when absent.
/// </remarks>
public sealed record FindingEvidenceResponse
{
    /// <summary>
    /// Unique identifier for the finding.
    /// </summary>
    [JsonPropertyName("finding_id")]
    public string FindingId { get; init; } = string.Empty;

    /// <summary>
    /// CVE identifier (e.g., "CVE-2021-44228").
    /// </summary>
    [JsonPropertyName("cve")]
    public string Cve { get; init; } = string.Empty;

    /// <summary>
    /// Component where the vulnerability was found.
    /// </summary>
    [JsonPropertyName("component")]
    public ComponentRef? Component { get; init; }

    /// <summary>
    /// Reachable call path from entrypoint to vulnerable sink.
    /// Each element is a fully-qualified name (FQN).
    /// </summary>
    [JsonPropertyName("reachable_path")]
    public IReadOnlyList<string>? ReachablePath { get; init; }

    /// <summary>
    /// Entrypoint proof (how the code is exposed).
    /// </summary>
    [JsonPropertyName("entrypoint")]
    public EntrypointProof? Entrypoint { get; init; }

    /// <summary>
    /// Boundary proof (surface exposure and controls).
    /// </summary>
    [JsonPropertyName("boundary")]
    public BoundaryProofDto? Boundary { get; init; }

    /// <summary>
    /// VEX (Vulnerability Exploitability eXchange) evidence.
    /// </summary>
    [JsonPropertyName("vex")]
    public VexEvidenceDto? Vex { get; init; }

    /// <summary>
    /// Score explanation with additive risk breakdown.
    /// </summary>
    [JsonPropertyName("score_explain")]
    public ScoreExplanationDto? ScoreExplain { get; init; }

    /// <summary>
    /// When the finding was last observed.
    /// </summary>
    [JsonPropertyName("last_seen")]
    public DateTimeOffset LastSeen { get; init; }

    /// <summary>
    /// When the evidence expires (for VEX/attestation freshness).
    /// Null when no expiry applies.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>
    /// References to DSSE/in-toto attestations backing this evidence.
    /// </summary>
    [JsonPropertyName("attestation_refs")]
    public IReadOnlyList<string>? AttestationRefs { get; init; }
}
|
||||
|
||||
/// <summary>
/// Reference to a component (package) by PURL and version.
/// </summary>
/// <remarks>
/// All fields default to empty strings rather than null so the record is
/// always safe to serialize.
/// </remarks>
public sealed record ComponentRef
{
    /// <summary>
    /// Package URL (PURL) identifier.
    /// </summary>
    [JsonPropertyName("purl")]
    public string Purl { get; init; } = string.Empty;

    /// <summary>
    /// Package name.
    /// </summary>
    [JsonPropertyName("name")]
    public string Name { get; init; } = string.Empty;

    /// <summary>
    /// Package version.
    /// </summary>
    [JsonPropertyName("version")]
    public string Version { get; init; } = string.Empty;

    /// <summary>
    /// Package type/ecosystem (npm, maven, nuget, etc.).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;
}
|
||||
|
||||
/// <summary>
/// Proof of how code is exposed as an entrypoint.
/// </summary>
/// <remarks>
/// <see cref="Type"/> and <see cref="Fqn"/> are always populated; the
/// remaining fields apply only to certain entrypoint kinds and are null
/// otherwise.
/// </remarks>
public sealed record EntrypointProof
{
    /// <summary>
    /// Type of entrypoint (http_handler, grpc_method, cli_command, etc.).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Route or path (e.g., "/api/v1/users", "grpc.UserService.GetUser").
    /// </summary>
    [JsonPropertyName("route")]
    public string? Route { get; init; }

    /// <summary>
    /// HTTP method if applicable (GET, POST, etc.).
    /// </summary>
    [JsonPropertyName("method")]
    public string? Method { get; init; }

    /// <summary>
    /// Authentication requirement (none, optional, required).
    /// </summary>
    [JsonPropertyName("auth")]
    public string? Auth { get; init; }

    /// <summary>
    /// Execution phase (startup, runtime, shutdown).
    /// </summary>
    [JsonPropertyName("phase")]
    public string? Phase { get; init; }

    /// <summary>
    /// Fully qualified name of the entrypoint symbol.
    /// </summary>
    [JsonPropertyName("fqn")]
    public string Fqn { get; init; } = string.Empty;

    /// <summary>
    /// Source file location.
    /// </summary>
    [JsonPropertyName("location")]
    public SourceLocation? Location { get; init; }
}
|
||||
|
||||
/// <summary>
/// Source file location reference.
/// </summary>
/// <remarks>
/// Line/column are 1-indexed and null when position information is unknown.
/// </remarks>
public sealed record SourceLocation
{
    /// <summary>
    /// File path relative to repository root.
    /// </summary>
    [JsonPropertyName("file")]
    public string File { get; init; } = string.Empty;

    /// <summary>
    /// Line number (1-indexed).
    /// </summary>
    [JsonPropertyName("line")]
    public int? Line { get; init; }

    /// <summary>
    /// Column number (1-indexed).
    /// </summary>
    [JsonPropertyName("column")]
    public int? Column { get; init; }
}
|
||||
|
||||
/// <summary>
/// Boundary proof describing surface exposure and controls.
/// </summary>
/// <remarks>
/// Aggregates surface, exposure, auth, and control descriptors; each sub-proof
/// is optional and null when unavailable.
/// </remarks>
public sealed record BoundaryProofDto
{
    /// <summary>
    /// Kind of boundary (network, file, ipc, etc.).
    /// </summary>
    [JsonPropertyName("kind")]
    public string Kind { get; init; } = string.Empty;

    /// <summary>
    /// Surface descriptor (what is exposed).
    /// </summary>
    [JsonPropertyName("surface")]
    public SurfaceDescriptor? Surface { get; init; }

    /// <summary>
    /// Exposure descriptor (how it's exposed).
    /// </summary>
    [JsonPropertyName("exposure")]
    public ExposureDescriptor? Exposure { get; init; }

    /// <summary>
    /// Authentication descriptor.
    /// </summary>
    [JsonPropertyName("auth")]
    public AuthDescriptor? Auth { get; init; }

    /// <summary>
    /// Security controls in place.
    /// </summary>
    [JsonPropertyName("controls")]
    public IReadOnlyList<ControlDescriptor>? Controls { get; init; }

    /// <summary>
    /// When the boundary was last verified.
    /// </summary>
    [JsonPropertyName("last_seen")]
    public DateTimeOffset LastSeen { get; init; }

    /// <summary>
    /// Confidence score (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes what attack surface is exposed.
/// </summary>
/// <remarks>
/// Protocol and port apply only to network-exposed surfaces and are null
/// otherwise.
/// </remarks>
public sealed record SurfaceDescriptor
{
    /// <summary>
    /// Type of surface (api, web, cli, library).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Protocol (http, https, grpc, tcp).
    /// </summary>
    [JsonPropertyName("protocol")]
    public string? Protocol { get; init; }

    /// <summary>
    /// Port number if network-exposed.
    /// </summary>
    [JsonPropertyName("port")]
    public int? Port { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes how the surface is exposed.
/// </summary>
/// <remarks>
/// <see cref="InternetFacing"/> defaults to false when the exposure is not
/// explicitly marked internet-facing.
/// </remarks>
public sealed record ExposureDescriptor
{
    /// <summary>
    /// Exposure level (public, internal, private).
    /// </summary>
    [JsonPropertyName("level")]
    public string Level { get; init; } = string.Empty;

    /// <summary>
    /// Whether the exposure is internet-facing.
    /// </summary>
    [JsonPropertyName("internet_facing")]
    public bool InternetFacing { get; init; }

    /// <summary>
    /// Network zone (dmz, internal, trusted).
    /// </summary>
    [JsonPropertyName("zone")]
    public string? Zone { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes authentication requirements.
/// </summary>
/// <remarks>
/// Type and roles are only meaningful when <see cref="Required"/> is true.
/// </remarks>
public sealed record AuthDescriptor
{
    /// <summary>
    /// Whether authentication is required.
    /// </summary>
    [JsonPropertyName("required")]
    public bool Required { get; init; }

    /// <summary>
    /// Authentication type (jwt, oauth2, basic, api_key).
    /// </summary>
    [JsonPropertyName("type")]
    public string? Type { get; init; }

    /// <summary>
    /// Required roles/scopes.
    /// </summary>
    [JsonPropertyName("roles")]
    public IReadOnlyList<string>? Roles { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes a security control.
/// </summary>
/// <remarks>
/// A control may be present but inactive; check <see cref="Active"/> before
/// treating it as mitigating.
/// </remarks>
public sealed record ControlDescriptor
{
    /// <summary>
    /// Type of control (rate_limit, waf, input_validation, etc.).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Whether the control is active.
    /// </summary>
    [JsonPropertyName("active")]
    public bool Active { get; init; }

    /// <summary>
    /// Control configuration details.
    /// </summary>
    [JsonPropertyName("config")]
    public string? Config { get; init; }
}
|
||||
|
||||
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) evidence.
/// </summary>
/// <remarks>
/// Only <see cref="Status"/> is guaranteed to be populated; the remaining
/// fields depend on what the VEX statement supplies.
/// </remarks>
public sealed record VexEvidenceDto
{
    /// <summary>
    /// VEX status (not_affected, affected, fixed, under_investigation).
    /// </summary>
    [JsonPropertyName("status")]
    public string Status { get; init; } = string.Empty;

    /// <summary>
    /// Justification for the status.
    /// </summary>
    [JsonPropertyName("justification")]
    public string? Justification { get; init; }

    /// <summary>
    /// Impact statement explaining why not affected.
    /// </summary>
    [JsonPropertyName("impact")]
    public string? Impact { get; init; }

    /// <summary>
    /// Action statement (remediation steps).
    /// </summary>
    [JsonPropertyName("action")]
    public string? Action { get; init; }

    /// <summary>
    /// Reference to the VEX document/attestation.
    /// </summary>
    [JsonPropertyName("attestation_ref")]
    public string? AttestationRef { get; init; }

    /// <summary>
    /// When the VEX statement was issued.
    /// </summary>
    [JsonPropertyName("issued_at")]
    public DateTimeOffset? IssuedAt { get; init; }

    /// <summary>
    /// When the VEX statement expires.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>
    /// Source of the VEX statement (vendor, first-party, third-party).
    /// </summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }
}
|
||||
|
||||
/// <summary>
/// Score explanation with additive breakdown of risk factors.
/// </summary>
/// <remarks>
/// <see cref="RiskScore"/> is the final score; <see cref="Contributions"/>
/// lists the per-factor terms that compose it.
/// </remarks>
public sealed record ScoreExplanationDto
{
    /// <summary>
    /// Kind of scoring algorithm (stellaops_risk_v1, cvss_v4, etc.).
    /// </summary>
    [JsonPropertyName("kind")]
    public string Kind { get; init; } = string.Empty;

    /// <summary>
    /// Final computed risk score.
    /// </summary>
    [JsonPropertyName("risk_score")]
    public double RiskScore { get; init; }

    /// <summary>
    /// Individual score contributions.
    /// </summary>
    [JsonPropertyName("contributions")]
    public IReadOnlyList<ScoreContributionDto>? Contributions { get; init; }

    /// <summary>
    /// When the score was computed.
    /// </summary>
    [JsonPropertyName("last_seen")]
    public DateTimeOffset LastSeen { get; init; }
}
|
||||
|
||||
/// <summary>
/// Individual contribution to the risk score.
/// </summary>
/// <remarks>
/// <see cref="Contribution"/> is the weighted term added into the final score;
/// <see cref="RawValue"/> and <see cref="Weight"/> show how it was derived.
/// </remarks>
public sealed record ScoreContributionDto
{
    /// <summary>
    /// Factor name (cvss_base, epss, reachability, gate_multiplier, etc.).
    /// </summary>
    [JsonPropertyName("factor")]
    public string Factor { get; init; } = string.Empty;

    /// <summary>
    /// Weight applied to this factor (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("weight")]
    public double Weight { get; init; }

    /// <summary>
    /// Raw value before weighting.
    /// </summary>
    [JsonPropertyName("raw_value")]
    public double RawValue { get; init; }

    /// <summary>
    /// Weighted contribution to final score.
    /// </summary>
    [JsonPropertyName("contribution")]
    public double Contribution { get; init; }

    /// <summary>
    /// Human-readable explanation of this factor.
    /// </summary>
    [JsonPropertyName("explanation")]
    public string? Explanation { get; init; }
}
|
||||
@@ -0,0 +1,251 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WitnessEndpoints.cs
|
||||
// Sprint: SPRINT_3700_0001_0001_witness_foundation
|
||||
// Task: WIT-010
|
||||
// Description: API endpoints for DSSE-signed path witnesses.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
/// <summary>
/// Minimal-API endpoints for reading and verifying DSSE-signed path witnesses.
/// All routes require the scans-read authorization policy.
/// </summary>
internal static class WitnessEndpoints
{
    /// <summary>
    /// Registers the witness endpoints under <paramref name="witnessSegment"/>
    /// on the supplied API group.
    /// </summary>
    public static void MapWitnessEndpoints(this RouteGroupBuilder apiGroup, string witnessSegment = "witnesses")
    {
        ArgumentNullException.ThrowIfNull(apiGroup);

        var witnesses = apiGroup.MapGroup($"/{witnessSegment.TrimStart('/')}");

        witnesses.MapGet("/{witnessId:guid}", HandleGetWitnessByIdAsync)
            .WithName("scanner.witnesses.get")
            .Produces<WitnessResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.ScansRead);

        witnesses.MapGet("", HandleListWitnessesAsync)
            .WithName("scanner.witnesses.list")
            .Produces<WitnessListResponseDto>(StatusCodes.Status200OK)
            .RequireAuthorization(ScannerPolicies.ScansRead);

        witnesses.MapGet("/by-hash/{witnessHash}", HandleGetWitnessByHashAsync)
            .WithName("scanner.witnesses.get-by-hash")
            .Produces<WitnessResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.ScansRead);

        witnesses.MapPost("/{witnessId:guid}/verify", HandleVerifyWitnessAsync)
            .WithName("scanner.witnesses.verify")
            .Produces<WitnessVerificationResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.ScansRead);
    }

    /// <summary>
    /// GET /witnesses/{id}: fetch a single witness by its identifier.
    /// </summary>
    private static async Task<IResult> HandleGetWitnessByIdAsync(
        Guid witnessId,
        IWitnessRepository repository,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(repository);

        var witness = await repository.GetByIdAsync(witnessId, cancellationToken).ConfigureAwait(false);
        if (witness is null)
        {
            return Results.NotFound();
        }

        return Results.Ok(MapToDto(witness));
    }

    /// <summary>
    /// GET /witnesses/by-hash/{hash}: fetch a single witness by content hash.
    /// A blank hash is treated as not found rather than an error.
    /// </summary>
    private static async Task<IResult> HandleGetWitnessByHashAsync(
        string witnessHash,
        IWitnessRepository repository,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(repository);

        if (string.IsNullOrWhiteSpace(witnessHash))
        {
            return Results.NotFound();
        }

        var witness = await repository.GetByHashAsync(witnessHash, cancellationToken).ConfigureAwait(false);
        if (witness is null)
        {
            return Results.NotFound();
        }

        return Results.Ok(MapToDto(witness));
    }

    /// <summary>
    /// GET /witnesses?scanId=|cve=|graphHash=: list witnesses by exactly one
    /// filter. With no recognized filter an empty list is returned so an
    /// unfiltered request never triggers a full table scan.
    /// </summary>
    private static async Task<IResult> HandleListWitnessesAsync(
        HttpContext context,
        IWitnessRepository repository,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(repository);

        var query = context.Request.Query;
        IReadOnlyList<WitnessRecord> witnesses;

        if (query.TryGetValue("scanId", out var scanIdValue) && Guid.TryParse(scanIdValue, out var scanId))
        {
            witnesses = await repository.GetByScanIdAsync(scanId, cancellationToken).ConfigureAwait(false);
        }
        else if (query.TryGetValue("cve", out var cveValue) && !string.IsNullOrWhiteSpace(cveValue))
        {
            witnesses = await repository.GetByCveAsync(cveValue!, cancellationToken).ConfigureAwait(false);
        }
        else if (query.TryGetValue("graphHash", out var graphHashValue) && !string.IsNullOrWhiteSpace(graphHashValue))
        {
            witnesses = await repository.GetByGraphHashAsync(graphHashValue!, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            // No filter provided - return empty list (avoid full table scan)
            witnesses = [];
        }

        return Results.Ok(new WitnessListResponseDto
        {
            Witnesses = witnesses.Select(MapToDto).ToList(),
            TotalCount = witnesses.Count
        });
    }

    /// <summary>
    /// POST /witnesses/{id}/verify: structurally verify the witness's DSSE
    /// envelope, record the verification attempt, and report the outcome.
    /// Statuses: "valid", "unsigned" (no envelope), "invalid" (bad envelope).
    /// </summary>
    private static async Task<IResult> HandleVerifyWitnessAsync(
        Guid witnessId,
        IWitnessRepository repository,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(repository);

        var witness = await repository.GetByIdAsync(witnessId, cancellationToken).ConfigureAwait(false);
        if (witness is null)
        {
            return Results.NotFound();
        }

        // Basic verification: check if DSSE envelope exists and witness hash is valid
        var verificationStatus = "valid";
        string? verificationError = null;

        if (string.IsNullOrEmpty(witness.DsseEnvelope))
        {
            verificationStatus = "unsigned";
            verificationError = "Witness does not have a DSSE envelope";
        }
        else
        {
            // TODO: WIT-009 - Add actual DSSE signature verification via Attestor
            // For now, just check the envelope structure
            try
            {
                // Fix: JsonDocument rents pooled buffers and must be disposed;
                // the original leaked it on every verification call.
                using var envelope = JsonDocument.Parse(witness.DsseEnvelope);
                if (!envelope.RootElement.TryGetProperty("signatures", out var signatures) ||
                    signatures.GetArrayLength() == 0)
                {
                    verificationStatus = "invalid";
                    verificationError = "DSSE envelope has no signatures";
                }
            }
            catch (JsonException ex)
            {
                verificationStatus = "invalid";
                verificationError = $"Invalid DSSE envelope JSON: {ex.Message}";
            }
        }

        // Record verification attempt
        await repository.RecordVerificationAsync(new WitnessVerificationRecord
        {
            WitnessId = witnessId,
            VerifiedAt = DateTimeOffset.UtcNow,
            VerifiedBy = "api",
            VerificationStatus = verificationStatus,
            VerificationError = verificationError
        }, cancellationToken).ConfigureAwait(false);

        return Results.Ok(new WitnessVerificationResponseDto
        {
            WitnessId = witnessId,
            WitnessHash = witness.WitnessHash,
            Status = verificationStatus,
            Error = verificationError,
            VerifiedAt = DateTimeOffset.UtcNow,
            IsSigned = !string.IsNullOrEmpty(witness.DsseEnvelope)
        });
    }

    /// <summary>
    /// Project a storage record onto the API response DTO.
    /// </summary>
    private static WitnessResponseDto MapToDto(WitnessRecord record)
    {
        // Fix: the original returned RootElement from JsonDocuments it never
        // disposed, leaking pooled buffers. Clone() detaches the element so
        // the documents can be disposed safely here.
        using var payloadDoc = JsonDocument.Parse(record.PayloadJson);

        JsonElement? dsseElement = null;
        if (!string.IsNullOrEmpty(record.DsseEnvelope))
        {
            using var dsseDoc = JsonDocument.Parse(record.DsseEnvelope);
            dsseElement = dsseDoc.RootElement.Clone();
        }

        return new WitnessResponseDto
        {
            WitnessId = record.WitnessId,
            WitnessHash = record.WitnessHash,
            SchemaVersion = record.SchemaVersion,
            WitnessType = record.WitnessType,
            GraphHash = record.GraphHash,
            ScanId = record.ScanId,
            RunId = record.RunId,
            CreatedAt = record.CreatedAt,
            SignedAt = record.SignedAt,
            SignerKeyId = record.SignerKeyId,
            EntrypointFqn = record.EntrypointFqn,
            SinkCve = record.SinkCve,
            IsSigned = !string.IsNullOrEmpty(record.DsseEnvelope),
            Payload = payloadDoc.RootElement.Clone(),
            DsseEnvelope = dsseElement
        };
    }
}
|
||||
|
||||
/// <summary>
/// Response DTO for a single witness.
/// Projected from a storage record by the witness endpoints.
/// </summary>
public sealed record WitnessResponseDto
{
    /// <summary>Unique witness identifier.</summary>
    public Guid WitnessId { get; init; }

    /// <summary>Content hash of the witness.</summary>
    public required string WitnessHash { get; init; }

    /// <summary>Witness payload schema version.</summary>
    public required string SchemaVersion { get; init; }

    /// <summary>Kind of witness.</summary>
    public required string WitnessType { get; init; }

    /// <summary>Hash of the call graph the witness was derived from.</summary>
    public required string GraphHash { get; init; }

    /// <summary>Originating scan, when known.</summary>
    public Guid? ScanId { get; init; }

    /// <summary>Originating run, when known.</summary>
    public Guid? RunId { get; init; }

    /// <summary>When the witness was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>When the witness was signed; null when unsigned.</summary>
    public DateTimeOffset? SignedAt { get; init; }

    /// <summary>Key identifier used to sign the witness, when signed.</summary>
    public string? SignerKeyId { get; init; }

    /// <summary>Fully qualified name of the entrypoint, when recorded.</summary>
    public string? EntrypointFqn { get; init; }

    /// <summary>CVE of the vulnerable sink, when recorded.</summary>
    public string? SinkCve { get; init; }

    /// <summary>True when a DSSE envelope is present on the record.</summary>
    public bool IsSigned { get; init; }

    /// <summary>Parsed witness payload JSON.</summary>
    public JsonElement Payload { get; init; }

    /// <summary>Parsed DSSE envelope JSON; null when unsigned.</summary>
    public JsonElement? DsseEnvelope { get; init; }
}
|
||||
|
||||
/// <summary>
/// Response DTO for witness list.
/// </summary>
public sealed record WitnessListResponseDto
{
    /// <summary>Witnesses matching the request filter.</summary>
    public required IReadOnlyList<WitnessResponseDto> Witnesses { get; init; }

    /// <summary>Number of witnesses returned.</summary>
    public int TotalCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Response DTO for witness verification.
/// </summary>
public sealed record WitnessVerificationResponseDto
{
    /// <summary>Identifier of the verified witness.</summary>
    public Guid WitnessId { get; init; }

    /// <summary>Content hash of the verified witness.</summary>
    public required string WitnessHash { get; init; }

    /// <summary>Verification outcome: "valid", "unsigned", or "invalid".</summary>
    public required string Status { get; init; }

    /// <summary>Failure detail when status is not "valid"; otherwise null.</summary>
    public string? Error { get; init; }

    /// <summary>When the verification was performed.</summary>
    public DateTimeOffset VerifiedAt { get; init; }

    /// <summary>True when the witness carries a DSSE envelope.</summary>
    public bool IsSigned { get; init; }
}
|
||||
@@ -470,6 +470,7 @@ apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
|
||||
apiGroup.MapReachabilityDriftRootEndpoints();
|
||||
apiGroup.MapProofSpineEndpoints(resolvedOptions.Api.SpinesSegment, resolvedOptions.Api.ScansSegment);
|
||||
apiGroup.MapReplayEndpoints();
|
||||
apiGroup.MapWitnessEndpoints(); // Sprint: SPRINT_3700_0001_0001
|
||||
|
||||
if (resolvedOptions.Features.EnablePolicyPreview)
|
||||
{
|
||||
|
||||
272
src/Scanner/StellaOps.Scanner.Worker/Processing/EpssIngestJob.cs
Normal file
272
src/Scanner/StellaOps.Scanner.Worker/Processing/EpssIngestJob.cs
Normal file
@@ -0,0 +1,272 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssIngestJob.cs
|
||||
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
|
||||
// Task: EPSS-3410-009
|
||||
// Description: Background job that ingests EPSS data from online or bundle sources.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Options for the EPSS ingestion job.
/// </summary>
/// <remarks>
/// Bound from the configuration section named by <see cref="SectionName"/>.
/// </remarks>
public sealed class EpssIngestOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Epss:Ingest";

    /// <summary>
    /// Whether the job is enabled. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Cron schedule for EPSS ingestion. Default: "0 5 0 * * *" (00:05 UTC daily).
    /// </summary>
    public string Schedule { get; set; } = "0 5 0 * * *";

    /// <summary>
    /// Source type: "online" or "bundle". Default: "online".
    /// </summary>
    public string SourceType { get; set; } = "online";

    /// <summary>
    /// Bundle path for air-gapped ingestion (when SourceType is "bundle").
    /// Ignored for online ingestion.
    /// </summary>
    public string? BundlePath { get; set; }

    /// <summary>
    /// Initial delay before first run. Default: 30 seconds.
    /// </summary>
    public TimeSpan InitialDelay { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Retry delay on failure. Default: 5 minutes.
    /// </summary>
    public TimeSpan RetryDelay { get; set; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Maximum retry attempts. Default: 3.
    /// </summary>
    public int MaxRetries { get; set; } = 3;
}
|
||||
|
||||
/// <summary>
/// Background service that ingests EPSS data on a schedule.
/// Supports online (FIRST.org) and offline (bundle) sources.
/// </summary>
public sealed class EpssIngestJob : BackgroundService
{
    private readonly IEpssRepository _repository;
    private readonly EpssOnlineSource _onlineSource;
    private readonly EpssBundleSource _bundleSource;
    private readonly EpssCsvStreamParser _parser;
    private readonly IOptions<EpssIngestOptions> _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EpssIngestJob> _logger;
    private readonly ActivitySource _activitySource = new("StellaOps.Scanner.EpssIngest");

    public EpssIngestJob(
        IEpssRepository repository,
        EpssOnlineSource onlineSource,
        EpssBundleSource bundleSource,
        EpssCsvStreamParser parser,
        IOptions<EpssIngestOptions> options,
        TimeProvider timeProvider,
        ILogger<EpssIngestJob> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _onlineSource = onlineSource ?? throw new ArgumentNullException(nameof(onlineSource));
        _bundleSource = bundleSource ?? throw new ArgumentNullException(nameof(bundleSource));
        _parser = parser ?? throw new ArgumentNullException(nameof(parser));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Main scheduling loop: waits for the configured daily slot, then runs one
    /// ingestion (with retries) per slot until the host stops.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("EPSS ingest job started");

        var opts = _options.Value;

        if (!opts.Enabled)
        {
            _logger.LogInformation("EPSS ingest job is disabled");
            return;
        }

        try
        {
            // Initial delay to let the system stabilize before the first run.
            await Task.Delay(opts.InitialDelay, stoppingToken).ConfigureAwait(false);

            while (!stoppingToken.IsCancellationRequested)
            {
                var now = _timeProvider.GetUtcNow();
                var nextRun = ComputeNextRun(now, opts.Schedule);
                var delay = nextRun - now;

                if (delay > TimeSpan.Zero)
                {
                    _logger.LogDebug("EPSS ingest job waiting until {NextRun}", nextRun);
                    await Task.Delay(delay, stoppingToken).ConfigureAwait(false);
                }

                if (stoppingToken.IsCancellationRequested)
                {
                    break;
                }

                await RunIngestionWithRetryAsync(stoppingToken).ConfigureAwait(false);
            }
        }
        catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
        {
            // Graceful shutdown: Task.Delay (or an in-flight ingestion) observed the
            // stop signal. Without this catch the exception escapes ExecuteAsync,
            // which skips the "stopped" log and — under the default
            // BackgroundServiceExceptionBehavior.StopHost — can tear down the host.
        }

        _logger.LogInformation("EPSS ingest job stopped");
    }

    /// <summary>
    /// Runs ingestion for a specific date. Used by tests and manual triggers.
    /// Throws on failure; callers decide whether to retry.
    /// </summary>
    /// <param name="modelDate">EPSS model date to ingest.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public async Task IngestAsync(DateOnly modelDate, CancellationToken cancellationToken = default)
    {
        using var activity = _activitySource.StartActivity("epss.ingest", ActivityKind.Internal);
        activity?.SetTag("epss.model_date", modelDate.ToString("yyyy-MM-dd"));

        var opts = _options.Value;
        var stopwatch = Stopwatch.StartNew();

        _logger.LogInformation("Starting EPSS ingestion for {ModelDate}", modelDate);

        try
        {
            // Choose the source based on configuration; anything other than
            // "bundle" falls back to the online source.
            IEpssSource source = opts.SourceType.Equals("bundle", StringComparison.OrdinalIgnoreCase)
                ? _bundleSource
                : _onlineSource;

            // Retrieve the EPSS file.
            var sourceFile = await source.GetAsync(modelDate, cancellationToken).ConfigureAwait(false);

            _logger.LogInformation(
                "Retrieved EPSS file from {SourceUri}, size={Size}",
                sourceFile.SourceUri,
                sourceFile.Content.Length);

            // Record the import attempt before writing any data, so failures are
            // visible as failed import runs.
            var importRun = await _repository.BeginImportAsync(
                modelDate,
                sourceFile.SourceUri,
                _timeProvider.GetUtcNow(),
                sourceFile.FileSha256,
                cancellationToken).ConfigureAwait(false);

            _logger.LogDebug("Created import run {ImportRunId}", importRun.ImportRunId);

            try
            {
                // Parse the gzipped CSV and stream rows into the snapshot.
                await using var stream = new MemoryStream(sourceFile.Content);
                var session = _parser.ParseGzip(stream);

                var writeResult = await _repository.WriteSnapshotAsync(
                    importRun.ImportRunId,
                    modelDate,
                    _timeProvider.GetUtcNow(),
                    session,
                    cancellationToken).ConfigureAwait(false);

                await _repository.MarkImportSucceededAsync(
                    importRun.ImportRunId,
                    session.RowCount,
                    session.DecompressedSha256,
                    session.ModelVersionTag,
                    session.PublishedDate,
                    cancellationToken).ConfigureAwait(false);

                stopwatch.Stop();

                _logger.LogInformation(
                    "EPSS ingestion completed: modelDate={ModelDate}, rows={RowCount}, cves={CveCount}, duration={Duration}ms",
                    modelDate,
                    writeResult.RowCount,
                    writeResult.DistinctCveCount,
                    stopwatch.ElapsedMilliseconds);

                activity?.SetTag("epss.row_count", writeResult.RowCount);
                activity?.SetTag("epss.cve_count", writeResult.DistinctCveCount);
                activity?.SetTag("epss.duration_ms", stopwatch.ElapsedMilliseconds);
            }
            catch (Exception ex)
            {
                // Mark the run failed, then rethrow so the retry loop sees it.
                // NOTE(review): if cancellationToken is already cancelled this mark
                // call may itself throw — confirm whether a best-effort
                // CancellationToken.None write is preferred here.
                await _repository.MarkImportFailedAsync(
                    importRun.ImportRunId,
                    ex.Message,
                    cancellationToken).ConfigureAwait(false);

                throw;
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "EPSS ingestion failed for {ModelDate}", modelDate);
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            throw;
        }
    }

    /// <summary>
    /// Runs today's ingestion, retrying up to <see cref="EpssIngestOptions.MaxRetries"/>
    /// times. Terminal failures are logged and swallowed so the scheduling loop keeps
    /// running; the next scheduled slot will try again.
    /// </summary>
    private async Task RunIngestionWithRetryAsync(CancellationToken cancellationToken)
    {
        var opts = _options.Value;
        var modelDate = DateOnly.FromDateTime(_timeProvider.GetUtcNow().UtcDateTime);

        for (var attempt = 1; attempt <= opts.MaxRetries; attempt++)
        {
            try
            {
                await IngestAsync(modelDate, cancellationToken).ConfigureAwait(false);
                return;
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                // Host shutdown, not an ingestion failure: do not retry or log as
                // an error; let ExecuteAsync handle it.
                throw;
            }
            catch (Exception ex) when (attempt < opts.MaxRetries)
            {
                _logger.LogWarning(
                    ex,
                    "EPSS ingestion attempt {Attempt}/{MaxRetries} failed, retrying in {RetryDelay}",
                    attempt,
                    opts.MaxRetries,
                    opts.RetryDelay);

                await Task.Delay(opts.RetryDelay, cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // Deliberate swallow: the job stays alive for the next schedule.
                _logger.LogError(
                    ex,
                    "EPSS ingestion failed after {MaxRetries} attempts",
                    opts.MaxRetries);
            }
        }
    }

    /// <summary>
    /// Computes the next run time.
    /// NOTE(review): the <paramref name="cronSchedule"/> argument is currently
    /// ignored — for the MVP this always schedules 00:05 UTC the next day,
    /// matching the default "0 5 0 * * *". TODO: honor the configured expression
    /// (or validate it matches the hard-coded slot) before shipping custom schedules.
    /// </summary>
    private static DateTimeOffset ComputeNextRun(DateTimeOffset now, string cronSchedule)
    {
        var today = now.UtcDateTime.Date;
        var scheduledTime = today.AddMinutes(5);

        if (now.UtcDateTime > scheduledTime)
        {
            scheduledTime = scheduledTime.AddDays(1);
        }

        return new DateTimeOffset(scheduledTime, TimeSpan.Zero);
    }

    /// <summary>
    /// Disposes the job-owned <see cref="ActivitySource"/> (previously leaked)
    /// along with base resources.
    /// </summary>
    public override void Dispose()
    {
        _activitySource.Dispose();
        base.Dispose();
    }
}
|
||||
@@ -113,6 +113,12 @@ if (!string.IsNullOrWhiteSpace(connectionString))
|
||||
builder.Services.AddSingleton<ISurfaceManifestPublisher, SurfaceManifestPublisher>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, SurfaceManifestStageExecutor>();
|
||||
builder.Services.AddSingleton<IDsseEnvelopeSigner, HmacDsseEnvelopeSigner>();
|
||||
|
||||
// EPSS ingestion job (Sprint: SPRINT_3410_0001_0001)
|
||||
builder.Services.AddOptions<EpssIngestOptions>()
|
||||
.BindConfiguration(EpssIngestOptions.SectionName)
|
||||
.ValidateOnStart();
|
||||
builder.Services.AddHostedService<EpssIngestJob>();
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
using StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Native;
|
||||
|
||||
/// <summary>
/// Result of emitting a native component.
/// </summary>
/// <param name="Purl">Package URL for the component.</param>
/// <param name="Name">Component name (usually the filename).</param>
/// <param name="Version">Component version if known.</param>
/// <param name="Metadata">Original binary metadata.</param>
/// <param name="IndexMatch">Whether this was matched from the Build-ID index.</param>
/// <param name="LookupResult">The index lookup result if matched.</param>
/// <remarks>
/// As produced by <c>NativeComponentEmitter</c>, <paramref name="IndexMatch"/> is
/// true exactly when <paramref name="LookupResult"/> is non-null; in that case the
/// PURL and version come from the index, otherwise a generic PURL is synthesized
/// and the version falls back to PE version-resource fields.
/// </remarks>
public sealed record NativeComponentEmitResult(
    string Purl,
    string Name,
    string? Version,
    NativeBinaryMetadata Metadata,
    bool IndexMatch,
    BuildIdLookupResult? LookupResult);

/// <summary>
/// Interface for emitting native binary components for SBOM generation.
/// </summary>
public interface INativeComponentEmitter
{
    /// <summary>
    /// Emits a native component from binary metadata.
    /// </summary>
    /// <param name="metadata">Binary metadata.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Component emission result.</returns>
    Task<NativeComponentEmitResult> EmitAsync(NativeBinaryMetadata metadata, CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits multiple native components. The default implementation batches the
    /// Build-ID index lookups, so prefer this over per-item <see cref="EmitAsync"/>
    /// calls when processing many binaries.
    /// </summary>
    /// <param name="metadataList">List of binary metadata.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Component emission results, one per input, in input order.</returns>
    Task<IReadOnlyList<NativeComponentEmitResult>> EmitBatchAsync(
        IEnumerable<NativeBinaryMetadata> metadataList,
        CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,55 @@
|
||||
namespace StellaOps.Scanner.Emit.Native;
|
||||
|
||||
/// <summary>
/// Metadata describing a single native binary discovered in a container image,
/// used as the input to SBOM component emission.
/// </summary>
public sealed record NativeBinaryMetadata
{
    // --- Identity -----------------------------------------------------------

    /// <summary>Binary format (elf, pe, macho).</summary>
    public required string Format { get; init; }

    /// <summary>Path of the file within the container layer.</summary>
    public required string FilePath { get; init; }

    /// <summary>Build-ID with prefix (gnu-build-id:..., pe-cv:..., macho-uuid:...).</summary>
    public string? BuildId { get; init; }

    // --- Target -------------------------------------------------------------

    /// <summary>CPU architecture (x86_64, aarch64, arm, i686, etc.).</summary>
    public string? Architecture { get; init; }

    /// <summary>True when the binary targets a 64-bit architecture.</summary>
    public bool Is64Bit { get; init; }

    /// <summary>Operating system or platform.</summary>
    public string? Platform { get; init; }

    // --- File facts ---------------------------------------------------------

    /// <summary>SHA-256 digest of the file.</summary>
    public string? FileDigest { get; init; }

    /// <summary>Size of the file in bytes.</summary>
    public long FileSize { get; init; }

    /// <summary>Digest of the container layer that introduced this binary.</summary>
    public string? LayerDigest { get; init; }

    /// <summary>Zero-based index of that layer.</summary>
    public int LayerIndex { get; init; }

    // --- PE version resource (Windows binaries) -----------------------------

    /// <summary>Product version from the PE version resource.</summary>
    public string? ProductVersion { get; init; }

    /// <summary>File version from the PE version resource.</summary>
    public string? FileVersion { get; init; }

    /// <summary>Company name from the PE version resource.</summary>
    public string? CompanyName { get; init; }

    // --- Security posture ---------------------------------------------------

    /// <summary>Hardening flags (PIE, RELRO, NX, etc.).</summary>
    public IReadOnlyDictionary<string, string>? HardeningFlags { get; init; }

    /// <summary>True when the binary carries a signature.</summary>
    public bool IsSigned { get; init; }

    /// <summary>Signature details (Authenticode, codesign, etc.).</summary>
    public string? SignatureDetails { get; init; }
}
|
||||
@@ -0,0 +1,155 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Native;
|
||||
|
||||
/// <summary>
/// Emits native binary components for SBOM generation.
/// Uses the Build-ID index to resolve PURLs when possible.
/// </summary>
public sealed class NativeComponentEmitter : INativeComponentEmitter
{
    private readonly IBuildIdIndex _buildIdIndex;
    private readonly NativePurlBuilder _purlBuilder;
    private readonly ILogger<NativeComponentEmitter> _logger;

    /// <summary>
    /// Creates a new native component emitter.
    /// </summary>
    /// <param name="buildIdIndex">Build-ID index used to resolve PURLs.</param>
    /// <param name="logger">Logger.</param>
    public NativeComponentEmitter(
        IBuildIdIndex buildIdIndex,
        ILogger<NativeComponentEmitter> logger)
    {
        ArgumentNullException.ThrowIfNull(buildIdIndex);
        ArgumentNullException.ThrowIfNull(logger);

        _buildIdIndex = buildIdIndex;
        _purlBuilder = new NativePurlBuilder();
        _logger = logger;
    }

    /// <inheritdoc />
    public async Task<NativeComponentEmitResult> EmitAsync(
        NativeBinaryMetadata metadata,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(metadata);

        // Try to resolve via Build-ID index.
        BuildIdLookupResult? lookupResult = null;

        if (!string.IsNullOrWhiteSpace(metadata.BuildId))
        {
            lookupResult = await _buildIdIndex.LookupAsync(metadata.BuildId, cancellationToken).ConfigureAwait(false);
        }

        var result = CreateResult(metadata, lookupResult);

        if (result.IndexMatch)
        {
            _logger.LogDebug(
                "Resolved binary {FilePath} via Build-ID index: {Purl}",
                metadata.FilePath,
                result.Purl);
        }
        else
        {
            _logger.LogDebug(
                "Unresolved binary {FilePath}, generated generic PURL: {Purl}",
                metadata.FilePath,
                result.Purl);
        }

        return result;
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<NativeComponentEmitResult>> EmitBatchAsync(
        IEnumerable<NativeBinaryMetadata> metadataList,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(metadataList);

        var metadataArray = metadataList.ToArray();
        if (metadataArray.Length == 0)
        {
            return Array.Empty<NativeComponentEmitResult>();
        }

        // One batched lookup for all distinct Build-IDs; skip the round-trip
        // entirely when no binary carries a Build-ID.
        var buildIds = metadataArray
            .Where(m => !string.IsNullOrWhiteSpace(m.BuildId))
            .Select(m => m.BuildId!)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var lookupMap = new Dictionary<string, BuildIdLookupResult>(StringComparer.OrdinalIgnoreCase);
        if (buildIds.Length > 0)
        {
            var lookupResults = await _buildIdIndex.BatchLookupAsync(buildIds, cancellationToken).ConfigureAwait(false);
            foreach (var lookupResult in lookupResults)
            {
                lookupMap[lookupResult.BuildId] = lookupResult;
            }
        }

        var results = new List<NativeComponentEmitResult>(metadataArray.Length);

        foreach (var metadata in metadataArray)
        {
            BuildIdLookupResult? lookupResult = null;

            if (!string.IsNullOrWhiteSpace(metadata.BuildId) &&
                lookupMap.TryGetValue(metadata.BuildId, out var match))
            {
                lookupResult = match;
            }

            results.Add(CreateResult(metadata, lookupResult));
        }

        // Fix: previously this logged lookupMap.Count (distinct resolved
        // Build-IDs), which misreports the number of resolved *binaries* when
        // several share a Build-ID or a lookup returns no match.
        _logger.LogDebug(
            "Batch lookup: {Total} binaries, {Resolved} resolved via index",
            metadataArray.Length,
            results.Count(r => r.IndexMatch));

        return results;
    }

    /// <summary>
    /// Builds the emission result for one binary: index-resolved PURL/version
    /// when a lookup match exists, otherwise a generic PURL with the PE
    /// version-resource fields as a version fallback. Shared by
    /// <see cref="EmitAsync"/> and <see cref="EmitBatchAsync"/> (previously duplicated).
    /// </summary>
    private NativeComponentEmitResult CreateResult(
        NativeBinaryMetadata metadata,
        BuildIdLookupResult? lookupResult)
    {
        string purl;
        string? version;
        var indexMatch = lookupResult is not null;

        if (lookupResult is not null)
        {
            purl = _purlBuilder.FromIndexResult(lookupResult);
            version = lookupResult.Version;
        }
        else
        {
            purl = _purlBuilder.FromUnresolvedBinary(metadata);
            version = metadata.ProductVersion ?? metadata.FileVersion;
        }

        return new NativeComponentEmitResult(
            Purl: purl,
            Name: Path.GetFileName(metadata.FilePath),
            Version: version,
            Metadata: metadata,
            IndexMatch: indexMatch,
            LookupResult: lookupResult);
    }
}
|
||||
@@ -0,0 +1,115 @@
|
||||
using StellaOps.Scanner.Analyzers.Native.Index;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Native;
|
||||
|
||||
/// <summary>
/// Constructs package URLs (PURLs) for native binaries, either straight from a
/// Build-ID index match or as a generic fallback with identifying qualifiers.
/// </summary>
public sealed class NativePurlBuilder
{
    /// <summary>
    /// Returns the PURL already resolved by a Build-ID index lookup.
    /// </summary>
    /// <param name="lookupResult">The index lookup result.</param>
    /// <returns>PURL string.</returns>
    public string FromIndexResult(BuildIdLookupResult lookupResult)
    {
        ArgumentNullException.ThrowIfNull(lookupResult);
        return lookupResult.Purl;
    }

    /// <summary>
    /// Produces a pkg:generic PURL for a binary the index could not resolve,
    /// attaching build-id, arch, os, and checksum qualifiers when present.
    /// </summary>
    /// <param name="metadata">Binary metadata.</param>
    /// <returns>PURL string.</returns>
    public string FromUnresolvedBinary(NativeBinaryMetadata metadata)
    {
        ArgumentNullException.ThrowIfNull(metadata);

        var fileName = Path.GetFileName(metadata.FilePath);

        // Collect qualifiers for every metadata field that is populated.
        var qualifiers = new List<string>();
        void Add(string key, string? value)
        {
            if (!string.IsNullOrWhiteSpace(value))
            {
                qualifiers.Add($"{key}={EncodeComponent(value)}");
            }
        }

        Add("build-id", metadata.BuildId);
        Add("arch", metadata.Architecture);
        Add("os", metadata.Platform);
        Add("checksum", metadata.FileDigest);

        var purl = $"pkg:generic/{EncodeComponent(fileName)}@unknown";

        if (qualifiers.Count == 0)
        {
            return purl;
        }

        // Deterministic ordinal ordering, per the PURL spec's sorted-qualifiers rule.
        qualifiers.Sort(StringComparer.Ordinal);
        return purl + "?" + string.Join("&", qualifiers);
    }

    /// <summary>
    /// Builds a PURL for a binary whose owning distro package is known.
    /// </summary>
    /// <param name="distro">Distribution type (deb, rpm, apk, etc.)</param>
    /// <param name="distroName">Distribution name (debian, fedora, alpine, etc.)</param>
    /// <param name="packageName">Package name.</param>
    /// <param name="version">Package version.</param>
    /// <param name="architecture">CPU architecture.</param>
    /// <returns>PURL string.</returns>
    public string FromDistroPackage(
        string distro,
        string distroName,
        string packageName,
        string version,
        string? architecture = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(distro);
        ArgumentException.ThrowIfNullOrWhiteSpace(distroName);
        ArgumentException.ThrowIfNullOrWhiteSpace(packageName);
        ArgumentException.ThrowIfNullOrWhiteSpace(version);

        // Translate the distro identifier into the corresponding PURL type.
        string purlType;
        switch (distro.ToLowerInvariant())
        {
            case "deb":
            case "debian":
            case "ubuntu":
                purlType = "deb";
                break;
            case "rpm":
            case "fedora":
            case "rhel":
            case "centos":
                purlType = "rpm";
                break;
            case "apk":
            case "alpine":
                purlType = "apk";
                break;
            case "pacman":
            case "arch":
                purlType = "pacman";
                break;
            default:
                purlType = "generic";
                break;
        }

        var result = $"pkg:{purlType}/{EncodeComponent(distroName)}/{EncodeComponent(packageName)}@{EncodeComponent(version)}";

        return string.IsNullOrWhiteSpace(architecture)
            ? result
            : result + $"?arch={EncodeComponent(architecture)}";
    }

    /// <summary>
    /// PURL-style percent-encoding: full RFC 3986 escaping, then the two
    /// characters PURL treats as structural ('/' in names, '@' before versions)
    /// are restored.
    /// </summary>
    private static string EncodeComponent(string value)
    {
        var encoded = Uri.EscapeDataString(value);
        encoded = encoded.Replace("%2F", "/", StringComparison.Ordinal);
        return encoded.Replace("%40", "@", StringComparison.Ordinal);
    }
}
|
||||
@@ -10,6 +10,7 @@
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" />
|
||||
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Result of publishing a reachability witness.
/// </summary>
/// <param name="StatementHash">Hash of the in-toto statement.</param>
/// <param name="GraphHash">Hash of the rich graph.</param>
/// <param name="CasUri">CAS URI where graph is stored (if applicable).</param>
/// <param name="RekorLogIndex">Rekor transparency log index (if published).</param>
/// <param name="RekorLogId">Rekor log ID (if published).</param>
/// <param name="DsseEnvelopeBytes">Serialized DSSE envelope.</param>
/// <remarks>
/// NOTE(review): <paramref name="CasUri"/> and the Rekor fields are presumably
/// null when CAS storage / Rekor publication is disabled via
/// <c>ReachabilityWitnessOptions</c> (StoreInCas / PublishToRekor) — confirm
/// against the publisher implementation.
/// </remarks>
public sealed record ReachabilityWitnessPublishResult(
    string StatementHash,
    string GraphHash,
    string? CasUri,
    long? RekorLogIndex,
    string? RekorLogId,
    byte[] DsseEnvelopeBytes);

/// <summary>
/// Interface for publishing reachability witness attestations.
/// </summary>
public interface IReachabilityWitnessPublisher
{
    /// <summary>
    /// Publishes a reachability witness attestation for the given graph.
    /// </summary>
    /// <param name="graph">The rich graph to attest.</param>
    /// <param name="graphBytes">Canonical JSON bytes of the graph.</param>
    /// <param name="graphHash">Hash of the graph bytes.</param>
    /// <param name="subjectDigest">Subject artifact digest.</param>
    /// <param name="policyHash">Optional policy hash.</param>
    /// <param name="sourceCommit">Optional source commit.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Publication result with CAS URI and optional Rekor proof.</returns>
    Task<ReachabilityWitnessPublishResult> PublishAsync(
        RichGraph graph,
        byte[] graphBytes,
        string graphHash,
        string subjectDigest,
        string? policyHash = null,
        string? sourceCommit = null,
        CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,207 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Builds DSSE envelopes for reachability witness attestations.
/// Follows in-toto attestation framework with stellaops.reachabilityWitness predicate.
/// </summary>
public sealed class ReachabilityWitnessDsseBuilder
{
    private readonly ICryptoHash _cryptoHash;      // content-addressing hash service
    private readonly TimeProvider _timeProvider;   // injectable clock, enables deterministic tests

    // Serializer settings for the statement payload.
    // NOTE(review): despite the name, this is not a canonical-JSON scheme (no key
    // sorting); output is stable only because the record property order is fixed.
    // Confirm downstream hash/signature consumers expect exactly this byte layout.
    private static readonly JsonSerializerOptions CanonicalJsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
    };

    /// <summary>
    /// Creates a new DSSE builder.
    /// </summary>
    /// <param name="cryptoHash">Crypto hash service for content addressing.</param>
    /// <param name="timeProvider">Time provider for timestamps; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="cryptoHash"/> is null.</exception>
    public ReachabilityWitnessDsseBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds an in-toto statement from a RichGraph.
    /// </summary>
    /// <param name="graph">The rich graph to attest.</param>
    /// <param name="graphHash">The computed hash of the canonical graph JSON.</param>
    /// <param name="subjectDigest">The subject artifact digest (e.g., image digest).</param>
    /// <param name="graphCasUri">Optional CAS URI where graph is stored.</param>
    /// <param name="policyHash">Optional policy hash that was applied.</param>
    /// <param name="sourceCommit">Optional source commit.</param>
    /// <returns>An in-toto statement ready for DSSE signing.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="graph"/> is null.</exception>
    /// <exception cref="ArgumentException">When <paramref name="graphHash"/> or <paramref name="subjectDigest"/> is null/blank.</exception>
    public InTotoStatement BuildStatement(
        RichGraph graph,
        string graphHash,
        string subjectDigest,
        string? graphCasUri = null,
        string? policyHash = null,
        string? sourceCommit = null)
    {
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentException.ThrowIfNullOrWhiteSpace(graphHash);
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectDigest);

        var generatedAt = _timeProvider.GetUtcNow();

        var predicate = new ReachabilityWitnessStatement
        {
            GraphHash = graphHash,
            GraphCasUri = graphCasUri,
            GeneratedAt = generatedAt,
            // NOTE(review): language is taken from the first node only — assumes a
            // single-language graph; mixed-language graphs report only the first.
            Language = graph.Nodes.FirstOrDefault()?.Lang ?? "unknown",
            NodeCount = graph.Nodes.Count,
            EdgeCount = graph.Edges.Count,
            EntrypointCount = graph.Roots?.Count ?? 0,
            SinkCount = CountSinks(graph),
            ReachableSinkCount = CountReachableSinks(graph),
            PolicyHash = policyHash,
            AnalyzerVersion = graph.Analyzer.Version ?? "unknown",
            SourceCommit = sourceCommit,
            SubjectDigest = subjectDigest
        };

        return new InTotoStatement
        {
            Type = "https://in-toto.io/Statement/v1",
            Subject = new[]
            {
                new InTotoSubject
                {
                    Name = ExtractSubjectName(subjectDigest),
                    Digest = new Dictionary<string, string>
                    {
                        // e.g. "sha256:abc..." -> { "sha256": "abc..." }
                        [ExtractDigestAlgorithm(subjectDigest)] = ExtractDigestValue(subjectDigest)
                    }
                }
            },
            PredicateType = "https://stella.ops/reachabilityWitness/v1",
            Predicate = predicate
        };
    }

    /// <summary>
    /// Serializes an in-toto statement to canonical JSON.
    /// (See the note on <c>CanonicalJsonOptions</c>: stable, not strictly canonical.)
    /// </summary>
    public byte[] SerializeStatement(InTotoStatement statement)
    {
        ArgumentNullException.ThrowIfNull(statement);
        return JsonSerializer.SerializeToUtf8Bytes(statement, CanonicalJsonOptions);
    }

    /// <summary>
    /// Computes the hash of a serialized statement.
    /// </summary>
    public string ComputeStatementHash(byte[] statementBytes)
    {
        ArgumentNullException.ThrowIfNull(statementBytes);
        return _cryptoHash.ComputePrefixedHashForPurpose(statementBytes, HashPurpose.Graph);
    }

    // Count nodes with sink-related kinds (sql, crypto, deserialize, etc.).
    private static int CountSinks(RichGraph graph)
    {
        return graph.Nodes.Count(n => IsSinkKind(n.Kind));
    }

    // A sink is "reachable" here if it has at least one incoming edge.
    // NOTE(review): this is an over-approximation — an incoming edge does not
    // prove reachability from an entrypoint; a node on a disconnected island
    // still counts. Confirm this matches the witness predicate's definition.
    private static int CountReachableSinks(RichGraph graph)
    {
        var nodesWithIncoming = new HashSet<string>(StringComparer.Ordinal);
        foreach (var edge in graph.Edges)
        {
            if (!string.IsNullOrEmpty(edge.To))
            {
                nodesWithIncoming.Add(edge.To);
            }
        }

        return graph.Nodes.Count(n =>
            IsSinkKind(n.Kind) &&
            nodesWithIncoming.Contains(n.Id));
    }

    // Recognize common sink kinds from the taxonomy (case-insensitive; null -> false).
    private static bool IsSinkKind(string? kind)
    {
        return kind?.ToLowerInvariant() switch
        {
            "sink" => true,
            "sql" => true,
            "crypto" => true,
            "deserialize" => true,
            "file" => true,
            "network" => true,
            "command" => true,
            "reflection" => true,
            _ => false
        };
    }

    // For image digests like "sha256:abc123", return the full string.
    // For other formats, try to extract a meaningful name.
    private static string ExtractSubjectName(string subjectDigest)
    {
        return subjectDigest;
    }

    // "sha256:abc" -> "sha256"; no colon -> assume "sha256".
    private static string ExtractDigestAlgorithm(string subjectDigest)
    {
        var colonIndex = subjectDigest.IndexOf(':');
        return colonIndex > 0 ? subjectDigest[..colonIndex] : "sha256";
    }

    // "sha256:abc" -> "abc"; no colon -> the whole string is the value.
    private static string ExtractDigestValue(string subjectDigest)
    {
        var colonIndex = subjectDigest.IndexOf(':');
        return colonIndex > 0 ? subjectDigest[(colonIndex + 1)..] : subjectDigest;
    }
}
|
||||
|
||||
/// <summary>
/// In-toto Statement structure per https://github.com/in-toto/attestation.
/// Property names are fixed by the spec via <see cref="JsonPropertyNameAttribute"/>;
/// do not rename without breaking verifier interoperability.
/// </summary>
public sealed record InTotoStatement
{
    /// <summary>Statement type (always "https://in-toto.io/Statement/v1")</summary>
    [JsonPropertyName("_type")]
    public required string Type { get; init; }

    /// <summary>Array of subjects this attestation refers to</summary>
    [JsonPropertyName("subject")]
    public required InTotoSubject[] Subject { get; init; }

    /// <summary>URI identifying the predicate type</summary>
    [JsonPropertyName("predicateType")]
    public required string PredicateType { get; init; }

    /// <summary>
    /// The predicate object (type varies by predicateType).
    /// Declared as <c>object</c>, so System.Text.Json serializes it using the
    /// runtime type of the assigned instance.
    /// </summary>
    [JsonPropertyName("predicate")]
    public required object Predicate { get; init; }
}

/// <summary>
/// In-toto Subject structure.
/// </summary>
public sealed record InTotoSubject
{
    /// <summary>Subject name (e.g., artifact path or identifier)</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Map of digest algorithm to digest value (e.g. "sha256" -> hex)</summary>
    [JsonPropertyName("digest")]
    public required Dictionary<string, string> Digest { get; init; }
}
|
||||
@@ -0,0 +1,45 @@
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Options controlling how reachability witness attestations are produced.
/// </summary>
public sealed class ReachabilityWitnessOptions
{
    public const string SectionName = "Scanner:ReachabilityWitness";

    /// <summary>Enables or disables DSSE attestation generation (on by default).</summary>
    public bool Enabled { get; set; } = true;

    /// <summary>Selected attestation tier; see <see cref="AttestationTier"/>.</summary>
    public AttestationTier Tier { get; set; } = AttestationTier.Standard;

    /// <summary>Controls publication to the Rekor transparency log (on by default).</summary>
    public bool PublishToRekor { get; set; } = true;

    /// <summary>Controls whether the graph is persisted to content-addressed storage.</summary>
    public bool StoreInCas { get; set; } = true;

    /// <summary>Upper bound on attested edge bundles (applies to the standard tier).</summary>
    public int MaxEdgeBundles { get; set; } = 5;

    /// <summary>Signing key identifier; the default key is used when null.</summary>
    public string? SigningKeyId { get; set; }
}
|
||||
|
||||
/// <summary>
/// Attestation tiers, as described in hybrid-attestation.md.
/// Explicit values pin the wire/config representation.
/// </summary>
public enum AttestationTier
{
    /// <summary>Standard tier: graph DSSE plus Rekor, edge bundles optional.</summary>
    Standard = 0,

    /// <summary>Regulated tier: full attestation with strict signing requirements.</summary>
    Regulated = 1,

    /// <summary>Air-gapped tier: everything stays local; Rekor is never contacted.</summary>
    AirGapped = 2,

    /// <summary>Development tier: minimal attestation, intended for testing only.</summary>
    Dev = 3
}
|
||||
@@ -0,0 +1,147 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Publishes reachability witness attestations to CAS and Rekor.
/// </summary>
public sealed class ReachabilityWitnessPublisher : IReachabilityWitnessPublisher
{
    private readonly ReachabilityWitnessOptions _options;
    private readonly ReachabilityWitnessDsseBuilder _dsseBuilder;
    private readonly ICryptoHash _cryptoHash;
    private readonly ILogger<ReachabilityWitnessPublisher> _logger;

    /// <summary>
    /// Creates a new reachability witness publisher.
    /// </summary>
    public ReachabilityWitnessPublisher(
        IOptions<ReachabilityWitnessOptions> options,
        ICryptoHash cryptoHash,
        ILogger<ReachabilityWitnessPublisher> logger,
        TimeProvider? timeProvider = null)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(cryptoHash);
        ArgumentNullException.ThrowIfNull(logger);

        _options = options.Value;
        _cryptoHash = cryptoHash;
        _logger = logger;
        _dsseBuilder = new ReachabilityWitnessDsseBuilder(cryptoHash, timeProvider);
    }

    /// <inheritdoc />
    public async Task<ReachabilityWitnessPublishResult> PublishAsync(
        RichGraph graph,
        byte[] graphBytes,
        string graphHash,
        string subjectDigest,
        string? policyHash = null,
        string? sourceCommit = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentNullException.ThrowIfNull(graphBytes);
        ArgumentException.ThrowIfNullOrWhiteSpace(graphHash);
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectDigest);

        // When attestation is disabled, report an empty result so callers can
        // proceed without branching on configuration themselves.
        if (!_options.Enabled)
        {
            _logger.LogDebug("Reachability witness attestation is disabled");
            return new ReachabilityWitnessPublishResult(
                StatementHash: string.Empty,
                GraphHash: graphHash,
                CasUri: null,
                RekorLogIndex: null,
                RekorLogId: null,
                DsseEnvelopeBytes: Array.Empty<byte>());
        }

        // Step 1: persist the graph to content-addressed storage first so the
        // statement can embed its CAS URI.
        var casUri = _options.StoreInCas
            ? await StoreInCasAsync(graphBytes, graphHash, cancellationToken).ConfigureAwait(false)
            : null;

        // Step 2: assemble the in-toto statement and hash its serialization.
        var statement = _dsseBuilder.BuildStatement(
            graph,
            graphHash,
            subjectDigest,
            casUri,
            policyHash,
            sourceCommit);

        var statementBytes = _dsseBuilder.SerializeStatement(statement);
        var statementHash = _dsseBuilder.ComputeStatementHash(statementBytes);

        _logger.LogInformation(
            "Built reachability witness statement: hash={StatementHash}, nodes={NodeCount}, edges={EdgeCount}",
            statementHash,
            graph.Nodes.Count,
            graph.Edges.Count);

        // Step 3: wrap the statement in a DSSE envelope (signing is delegated
        // to the Attestor service; see CreateDsseEnvelope).
        var envelope = CreateDsseEnvelope(statementBytes);

        // Step 4: record in the Rekor transparency log unless configuration or
        // the air-gapped tier forbids it.
        long? rekorLogIndex = null;
        string? rekorLogId = null;

        if (_options.PublishToRekor && _options.Tier != AttestationTier.AirGapped)
        {
            (rekorLogIndex, rekorLogId) = await SubmitToRekorAsync(envelope, cancellationToken).ConfigureAwait(false);
        }
        else if (_options.Tier == AttestationTier.AirGapped)
        {
            _logger.LogDebug("Skipping Rekor submission (air-gapped tier)");
        }

        return new ReachabilityWitnessPublishResult(
            StatementHash: statementHash,
            GraphHash: graphHash,
            CasUri: casUri,
            RekorLogIndex: rekorLogIndex,
            RekorLogId: rekorLogId,
            DsseEnvelopeBytes: envelope);
    }

    // Persists the serialized graph to CAS and returns its URI.
    private Task<string?> StoreInCasAsync(byte[] graphBytes, string graphHash, CancellationToken cancellationToken)
    {
        // TODO: Integrate with actual CAS storage (BID-007)
        // Placeholder: derive a local CAS URI from the content hash; nothing
        // is actually written yet.
        var casUri = $"cas://local/{graphHash}";
        _logger.LogDebug("Stored graph in CAS: {CasUri}", casUri);
        return Task.FromResult<string?>(casUri);
    }

    // Wraps the statement bytes in a DSSE envelope. Currently unsigned.
    private byte[] CreateDsseEnvelope(byte[] statementBytes)
    {
        // TODO: Integrate with Attestor DSSE signing service (RWD-008)
        // Placeholder: emit a minimal unsigned envelope; in production the
        // Attestor service signs the statement and fills in the signatures.
        var envelope = new
        {
            payloadType = "application/vnd.in-toto+json",
            payload = Convert.ToBase64String(statementBytes),
            signatures = Array.Empty<object>() // Will be populated by Attestor
        };

        return System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(envelope);
    }

    // Submits the envelope to Rekor and returns (logIndex, logId).
    private Task<(long? logIndex, string? logId)> SubmitToRekorAsync(byte[] dsseEnvelope, CancellationToken cancellationToken)
    {
        // TODO: Integrate with Rekor backend (RWD-008)
        // Placeholder: no submission happens; both values are null for now.
        _logger.LogDebug("Rekor submission placeholder - actual integration pending");
        return Task.FromResult<(long?, string?)>((null, null));
    }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Reachability witness statement used as a DSSE predicate.
/// Conforms to the stella.ops/reachabilityWitness@v1 schema.
/// </summary>
public sealed record ReachabilityWitnessStatement
{
    /// <summary>Schema identifier; fixed to the v1 witness schema.</summary>
    [JsonPropertyName("schema")]
    public string Schema { get; init; } = "stella.ops/reachabilityWitness@v1";

    /// <summary>BLAKE3 hash of the canonical RichGraph JSON.</summary>
    [JsonPropertyName("graphHash")]
    public required string GraphHash { get; init; }

    /// <summary>CAS URI where the graph is stored, if persisted.</summary>
    [JsonPropertyName("graphCasUri")]
    public string? GraphCasUri { get; init; }

    /// <summary>Timestamp of the analysis run (ISO-8601).</summary>
    [JsonPropertyName("generatedAt")]
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>Primary language of the analyzed code.</summary>
    [JsonPropertyName("language")]
    public required string Language { get; init; }

    /// <summary>Node count of the call graph.</summary>
    [JsonPropertyName("nodeCount")]
    public required int NodeCount { get; init; }

    /// <summary>Edge count of the call graph.</summary>
    [JsonPropertyName("edgeCount")]
    public required int EdgeCount { get; init; }

    /// <summary>How many entrypoints were identified.</summary>
    [JsonPropertyName("entrypointCount")]
    public required int EntrypointCount { get; init; }

    /// <summary>Total sinks in the taxonomy.</summary>
    [JsonPropertyName("sinkCount")]
    public required int SinkCount { get; init; }

    /// <summary>How many of those sinks are reachable.</summary>
    [JsonPropertyName("reachableSinkCount")]
    public required int ReachableSinkCount { get; init; }

    /// <summary>Hash of the applied policy, when one was applied.</summary>
    [JsonPropertyName("policyHash")]
    public string? PolicyHash { get; init; }

    /// <summary>Version of the analyzer that produced this statement.</summary>
    [JsonPropertyName("analyzerVersion")]
    public required string AnalyzerVersion { get; init; }

    /// <summary>Git commit of the analyzed source, if known.</summary>
    [JsonPropertyName("sourceCommit")]
    public string? SourceCommit { get; init; }

    /// <summary>Subject artifact (image digest or file hash).</summary>
    [JsonPropertyName("subjectDigest")]
    public required string SubjectDigest { get; init; }
}
|
||||
@@ -0,0 +1,175 @@
|
||||
namespace StellaOps.Scanner.Reachability.Witnesses;
|
||||
|
||||
/// <summary>
/// Builds path witnesses from reachability analysis results.
/// </summary>
public interface IPathWitnessBuilder
{
    /// <summary>
    /// Creates a path witness for a reachable vulnerability.
    /// </summary>
    /// <param name="request">The witness creation request containing all necessary context.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>A signed path witness or null if the path is not reachable.</returns>
    Task<PathWitness?> BuildAsync(PathWitnessRequest request, CancellationToken cancellationToken = default);

    /// <summary>
    /// Creates multiple path witnesses for all reachable paths to a vulnerability.
    /// </summary>
    /// <remarks>
    /// Witnesses are produced lazily as an async stream; callers may stop
    /// enumeration early to cap the amount of work performed.
    /// </remarks>
    /// <param name="request">The batch witness request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>All generated witnesses.</returns>
    IAsyncEnumerable<PathWitness> BuildAllAsync(BatchWitnessRequest request, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Request to build a single path witness.
/// </summary>
public sealed record PathWitnessRequest
{
    /// <summary>SBOM digest providing artifact context.</summary>
    public required string SbomDigest { get; init; }

    /// <summary>Package URL of the vulnerable component.</summary>
    public required string ComponentPurl { get; init; }

    /// <summary>Vulnerability identifier (e.g. "CVE-2024-12345").</summary>
    public required string VulnId { get; init; }

    /// <summary>Source of the vulnerability record (e.g. "NVD").</summary>
    public required string VulnSource { get; init; }

    /// <summary>Version range affected by the vulnerability.</summary>
    public required string AffectedRange { get; init; }

    /// <summary>Symbol ID of the entrypoint to start from.</summary>
    public required string EntrypointSymbolId { get; init; }

    /// <summary>Entrypoint category (http, grpc, cli, ...).</summary>
    public required string EntrypointKind { get; init; }

    /// <summary>Display name of the entrypoint.</summary>
    public required string EntrypointName { get; init; }

    /// <summary>Symbol ID of the target sink.</summary>
    public required string SinkSymbolId { get; init; }

    /// <summary>Taxonomy type of the sink.</summary>
    public required string SinkType { get; init; }

    /// <summary>Call graph used for path finding.</summary>
    public required RichGraph CallGraph { get; init; }

    /// <summary>BLAKE3 digest of the call graph.</summary>
    public required string CallgraphDigest { get; init; }

    /// <summary>Attack surface digest, when available.</summary>
    public string? SurfaceDigest { get; init; }

    /// <summary>Analysis configuration digest, when available.</summary>
    public string? AnalysisConfigDigest { get; init; }

    /// <summary>Build identifier, when available.</summary>
    public string? BuildId { get; init; }
}
|
||||
|
||||
/// <summary>
/// Request to build witnesses for all paths to a vulnerability.
/// </summary>
public sealed record BatchWitnessRequest
{
    /// <summary>SBOM digest providing artifact context.</summary>
    public required string SbomDigest { get; init; }

    /// <summary>Package URL of the vulnerable component.</summary>
    public required string ComponentPurl { get; init; }

    /// <summary>Vulnerability identifier.</summary>
    public required string VulnId { get; init; }

    /// <summary>Source of the vulnerability record.</summary>
    public required string VulnSource { get; init; }

    /// <summary>Version range affected by the vulnerability.</summary>
    public required string AffectedRange { get; init; }

    /// <summary>Symbol ID of the sink to find paths to.</summary>
    public required string SinkSymbolId { get; init; }

    /// <summary>Taxonomy type of the sink.</summary>
    public required string SinkType { get; init; }

    /// <summary>Call graph used for path finding.</summary>
    public required RichGraph CallGraph { get; init; }

    /// <summary>BLAKE3 digest of the call graph.</summary>
    public required string CallgraphDigest { get; init; }

    /// <summary>Cap on the number of witnesses generated (default 10).</summary>
    public int MaxWitnesses { get; init; } = 10;

    /// <summary>Attack surface digest, when available.</summary>
    public string? SurfaceDigest { get; init; }

    /// <summary>Analysis configuration digest, when available.</summary>
    public string? AnalysisConfigDigest { get; init; }

    /// <summary>Build identifier, when available.</summary>
    public string? BuildId { get; init; }
}
|
||||
@@ -0,0 +1,256 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Witnesses;
|
||||
|
||||
/// <summary>
/// A DSSE-signable path witness documenting the call path from an entrypoint
/// to a vulnerable sink. Conforms to the stellaops.witness.v1 schema.
/// </summary>
public sealed record PathWitness
{
    /// <summary>Schema version identifier.</summary>
    [JsonPropertyName("witness_schema")]
    public string WitnessSchema { get; init; } = Witnesses.WitnessSchema.Version;

    /// <summary>Content-addressed witness ID (e.g. "wit:sha256:...").</summary>
    [JsonPropertyName("witness_id")]
    public required string WitnessId { get; init; }

    /// <summary>SBOM/component context the witness applies to.</summary>
    [JsonPropertyName("artifact")]
    public required WitnessArtifact Artifact { get; init; }

    /// <summary>Vulnerability addressed by this witness.</summary>
    [JsonPropertyName("vuln")]
    public required WitnessVuln Vuln { get; init; }

    /// <summary>Entrypoint where the reachable path begins.</summary>
    [JsonPropertyName("entrypoint")]
    public required WitnessEntrypoint Entrypoint { get; init; }

    /// <summary>Call path ordered caller to callee, entrypoint first.</summary>
    [JsonPropertyName("path")]
    public required IReadOnlyList<PathStep> Path { get; init; }

    /// <summary>Vulnerable sink at the end of the path.</summary>
    [JsonPropertyName("sink")]
    public required WitnessSink Sink { get; init; }

    /// <summary>Mitigating controls (guards, auth, validation) found along the path, if any.</summary>
    [JsonPropertyName("gates")]
    public IReadOnlyList<DetectedGate>? Gates { get; init; }

    /// <summary>Digests and build context for reproducing the analysis.</summary>
    [JsonPropertyName("evidence")]
    public required WitnessEvidence Evidence { get; init; }

    /// <summary>Generation timestamp (UTC ISO-8601).</summary>
    [JsonPropertyName("observed_at")]
    public required DateTimeOffset ObservedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Identifies the artifact (SBOM plus component) a witness applies to.
/// </summary>
public sealed record WitnessArtifact
{
    /// <summary>SHA-256 digest of the SBOM.</summary>
    [JsonPropertyName("sbom_digest")]
    public required string SbomDigest { get; init; }

    /// <summary>Package URL of the vulnerable component.</summary>
    [JsonPropertyName("component_purl")]
    public required string ComponentPurl { get; init; }
}
|
||||
|
||||
/// <summary>
/// Identifies the vulnerability a witness concerns.
/// </summary>
public sealed record WitnessVuln
{
    /// <summary>Vulnerability identifier (e.g. "CVE-2024-12345").</summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>Advisory source (e.g. "NVD", "OSV", "GHSA").</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Expression describing the affected version range.</summary>
    [JsonPropertyName("affected_range")]
    public required string AffectedRange { get; init; }
}
|
||||
|
||||
/// <summary>
/// The entrypoint at which a reachability path starts.
/// </summary>
public sealed record WitnessEntrypoint
{
    /// <summary>Entrypoint category (http, grpc, cli, job, event).</summary>
    [JsonPropertyName("kind")]
    public required string Kind { get; init; }

    /// <summary>Display name (e.g. "GET /api/users/{id}").</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Canonical symbol ID of the entrypoint.</summary>
    [JsonPropertyName("symbol_id")]
    public required string SymbolId { get; init; }
}
|
||||
|
||||
/// <summary>
/// One hop in the call path between entrypoint and sink.
/// </summary>
public sealed record PathStep
{
    /// <summary>Display name of the symbol at this hop.</summary>
    [JsonPropertyName("symbol")]
    public required string Symbol { get; init; }

    /// <summary>Canonical symbol ID.</summary>
    [JsonPropertyName("symbol_id")]
    public required string SymbolId { get; init; }

    /// <summary>Source file path; null for external/binary symbols.</summary>
    [JsonPropertyName("file")]
    public string? File { get; init; }

    /// <summary>1-based line number in the source file, if known.</summary>
    [JsonPropertyName("line")]
    public int? Line { get; init; }

    /// <summary>1-based column number in the source file, if known.</summary>
    [JsonPropertyName("column")]
    public int? Column { get; init; }
}
|
||||
|
||||
/// <summary>
/// The vulnerable sink reached at the end of the path.
/// </summary>
public sealed record WitnessSink
{
    /// <summary>Display name of the sink symbol.</summary>
    [JsonPropertyName("symbol")]
    public required string Symbol { get; init; }

    /// <summary>Canonical symbol ID.</summary>
    [JsonPropertyName("symbol_id")]
    public required string SymbolId { get; init; }

    /// <summary>Sink taxonomy type (e.g. "deserialization", "sql_injection", "path_traversal").</summary>
    [JsonPropertyName("sink_type")]
    public required string SinkType { get; init; }
}
|
||||
|
||||
/// <summary>
/// A mitigating control (guard) detected along the path.
/// </summary>
public sealed record DetectedGate
{
    /// <summary>Gate category (authRequired, inputValidation, rateLimited, ...).</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }

    /// <summary>Symbol implementing the gate.</summary>
    [JsonPropertyName("guard_symbol")]
    public required string GuardSymbol { get; init; }

    /// <summary>Detection confidence in the range 0.0 - 1.0.</summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>Optional human-readable detail about the gate.</summary>
    [JsonPropertyName("detail")]
    public string? Detail { get; init; }
}
|
||||
|
||||
/// <summary>
/// Evidence digests tying the witness to the exact analysis inputs,
/// for reproducibility and auditing.
/// </summary>
public sealed record WitnessEvidence
{
    /// <summary>BLAKE3 digest of the call graph that was analyzed.</summary>
    [JsonPropertyName("callgraph_digest")]
    public required string CallgraphDigest { get; init; }

    /// <summary>SHA-256 digest of the attack surface manifest, if any.</summary>
    [JsonPropertyName("surface_digest")]
    public string? SurfaceDigest { get; init; }

    /// <summary>SHA-256 digest of the analysis configuration, if any.</summary>
    [JsonPropertyName("analysis_config_digest")]
    public string? AnalysisConfigDigest { get; init; }

    /// <summary>Build identifier of the analyzed artifact, if known.</summary>
    [JsonPropertyName("build_id")]
    public string? BuildId { get; init; }
}
|
||||
@@ -0,0 +1,378 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Witnesses;
|
||||
|
||||
/// <summary>
|
||||
/// Builds path witnesses from reachability analysis results.
|
||||
/// </summary>
|
||||
public sealed class PathWitnessBuilder : IPathWitnessBuilder
|
||||
{
|
||||
private readonly ICryptoHash _cryptoHash;
|
||||
private readonly CompositeGateDetector? _gateDetector;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
/// <summary>
/// Creates a new PathWitnessBuilder.
/// </summary>
/// <param name="cryptoHash">Crypto hash service for witness ID generation.</param>
/// <param name="timeProvider">Time provider for timestamps.</param>
/// <param name="gateDetector">Optional gate detector for identifying guards along paths.</param>
public PathWitnessBuilder(
    ICryptoHash cryptoHash,
    TimeProvider timeProvider,
    CompositeGateDetector? gateDetector = null)
{
    ArgumentNullException.ThrowIfNull(cryptoHash);
    ArgumentNullException.ThrowIfNull(timeProvider);

    _cryptoHash = cryptoHash;
    _timeProvider = timeProvider;
    _gateDetector = gateDetector; // May be null: gate detection is optional.
}
|
||||
|
||||
/// <inheritdoc />
public async Task<PathWitness?> BuildAsync(PathWitnessRequest request, CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(request);

    // Locate a call path from the entrypoint to the sink (shortest-first BFS).
    var steps = FindPath(request.CallGraph, request.EntrypointSymbolId, request.SinkSymbolId);
    if (steps is null || steps.Count == 0)
    {
        return null; // Sink is not reachable from this entrypoint.
    }

    // The graph's first node determines the language passed to gate detection.
    var language = request.CallGraph.Nodes?.FirstOrDefault()?.Lang ?? "unknown";

    // Identify guards (auth, validation, ...) along the discovered path.
    var gates = _gateDetector is not null
        ? await DetectGatesAsync(request.CallGraph, steps, language, cancellationToken).ConfigureAwait(false)
        : null;

    // Resolve a display name for the sink symbol, falling back to its ID.
    var sinkNode = request.CallGraph.Nodes?.FirstOrDefault(n => n.SymbolId == request.SinkSymbolId);
    var sinkName = sinkNode?.Display ?? sinkNode?.Symbol?.Demangled ?? request.SinkSymbolId;

    var draft = new PathWitness
    {
        WitnessId = string.Empty, // Placeholder until the content hash is computed below.
        Artifact = new WitnessArtifact
        {
            SbomDigest = request.SbomDigest,
            ComponentPurl = request.ComponentPurl
        },
        Vuln = new WitnessVuln
        {
            Id = request.VulnId,
            Source = request.VulnSource,
            AffectedRange = request.AffectedRange
        },
        Entrypoint = new WitnessEntrypoint
        {
            Kind = request.EntrypointKind,
            Name = request.EntrypointName,
            SymbolId = request.EntrypointSymbolId
        },
        Path = steps,
        Sink = new WitnessSink
        {
            Symbol = sinkName,
            SymbolId = request.SinkSymbolId,
            SinkType = request.SinkType
        },
        Gates = gates,
        Evidence = new WitnessEvidence
        {
            CallgraphDigest = request.CallgraphDigest,
            SurfaceDigest = request.SurfaceDigest,
            AnalysisConfigDigest = request.AnalysisConfigDigest,
            BuildId = request.BuildId
        },
        ObservedAt = _timeProvider.GetUtcNow()
    };

    // The witness ID is a content address over the canonical witness body.
    return draft with { WitnessId = ComputeWitnessId(draft) };
}
|
||||
|
||||
/// <inheritdoc />
public async IAsyncEnumerable<PathWitness> BuildAllAsync(
    BatchWitnessRequest request,
    [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(request);

    // Entrypoints are the graph roots; nothing to do when there are none.
    var roots = request.CallGraph.Roots;
    if (roots is null || roots.Count == 0)
    {
        yield break;
    }

    var produced = 0;

    foreach (var root in roots)
    {
        // Stop once the configured witness cap is reached.
        if (produced >= request.MaxWitnesses)
        {
            yield break;
        }

        cancellationToken.ThrowIfCancellationRequested();

        // Resolve the root's node for symbol and display information.
        var rootNode = request.CallGraph.Nodes?.FirstOrDefault(n => n.Id == root.Id);

        var perRootRequest = new PathWitnessRequest
        {
            SbomDigest = request.SbomDigest,
            ComponentPurl = request.ComponentPurl,
            VulnId = request.VulnId,
            VulnSource = request.VulnSource,
            AffectedRange = request.AffectedRange,
            EntrypointSymbolId = rootNode?.SymbolId ?? root.Id,
            EntrypointKind = root.Phase ?? "unknown",
            EntrypointName = rootNode?.Display ?? root.Source ?? root.Id,
            SinkSymbolId = request.SinkSymbolId,
            SinkType = request.SinkType,
            CallGraph = request.CallGraph,
            CallgraphDigest = request.CallgraphDigest,
            SurfaceDigest = request.SurfaceDigest,
            AnalysisConfigDigest = request.AnalysisConfigDigest,
            BuildId = request.BuildId
        };

        var witness = await BuildAsync(perRootRequest, cancellationToken).ConfigureAwait(false);
        if (witness is not null)
        {
            produced++;
            yield return witness;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Finds the shortest path from source to target using BFS.
/// </summary>
/// <param name="graph">Call graph supplying nodes and edges.</param>
/// <param name="sourceSymbolId">Symbol ID of the entrypoint.</param>
/// <param name="targetSymbolId">Symbol ID of the sink.</param>
/// <returns>Ordered steps from source to target, or null if unreachable.</returns>
private List<PathStep>? FindPath(RichGraph graph, string sourceSymbolId, string targetSymbolId)
{
    if (graph.Nodes is null || graph.Edges is null)
    {
        return null;
    }

    // Build node ID -> symbol ID mapping. Fix over the previous ToDictionary
    // version: nodes without a symbol ID are skipped (SymbolId is treated as
    // nullable in ReconstructPath), which previously produced null dictionary
    // values and an ArgumentNullException when used as adjacency keys below;
    // TryAdd also tolerates duplicate node IDs (first occurrence wins)
    // instead of throwing.
    var nodeIdToSymbolId = new Dictionary<string, string>(StringComparer.Ordinal);
    foreach (var node in graph.Nodes)
    {
        if (!string.IsNullOrEmpty(node.Id) && !string.IsNullOrEmpty(node.SymbolId))
        {
            nodeIdToSymbolId.TryAdd(node.Id, node.SymbolId);
        }
    }

    // Build adjacency list using From/To (node IDs) mapped to symbol IDs.
    var adjacency = new Dictionary<string, List<string>>(StringComparer.Ordinal);
    foreach (var edge in graph.Edges)
    {
        if (string.IsNullOrEmpty(edge.From) || string.IsNullOrEmpty(edge.To))
        {
            continue;
        }

        // Edges whose endpoints have no usable symbol ID are ignored.
        if (!nodeIdToSymbolId.TryGetValue(edge.From, out var fromSymbolId) ||
            !nodeIdToSymbolId.TryGetValue(edge.To, out var toSymbolId))
        {
            continue;
        }

        if (!adjacency.TryGetValue(fromSymbolId, out var neighbors))
        {
            neighbors = new List<string>();
            adjacency[fromSymbolId] = neighbors;
        }
        neighbors.Add(toSymbolId);
    }

    // BFS to find the shortest path.
    var visited = new HashSet<string>(StringComparer.Ordinal);
    var parent = new Dictionary<string, string>(StringComparer.Ordinal);
    var queue = new Queue<string>();

    queue.Enqueue(sourceSymbolId);
    visited.Add(sourceSymbolId);

    while (queue.Count > 0)
    {
        var current = queue.Dequeue();

        if (current.Equals(targetSymbolId, StringComparison.Ordinal))
        {
            // Reconstruct path from the parent map.
            return ReconstructPath(graph, parent, sourceSymbolId, targetSymbolId);
        }

        if (!adjacency.TryGetValue(current, out var neighbors))
        {
            continue;
        }

        // Neighbors are visited in ordinal order so the resulting path is
        // deterministic for a given graph.
        foreach (var neighbor in neighbors.Order(StringComparer.Ordinal))
        {
            if (visited.Add(neighbor))
            {
                parent[neighbor] = current;
                queue.Enqueue(neighbor);
            }
        }
    }

    return null; // No path found
}
|
||||
|
||||
/// <summary>
/// Reconstructs the source-to-target path from the BFS parent map, walking
/// backwards from <paramref name="target"/> and reversing at the end.
/// Source file/line/column are attached to each step when the node's
/// attributes carry them.
/// </summary>
private static List<PathStep> ReconstructPath(
    RichGraph graph,
    Dictionary<string, string> parent,
    string source,
    string target)
{
    var path = new List<PathStep>();

    // Index nodes by symbol ID. Skip nodes without a symbol ID and tolerate
    // duplicates (first occurrence wins); the previous ToDictionary call
    // threw ArgumentException when two nodes both had a null SymbolId
    // (both mapped to the "" key).
    var nodeMap = new Dictionary<string, RichGraphNode>(StringComparer.Ordinal);
    if (graph.Nodes is not null)
    {
        foreach (var n in graph.Nodes)
        {
            if (!string.IsNullOrEmpty(n.SymbolId))
            {
                nodeMap.TryAdd(n.SymbolId, n);
            }
        }
    }

    var current = target;
    while (current is not null)
    {
        nodeMap.TryGetValue(current, out var node);

        // Extract source file/line/column from Attributes if available.
        string? file = null;
        int? line = null;
        int? column = null;

        if (node?.Attributes is not null)
        {
            if (node.Attributes.TryGetValue("file", out var fileValue))
            {
                file = fileValue;
            }
            if (node.Attributes.TryGetValue("line", out var lineValue) && int.TryParse(lineValue, out var parsedLine))
            {
                line = parsedLine;
            }
            if (node.Attributes.TryGetValue("column", out var colValue) && int.TryParse(colValue, out var parsedCol))
            {
                column = parsedCol;
            }
        }

        path.Add(new PathStep
        {
            Symbol = node?.Display ?? node?.Symbol?.Demangled ?? current,
            SymbolId = current,
            File = file,
            Line = line,
            Column = column
        });

        if (current.Equals(source, StringComparison.Ordinal))
        {
            break;
        }

        // TryGetValue leaves `current` null when the chain ends, which
        // terminates the loop even if `source` was never reached.
        parent.TryGetValue(current, out current);
    }

    path.Reverse(); // Walked target -> source; flip to source -> target order.
    return path;
}
|
||||
|
||||
/// <summary>
/// Detects gates along the path using the composite gate detector.
/// Returns <c>null</c> when no detector is configured, the path is empty,
/// or no gates were found.
/// </summary>
private async Task<List<DetectedGate>?> DetectGatesAsync(
    RichGraph graph,
    List<PathStep> path,
    string language,
    CancellationToken cancellationToken)
{
    if (_gateDetector is null || path.Count == 0)
    {
        return null;
    }

    // Index nodes by symbol ID. Skip nodes without a symbol ID and tolerate
    // duplicates (first occurrence wins); ToDictionary would throw on a
    // graph with two null-SymbolId nodes.
    var nodeMap = new Dictionary<string, RichGraphNode>(StringComparer.Ordinal);
    if (graph.Nodes is not null)
    {
        foreach (var n in graph.Nodes)
        {
            if (!string.IsNullOrEmpty(n.SymbolId))
            {
                nodeMap.TryAdd(n.SymbolId, n);
            }
        }
    }

    // Build source file map for the path (only steps whose node attributes
    // carry a "file" entry contribute).
    var sourceFiles = new Dictionary<string, string>(StringComparer.Ordinal);
    foreach (var step in path)
    {
        if (nodeMap.TryGetValue(step.SymbolId, out var node) &&
            node.Attributes is not null &&
            node.Attributes.TryGetValue("file", out var file))
        {
            sourceFiles[step.SymbolId] = file;
        }
    }

    var context = new CallPathContext
    {
        CallPath = path.Select(s => s.SymbolId).ToList(),
        SourceFiles = sourceFiles.Count > 0 ? sourceFiles : null,
        Language = language
    };

    var result = await _gateDetector.DetectAllAsync(context, cancellationToken).ConfigureAwait(false);

    if (result.Gates.Count == 0)
    {
        return null;
    }

    return result.Gates.Select(g => new DetectedGate
    {
        Type = g.Type.ToString(),
        GuardSymbol = g.GuardSymbol,
        Confidence = g.Confidence,
        Detail = g.Detail
    }).ToList();
}
|
||||
|
||||
/// <summary>
/// Computes a content-addressed witness ID.
/// The ID is a hash of a canonical projection of the witness — everything
/// except the witness ID itself — prefixed with
/// <see cref="WitnessSchema.WitnessIdPrefix"/>. Because serialization uses
/// the shared <c>JsonOptions</c>, identical witness content always yields
/// the same ID.
/// </summary>
private string ComputeWitnessId(PathWitness witness)
{
    // Create a canonical representation for hashing (excluding witness_id itself).
    // Anonymous-type member order is fixed here, so the serialized bytes are
    // stable across runs.
    var canonical = new
    {
        witness.WitnessSchema,
        witness.Artifact,
        witness.Vuln,
        witness.Entrypoint,
        witness.Path,
        witness.Sink,
        witness.Evidence
    };

    var json = JsonSerializer.SerializeToUtf8Bytes(canonical, JsonOptions);
    var hash = _cryptoHash.ComputePrefixedHashForPurpose(json, HashPurpose.Content);

    return $"{WitnessSchema.WitnessIdPrefix}{hash}";
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
namespace StellaOps.Scanner.Reachability.Witnesses;
|
||||
|
||||
/// <summary>
/// Constants for the stellaops.witness.v1 schema. Centralizes the schema
/// version string, witness ID prefix, and DSSE payload type so producers and
/// verifiers agree on wire-format identifiers.
/// </summary>
public static class WitnessSchema
{
    /// <summary>
    /// Current witness schema version.
    /// </summary>
    public const string Version = "stellaops.witness.v1";

    /// <summary>
    /// Prefix for witness IDs (content-addressed IDs take the form "wit:&lt;hash&gt;").
    /// </summary>
    public const string WitnessIdPrefix = "wit:";

    /// <summary>
    /// Default DSSE payload type for witnesses.
    /// </summary>
    public const string DssePayloadType = "application/vnd.stellaops.witness.v1+json";
}
|
||||
@@ -0,0 +1,216 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// BoundaryProof.cs
|
||||
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
|
||||
// Description: Boundary proof model for surface exposure and security controls.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Detection;
|
||||
|
||||
/// <summary>
/// Boundary proof describing surface exposure, authentication, and security controls.
/// Used to determine the attack surface and protective measures for a finding.
/// </summary>
public sealed record BoundaryProof
{
    /// <summary>
    /// Kind of boundary (network, file, ipc, process).
    /// </summary>
    [JsonPropertyName("kind")]
    public string Kind { get; init; } = string.Empty;

    /// <summary>
    /// Surface descriptor (what is exposed).
    /// </summary>
    [JsonPropertyName("surface")]
    public BoundarySurface? Surface { get; init; }

    /// <summary>
    /// Exposure descriptor (how it's exposed).
    /// </summary>
    [JsonPropertyName("exposure")]
    public BoundaryExposure? Exposure { get; init; }

    /// <summary>
    /// Authentication requirements.
    /// </summary>
    [JsonPropertyName("auth")]
    public BoundaryAuth? Auth { get; init; }

    /// <summary>
    /// Security controls protecting the boundary.
    /// </summary>
    [JsonPropertyName("controls")]
    public IReadOnlyList<BoundaryControl>? Controls { get; init; }

    /// <summary>
    /// When the boundary was last verified.
    /// </summary>
    [JsonPropertyName("last_seen")]
    public DateTimeOffset LastSeen { get; init; }

    /// <summary>
    /// Confidence score for this boundary proof (0.0 to 1.0).
    /// Note: not defaulted, so an unset value deserializes as 0.0 (lowest confidence).
    /// </summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; }

    /// <summary>
    /// Source of this boundary proof (static_analysis, runtime_observation, config).
    /// </summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }

    /// <summary>
    /// Reference to the evidence source (graph hash, scan ID, etc.).
    /// </summary>
    [JsonPropertyName("evidence_ref")]
    public string? EvidenceRef { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes what attack surface is exposed.
/// </summary>
public sealed record BoundarySurface
{
    /// <summary>
    /// Type of surface (api, web, cli, library, file, socket).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Protocol (http, https, grpc, tcp, udp, unix).
    /// </summary>
    [JsonPropertyName("protocol")]
    public string? Protocol { get; init; }

    /// <summary>
    /// Port number if network-exposed; null for non-network surfaces.
    /// </summary>
    [JsonPropertyName("port")]
    public int? Port { get; init; }

    /// <summary>
    /// Host or interface binding.
    /// </summary>
    [JsonPropertyName("host")]
    public string? Host { get; init; }

    /// <summary>
    /// Path or route pattern.
    /// </summary>
    [JsonPropertyName("path")]
    public string? Path { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes how the surface is exposed.
/// </summary>
public sealed record BoundaryExposure
{
    /// <summary>
    /// Exposure level (public, internal, private, localhost).
    /// </summary>
    [JsonPropertyName("level")]
    public string Level { get; init; } = string.Empty;

    /// <summary>
    /// Whether the exposure is internet-facing.
    /// Non-nullable: absent values deserialize as false.
    /// </summary>
    [JsonPropertyName("internet_facing")]
    public bool InternetFacing { get; init; }

    /// <summary>
    /// Network zone (dmz, internal, trusted, untrusted).
    /// </summary>
    [JsonPropertyName("zone")]
    public string? Zone { get; init; }

    /// <summary>
    /// Whether behind a load balancer or proxy; null when unknown.
    /// </summary>
    [JsonPropertyName("behind_proxy")]
    public bool? BehindProxy { get; init; }

    /// <summary>
    /// Expected client types (browser, api_client, service, any).
    /// </summary>
    [JsonPropertyName("client_types")]
    public IReadOnlyList<string>? ClientTypes { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes authentication requirements at the boundary.
/// </summary>
public sealed record BoundaryAuth
{
    /// <summary>
    /// Whether authentication is required.
    /// </summary>
    [JsonPropertyName("required")]
    public bool Required { get; init; }

    /// <summary>
    /// Authentication type (jwt, oauth2, basic, api_key, mtls, session).
    /// </summary>
    [JsonPropertyName("type")]
    public string? Type { get; init; }

    /// <summary>
    /// Required roles or scopes.
    /// </summary>
    [JsonPropertyName("roles")]
    public IReadOnlyList<string>? Roles { get; init; }

    /// <summary>
    /// Authentication provider or issuer.
    /// </summary>
    [JsonPropertyName("provider")]
    public string? Provider { get; init; }

    /// <summary>
    /// Whether MFA is required; null when unknown.
    /// </summary>
    [JsonPropertyName("mfa_required")]
    public bool? MfaRequired { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes a security control at the boundary.
/// </summary>
public sealed record BoundaryControl
{
    /// <summary>
    /// Type of control (rate_limit, waf, input_validation, output_encoding, etc.).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Whether the control is currently active.
    /// </summary>
    [JsonPropertyName("active")]
    public bool Active { get; init; }

    /// <summary>
    /// Control configuration or policy reference.
    /// </summary>
    [JsonPropertyName("config")]
    public string? Config { get; init; }

    /// <summary>
    /// Effectiveness rating (high, medium, low).
    /// </summary>
    [JsonPropertyName("effectiveness")]
    public string? Effectiveness { get; init; }

    /// <summary>
    /// When the control was last verified; null if never verified.
    /// </summary>
    [JsonPropertyName("verified_at")]
    public DateTimeOffset? VerifiedAt { get; init; }
}
|
||||
@@ -0,0 +1,179 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VexEvidence.cs
|
||||
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
|
||||
// Description: VEX (Vulnerability Exploitability eXchange) evidence model.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Detection;
|
||||
|
||||
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) evidence for a vulnerability.
/// Captures vendor/first-party statements about whether a vulnerability is exploitable.
/// </summary>
public sealed record VexEvidence
{
    /// <summary>
    /// VEX status: not_affected, affected, fixed, under_investigation.
    /// </summary>
    [JsonPropertyName("status")]
    public VexStatus Status { get; init; }

    /// <summary>
    /// Justification for the status (per OpenVEX specification).
    /// </summary>
    [JsonPropertyName("justification")]
    public VexJustification? Justification { get; init; }

    /// <summary>
    /// Human-readable impact statement explaining why not affected.
    /// </summary>
    [JsonPropertyName("impact")]
    public string? Impact { get; init; }

    /// <summary>
    /// Human-readable action statement (remediation steps).
    /// </summary>
    [JsonPropertyName("action")]
    public string? Action { get; init; }

    /// <summary>
    /// Reference to the VEX document or DSSE attestation.
    /// </summary>
    [JsonPropertyName("attestation_ref")]
    public string? AttestationRef { get; init; }

    /// <summary>
    /// VEX document ID.
    /// </summary>
    [JsonPropertyName("document_id")]
    public string? DocumentId { get; init; }

    /// <summary>
    /// When the VEX statement was issued.
    /// </summary>
    [JsonPropertyName("issued_at")]
    public DateTimeOffset? IssuedAt { get; init; }

    /// <summary>
    /// When the VEX statement was last updated.
    /// </summary>
    [JsonPropertyName("updated_at")]
    public DateTimeOffset? UpdatedAt { get; init; }

    /// <summary>
    /// When the VEX statement expires; null means it never expires.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>
    /// Source of the VEX statement (vendor, first_party, third_party, coordinator).
    /// </summary>
    [JsonPropertyName("source")]
    public VexSource? Source { get; init; }

    /// <summary>
    /// Affected product or component reference (PURL).
    /// </summary>
    [JsonPropertyName("product_ref")]
    public string? ProductRef { get; init; }

    /// <summary>
    /// Vulnerability ID (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("vulnerability_id")]
    public string? VulnerabilityId { get; init; }

    /// <summary>
    /// Confidence in the VEX statement (0.0 to 1.0).
    /// Higher confidence for vendor statements, lower for third-party.
    /// Defaults to 1.0 when not supplied.
    /// </summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; } = 1.0;

    /// <summary>
    /// Whether the VEX statement is still valid (not expired).
    /// Evaluated against the wall clock at each read, so the result can
    /// change over time for the same instance.
    /// </summary>
    [JsonIgnore]
    public bool IsValid => ExpiresAt is null || ExpiresAt > DateTimeOffset.UtcNow;

    /// <summary>
    /// Whether this VEX statement indicates the vulnerability is not exploitable.
    /// </summary>
    [JsonIgnore]
    public bool IsNotAffected => Status == VexStatus.NotAffected;

    /// <summary>
    /// Additional context or notes about the VEX statement.
    /// </summary>
    [JsonPropertyName("notes")]
    public IReadOnlyList<string>? Notes { get; init; }
}
|
||||
|
||||
/// <summary>
/// VEX status values per OpenVEX specification.
/// NOTE(review): <c>[JsonPropertyName]</c> is ignored on enum members by
/// <c>JsonStringEnumConverter</c> — as written these serialize as
/// "NotAffected", "Affected", etc., not the snake_case names below. Confirm
/// the intended wire format; a converter naming policy or
/// <c>[JsonStringEnumMemberName]</c> (.NET 9+) would be needed for snake_case.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum VexStatus
{
    /// <summary>
    /// The vulnerability is not exploitable in this context.
    /// </summary>
    [JsonPropertyName("not_affected")]
    NotAffected,

    /// <summary>
    /// The vulnerability is exploitable.
    /// </summary>
    [JsonPropertyName("affected")]
    Affected,

    /// <summary>
    /// The vulnerability has been fixed.
    /// </summary>
    [JsonPropertyName("fixed")]
    Fixed,

    /// <summary>
    /// The vulnerability is under investigation.
    /// </summary>
    [JsonPropertyName("under_investigation")]
    UnderInvestigation
}
|
||||
|
||||
// NOTE: VexJustification is defined in VexCandidateModels.cs to avoid duplication
|
||||
|
||||
/// <summary>
/// Source of a VEX statement.
/// </summary>
public sealed record VexSource
{
    /// <summary>
    /// Source type (vendor, first_party, third_party, coordinator, community).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Name of the source organization.
    /// </summary>
    [JsonPropertyName("name")]
    public string? Name { get; init; }

    /// <summary>
    /// URL to the source's VEX feed or website.
    /// </summary>
    [JsonPropertyName("url")]
    public string? Url { get; init; }

    /// <summary>
    /// Trust level (high, medium, low).
    /// Vendor and first-party are typically high; third-party varies.
    /// </summary>
    [JsonPropertyName("trust_level")]
    public string? TrustLevel { get; init; }
}
|
||||
@@ -0,0 +1,195 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssUpdatedEvent.cs
|
||||
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
|
||||
// Task: EPSS-3410-011
|
||||
// Description: Event published when EPSS data is successfully updated.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss.Events;
|
||||
|
||||
/// <summary>
/// Event published when EPSS data is successfully ingested.
/// Event type: "epss.updated@1"
/// </summary>
public sealed record EpssUpdatedEvent
{
    /// <summary>
    /// Event type identifier for routing.
    /// </summary>
    public const string EventType = "epss.updated@1";

    /// <summary>
    /// Event version for schema evolution.
    /// </summary>
    public const int Version = 1;

    /// <summary>
    /// Unique identifier for this event instance.
    /// </summary>
    [JsonPropertyName("event_id")]
    public required Guid EventId { get; init; }

    /// <summary>
    /// UTC timestamp when the event occurred.
    /// </summary>
    [JsonPropertyName("occurred_at_utc")]
    public required DateTimeOffset OccurredAtUtc { get; init; }

    /// <summary>
    /// The import run ID that produced this update.
    /// </summary>
    [JsonPropertyName("import_run_id")]
    public required Guid ImportRunId { get; init; }

    /// <summary>
    /// The EPSS model date (YYYY-MM-DD) that was imported.
    /// </summary>
    [JsonPropertyName("model_date")]
    public required DateOnly ModelDate { get; init; }

    /// <summary>
    /// The EPSS model version tag (e.g., "v2025.12.17").
    /// </summary>
    [JsonPropertyName("model_version_tag")]
    public string? ModelVersionTag { get; init; }

    /// <summary>
    /// The published date from the EPSS data.
    /// </summary>
    [JsonPropertyName("published_date")]
    public DateOnly? PublishedDate { get; init; }

    /// <summary>
    /// Total number of CVEs in the snapshot.
    /// </summary>
    [JsonPropertyName("row_count")]
    public required int RowCount { get; init; }

    /// <summary>
    /// Number of distinct CVE IDs in the snapshot.
    /// </summary>
    [JsonPropertyName("distinct_cve_count")]
    public required int DistinctCveCount { get; init; }

    /// <summary>
    /// SHA256 hash of the decompressed CSV content.
    /// </summary>
    [JsonPropertyName("content_hash")]
    public string? ContentHash { get; init; }

    /// <summary>
    /// Source URI (online URL or bundle path).
    /// </summary>
    [JsonPropertyName("source_uri")]
    public required string SourceUri { get; init; }

    /// <summary>
    /// Duration of the ingestion in milliseconds.
    /// </summary>
    [JsonPropertyName("duration_ms")]
    public required long DurationMs { get; init; }

    /// <summary>
    /// Summary of material changes detected.
    /// </summary>
    [JsonPropertyName("change_summary")]
    public EpssChangeSummary? ChangeSummary { get; init; }

    /// <summary>
    /// Creates an idempotency key for this event based on model date and import run.
    /// Formatted with the invariant culture so the key is byte-identical on every
    /// host: a culture-sensitive interpolation could render the date through a
    /// non-Gregorian calendar and break cross-host deduplication.
    /// </summary>
    public string GetIdempotencyKey()
        => FormattableString.Invariant($"epss.updated:{ModelDate:yyyy-MM-dd}:{ImportRunId:N}");
}
|
||||
|
||||
/// <summary>
/// Summary of material changes in an EPSS update.
/// Thresholds for "high", "big jump", and "top percentile" are defined by the
/// change detector that produces this summary — TODO confirm against
/// EpssChangeDetector.
/// </summary>
public sealed record EpssChangeSummary
{
    /// <summary>
    /// Number of CVEs newly scored (first appearance).
    /// </summary>
    [JsonPropertyName("new_scored")]
    public int NewScored { get; init; }

    /// <summary>
    /// Number of CVEs that crossed the high threshold upward.
    /// </summary>
    [JsonPropertyName("crossed_high")]
    public int CrossedHigh { get; init; }

    /// <summary>
    /// Number of CVEs that crossed the high threshold downward.
    /// </summary>
    [JsonPropertyName("crossed_low")]
    public int CrossedLow { get; init; }

    /// <summary>
    /// Number of CVEs with a big jump up in score.
    /// </summary>
    [JsonPropertyName("big_jump_up")]
    public int BigJumpUp { get; init; }

    /// <summary>
    /// Number of CVEs with a big jump down in score.
    /// </summary>
    [JsonPropertyName("big_jump_down")]
    public int BigJumpDown { get; init; }

    /// <summary>
    /// Number of CVEs that entered the top percentile.
    /// </summary>
    [JsonPropertyName("top_percentile")]
    public int TopPercentile { get; init; }

    /// <summary>
    /// Number of CVEs that left the top percentile.
    /// </summary>
    [JsonPropertyName("left_top_percentile")]
    public int LeftTopPercentile { get; init; }

    /// <summary>
    /// Total number of CVEs with any material change.
    /// </summary>
    [JsonPropertyName("total_changed")]
    public int TotalChanged { get; init; }
}
|
||||
|
||||
/// <summary>
/// Builder for creating <see cref="EpssUpdatedEvent"/> instances.
/// </summary>
public static class EpssUpdatedEventBuilder
{
    /// <summary>
    /// Creates a fully-populated <see cref="EpssUpdatedEvent"/> with a fresh
    /// event ID and an occurrence timestamp taken from <paramref name="timeProvider"/>.
    /// </summary>
    /// <param name="importRunId">Import run that produced the update.</param>
    /// <param name="modelDate">EPSS model date (YYYY-MM-DD) that was imported.</param>
    /// <param name="sourceUri">Source URI (online URL or bundle path).</param>
    /// <param name="rowCount">Total rows in the snapshot.</param>
    /// <param name="distinctCveCount">Distinct CVE IDs in the snapshot.</param>
    /// <param name="durationMs">Ingestion duration in milliseconds.</param>
    /// <param name="timeProvider">Clock used for the event timestamp (injected for testability).</param>
    /// <param name="modelVersionTag">Optional model version tag (e.g., "v2025.12.17").</param>
    /// <param name="publishedDate">Optional published date from the EPSS data.</param>
    /// <param name="contentHash">Optional SHA256 of the decompressed CSV content.</param>
    /// <param name="changeSummary">Optional summary of material changes.</param>
    /// <returns>The constructed event.</returns>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="timeProvider"/> or <paramref name="sourceUri"/> is null.
    /// </exception>
    public static EpssUpdatedEvent Create(
        Guid importRunId,
        DateOnly modelDate,
        string sourceUri,
        int rowCount,
        int distinctCveCount,
        long durationMs,
        TimeProvider timeProvider,
        string? modelVersionTag = null,
        DateOnly? publishedDate = null,
        string? contentHash = null,
        EpssChangeSummary? changeSummary = null)
    {
        // Fail fast: a null clock would otherwise surface as an NRE below, and
        // a null URI would violate the event's required property contract.
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(sourceUri);

        return new EpssUpdatedEvent
        {
            EventId = Guid.NewGuid(),
            OccurredAtUtc = timeProvider.GetUtcNow(),
            ImportRunId = importRunId,
            ModelDate = modelDate,
            ModelVersionTag = modelVersionTag,
            PublishedDate = publishedDate,
            RowCount = rowCount,
            DistinctCveCount = distinctCveCount,
            ContentHash = contentHash,
            SourceUri = sourceUri,
            DurationMs = durationMs,
            ChangeSummary = changeSummary
        };
    }
}
|
||||
@@ -82,8 +82,17 @@ public static class ServiceCollectionExtensions
|
||||
services.AddScoped<IReachabilityResultRepository, PostgresReachabilityResultRepository>();
|
||||
services.AddScoped<ICodeChangeRepository, PostgresCodeChangeRepository>();
|
||||
services.AddScoped<IReachabilityDriftResultRepository, PostgresReachabilityDriftResultRepository>();
|
||||
|
||||
// EPSS ingestion services
|
||||
services.AddSingleton<EpssCsvStreamParser>();
|
||||
services.AddScoped<IEpssRepository, PostgresEpssRepository>();
|
||||
services.AddSingleton<EpssOnlineSource>();
|
||||
services.AddSingleton<EpssBundleSource>();
|
||||
services.AddSingleton<EpssChangeDetector>();
|
||||
|
||||
// Witness storage (Sprint: SPRINT_3700_0001_0001)
|
||||
services.AddScoped<IWitnessRepository, PostgresWitnessRepository>();
|
||||
|
||||
services.AddSingleton<IEntryTraceResultStore, EntryTraceResultStore>();
|
||||
services.AddSingleton<IRubyPackageInventoryStore, RubyPackageInventoryStore>();
|
||||
services.AddSingleton<IBunPackageInventoryStore, BunPackageInventoryStore>();
|
||||
|
||||
@@ -0,0 +1,60 @@
|
||||
-- Migration: 013_witness_storage.sql
-- Sprint: SPRINT_3700_0001_0001_witness_foundation
-- Task: WIT-011
-- Description: Creates tables for DSSE-signed path witnesses and witness storage.

-- Witness storage for reachability path proofs
CREATE TABLE IF NOT EXISTS scanner.witnesses (
    witness_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    witness_hash TEXT NOT NULL, -- BLAKE3 hash of witness payload
    schema_version TEXT NOT NULL DEFAULT 'stellaops.witness.v1',
    witness_type TEXT NOT NULL, -- 'reachability_path', 'gate_proof', etc.

    -- Reference to the graph/analysis that produced this witness
    graph_hash TEXT NOT NULL, -- BLAKE3 hash of source rich graph
    scan_id UUID,
    run_id UUID,

    -- Witness content
    payload_json JSONB NOT NULL, -- PathWitness JSON
    dsse_envelope JSONB, -- DSSE signed envelope (nullable until signed)

    -- Provenance (signed_at/signer_key_id populated when the envelope is attached)
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    signed_at TIMESTAMPTZ,
    signer_key_id TEXT,

    -- Indexing (denormalized lookup columns; see indexes below)
    entrypoint_fqn TEXT, -- For quick lookup by entrypoint
    sink_cve TEXT, -- For quick lookup by CVE

    -- witness_hash doubles as the dedup key: re-storing identical content conflicts here
    CONSTRAINT uk_witness_hash UNIQUE (witness_hash)
);

-- Index for efficient lookups (partial indexes skip NULLs to stay small)
CREATE INDEX IF NOT EXISTS ix_witnesses_graph_hash ON scanner.witnesses (graph_hash);
CREATE INDEX IF NOT EXISTS ix_witnesses_scan_id ON scanner.witnesses (scan_id) WHERE scan_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS ix_witnesses_sink_cve ON scanner.witnesses (sink_cve) WHERE sink_cve IS NOT NULL;
CREATE INDEX IF NOT EXISTS ix_witnesses_entrypoint ON scanner.witnesses (entrypoint_fqn) WHERE entrypoint_fqn IS NOT NULL;
CREATE INDEX IF NOT EXISTS ix_witnesses_created_at ON scanner.witnesses (created_at DESC);

-- GIN index for JSONB queries on payload (jsonb_path_ops: supports @> containment only, smaller than default ops)
CREATE INDEX IF NOT EXISTS ix_witnesses_payload_gin ON scanner.witnesses USING gin (payload_json jsonb_path_ops);

-- Witness verification log (for audit trail)
CREATE TABLE IF NOT EXISTS scanner.witness_verifications (
    verification_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    witness_id UUID NOT NULL REFERENCES scanner.witnesses(witness_id),
    verified_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    verified_by TEXT, -- 'system', 'api', 'cli'
    verification_status TEXT NOT NULL, -- 'valid', 'invalid', 'expired'
    verification_error TEXT,
    verifier_key_id TEXT
);

CREATE INDEX IF NOT EXISTS ix_witness_verifications_witness_id ON scanner.witness_verifications (witness_id);

COMMENT ON TABLE scanner.witnesses IS 'DSSE-signed path witnesses for reachability proofs (stellaops.witness.v1)';
COMMENT ON TABLE scanner.witness_verifications IS 'Audit log of witness verification attempts';
COMMENT ON COLUMN scanner.witnesses.witness_hash IS 'BLAKE3 hash of witness payload for deduplication and integrity';
COMMENT ON COLUMN scanner.witnesses.dsse_envelope IS 'Dead Simple Signing Envelope (DSSE) containing the signed witness';
||||
@@ -12,4 +12,7 @@ internal static class MigrationIds
|
||||
public const string EpssIntegration = "008_epss_integration.sql";
|
||||
public const string CallGraphTables = "009_call_graph_tables.sql";
|
||||
public const string ReachabilityDriftTables = "010_reachability_drift_tables.sql";
|
||||
public const string EpssRawLayer = "011_epss_raw_layer.sql";
|
||||
public const string EpssSignalLayer = "012_epss_signal_layer.sql";
|
||||
public const string WitnessStorage = "013_witness_storage.sql";
|
||||
}
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IWitnessRepository.cs
|
||||
// Sprint: SPRINT_3700_0001_0001_witness_foundation
|
||||
// Task: WIT-012
|
||||
// Description: Repository interface for path witness storage and retrieval.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
/// Repository for DSSE-signed path witnesses.
/// Backs the scanner.witnesses / scanner.witness_verifications tables
/// (migration 013_witness_storage.sql).
/// </summary>
public interface IWitnessRepository
{
    /// <summary>
    /// Stores a witness and returns the assigned ID.
    /// </summary>
    Task<Guid> StoreAsync(WitnessRecord witness, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves a witness by its ID, or null when not found.
    /// </summary>
    Task<WitnessRecord?> GetByIdAsync(Guid witnessId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves a witness by its payload hash, or null when not found.
    /// </summary>
    Task<WitnessRecord?> GetByHashAsync(string witnessHash, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves all witnesses for a given graph hash.
    /// </summary>
    Task<IReadOnlyList<WitnessRecord>> GetByGraphHashAsync(string graphHash, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves witnesses for a given scan.
    /// </summary>
    Task<IReadOnlyList<WitnessRecord>> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves witnesses for a given CVE.
    /// </summary>
    Task<IReadOnlyList<WitnessRecord>> GetByCveAsync(string cveId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Updates a witness with a DSSE envelope after signing.
    /// </summary>
    Task UpdateDsseEnvelopeAsync(Guid witnessId, string dsseEnvelopeJson, string signerKeyId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Records a verification attempt for a witness (audit trail).
    /// </summary>
    Task RecordVerificationAsync(WitnessVerificationRecord verification, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Record representing a stored witness.
/// </summary>
public sealed record WitnessRecord
{
    /// <summary>Database-assigned identifier; default until the witness is stored.</summary>
    public Guid WitnessId { get; init; }

    /// <summary>Content hash of the witness; used as the upsert key in the Postgres store.</summary>
    public required string WitnessHash { get; init; }

    /// <summary>Schema version of the witness payload.</summary>
    public string SchemaVersion { get; init; } = "stellaops.witness.v1";

    /// <summary>Discriminator string for the kind of witness.</summary>
    public required string WitnessType { get; init; }

    /// <summary>Hash of the graph this witness was derived from.</summary>
    public required string GraphHash { get; init; }

    /// <summary>Scan that produced the witness, if any.</summary>
    public Guid? ScanId { get; init; }

    /// <summary>Run that produced the witness, if any.</summary>
    public Guid? RunId { get; init; }

    /// <summary>Witness payload as JSON (persisted as jsonb).</summary>
    public required string PayloadJson { get; init; }

    /// <summary>DSSE envelope JSON once signed; null before signing.</summary>
    public string? DsseEnvelope { get; init; }

    /// <summary>Creation time; when left at default the store stamps the current UTC time.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>When the witness was signed; null while unsigned.</summary>
    public DateTimeOffset? SignedAt { get; init; }

    /// <summary>Key ID used to sign the DSSE envelope, if signed.</summary>
    public string? SignerKeyId { get; init; }

    /// <summary>Fully qualified name of the entrypoint, when applicable.</summary>
    public string? EntrypointFqn { get; init; }

    /// <summary>CVE identifier of the sink, when applicable.</summary>
    public string? SinkCve { get; init; }
}
|
||||
|
||||
/// <summary>
/// Record representing a witness verification attempt.
/// </summary>
public sealed record WitnessVerificationRecord
{
    /// <summary>Database-assigned identifier for the verification row.</summary>
    public Guid VerificationId { get; init; }

    /// <summary>The witness that was verified.</summary>
    public required Guid WitnessId { get; init; }

    /// <summary>When the verification ran; when left at default the store stamps the current UTC time.</summary>
    public DateTimeOffset VerifiedAt { get; init; }

    /// <summary>Actor that performed the verification, if known.</summary>
    public string? VerifiedBy { get; init; }

    /// <summary>Outcome of the verification attempt (status string).</summary>
    public required string VerificationStatus { get; init; }

    /// <summary>Error detail for failed verifications, if any.</summary>
    public string? VerificationError { get; init; }

    /// <summary>Key ID used by the verifier, if any.</summary>
    public string? VerifierKeyId { get; init; }
}
|
||||
@@ -0,0 +1,275 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresWitnessRepository.cs
|
||||
// Sprint: SPRINT_3700_0001_0001_witness_foundation
|
||||
// Task: WIT-012
|
||||
// Description: Postgres implementation of IWitnessRepository for witness storage.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using NpgsqlTypes;
|
||||
using StellaOps.Scanner.Storage.Postgres;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
/// Postgres implementation of <see cref="IWitnessRepository"/>.
/// Witness rows live in <c>scanner.witnesses</c> and verification attempts in
/// <c>scanner.witness_verifications</c>; witnesses are upserted by content hash.
/// </summary>
public sealed class PostgresWitnessRepository : IWitnessRepository
{
    // Single shared SELECT prefix: the column order here MUST stay aligned with
    // the ordinals used in MapToRecord. Previously this list was duplicated in
    // four queries, which made it easy to desynchronize.
    private const string SelectSql =
        "SELECT witness_id, witness_hash, schema_version, witness_type, graph_hash, " +
        "scan_id, run_id, payload_json, dsse_envelope, created_at, " +
        "signed_at, signer_key_id, entrypoint_fqn, sink_cve " +
        "FROM scanner.witnesses";

    private readonly ScannerDataSource _dataSource;
    private readonly ILogger<PostgresWitnessRepository> _logger;

    public PostgresWitnessRepository(ScannerDataSource dataSource, ILogger<PostgresWitnessRepository> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Inserts a witness; when a witness with the same hash already exists, fills in
    /// missing signing fields (envelope, signed_at, signer key) via COALESCE without
    /// overwriting existing values. Returns the stored witness ID in either case.
    /// </summary>
    public async Task<Guid> StoreAsync(WitnessRecord witness, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(witness);

        const string sql = """
            INSERT INTO scanner.witnesses (
                witness_hash, schema_version, witness_type, graph_hash,
                scan_id, run_id, payload_json, dsse_envelope, created_at,
                signed_at, signer_key_id, entrypoint_fqn, sink_cve
            ) VALUES (
                @witness_hash, @schema_version, @witness_type, @graph_hash,
                @scan_id, @run_id, @payload_json::jsonb, @dsse_envelope::jsonb, @created_at,
                @signed_at, @signer_key_id, @entrypoint_fqn, @sink_cve
            )
            ON CONFLICT (witness_hash) DO UPDATE SET
                dsse_envelope = COALESCE(EXCLUDED.dsse_envelope, scanner.witnesses.dsse_envelope),
                signed_at = COALESCE(EXCLUDED.signed_at, scanner.witnesses.signed_at),
                signer_key_id = COALESCE(EXCLUDED.signer_key_id, scanner.witnesses.signer_key_id)
            RETURNING witness_id
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);

        cmd.Parameters.AddWithValue("witness_hash", witness.WitnessHash);
        cmd.Parameters.AddWithValue("schema_version", witness.SchemaVersion);
        cmd.Parameters.AddWithValue("witness_type", witness.WitnessType);
        cmd.Parameters.AddWithValue("graph_hash", witness.GraphHash);
        cmd.Parameters.AddWithValue("scan_id", DbValue(witness.ScanId));
        cmd.Parameters.AddWithValue("run_id", DbValue(witness.RunId));
        cmd.Parameters.AddWithValue("payload_json", witness.PayloadJson);
        cmd.Parameters.AddWithValue("dsse_envelope", DbValue(witness.DsseEnvelope));
        // default(DateTimeOffset) means "not set by caller": stamp with current UTC time.
        cmd.Parameters.AddWithValue("created_at", witness.CreatedAt == default ? DateTimeOffset.UtcNow : witness.CreatedAt);
        cmd.Parameters.AddWithValue("signed_at", DbValue(witness.SignedAt));
        cmd.Parameters.AddWithValue("signer_key_id", DbValue(witness.SignerKeyId));
        cmd.Parameters.AddWithValue("entrypoint_fqn", DbValue(witness.EntrypointFqn));
        cmd.Parameters.AddWithValue("sink_cve", DbValue(witness.SinkCve));

        var result = await cmd.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
        var witnessId = (Guid)result!;

        _logger.LogDebug("Stored witness {WitnessId} with hash {WitnessHash}", witnessId, witness.WitnessHash);
        return witnessId;
    }

    /// <summary>Retrieves a witness by its ID, or null when not found.</summary>
    public Task<WitnessRecord?> GetByIdAsync(Guid witnessId, CancellationToken cancellationToken = default)
        => QuerySingleAsync(SelectSql + " WHERE witness_id = @value", witnessId, cancellationToken);

    /// <summary>Retrieves a witness by its content hash, or null when not found.</summary>
    public Task<WitnessRecord?> GetByHashAsync(string witnessHash, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(witnessHash);
        return QuerySingleAsync(SelectSql + " WHERE witness_hash = @value", witnessHash, cancellationToken);
    }

    /// <summary>Retrieves all witnesses for a graph hash, newest first.</summary>
    public Task<IReadOnlyList<WitnessRecord>> GetByGraphHashAsync(string graphHash, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(graphHash);
        return QueryListAsync(SelectSql + " WHERE graph_hash = @value ORDER BY created_at DESC", graphHash, cancellationToken);
    }

    /// <summary>Retrieves witnesses for a scan, newest first.</summary>
    public Task<IReadOnlyList<WitnessRecord>> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default)
        => QueryListAsync(SelectSql + " WHERE scan_id = @value ORDER BY created_at DESC", scanId, cancellationToken);

    /// <summary>Retrieves witnesses whose sink CVE matches, newest first.</summary>
    public Task<IReadOnlyList<WitnessRecord>> GetByCveAsync(string cveId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        return QueryListAsync(SelectSql + " WHERE sink_cve = @value ORDER BY created_at DESC", cveId, cancellationToken);
    }

    /// <summary>
    /// Attaches a DSSE envelope to a witness and stamps signed_at with the current UTC time.
    /// Logs a warning when the witness does not exist (previously a silent no-op).
    /// </summary>
    public async Task UpdateDsseEnvelopeAsync(Guid witnessId, string dsseEnvelopeJson, string signerKeyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(dsseEnvelopeJson);

        const string sql = """
            UPDATE scanner.witnesses
            SET dsse_envelope = @dsse_envelope::jsonb,
                signed_at = @signed_at,
                signer_key_id = @signer_key_id
            WHERE witness_id = @witness_id
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("witness_id", witnessId);
        cmd.Parameters.AddWithValue("dsse_envelope", dsseEnvelopeJson);
        cmd.Parameters.AddWithValue("signed_at", DateTimeOffset.UtcNow);
        cmd.Parameters.AddWithValue("signer_key_id", DbValue(signerKeyId));

        var affected = await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        if (affected > 0)
        {
            _logger.LogDebug("Updated DSSE envelope for witness {WitnessId}", witnessId);
        }
        else
        {
            // Surface the miss so callers can diagnose signatures that never land.
            _logger.LogWarning("No witness found with ID {WitnessId}; DSSE envelope was not stored", witnessId);
        }
    }

    /// <summary>
    /// Appends a verification attempt to the witness_verifications audit table.
    /// </summary>
    public async Task RecordVerificationAsync(WitnessVerificationRecord verification, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(verification);

        const string sql = """
            INSERT INTO scanner.witness_verifications (
                witness_id, verified_at, verified_by, verification_status,
                verification_error, verifier_key_id
            ) VALUES (
                @witness_id, @verified_at, @verified_by, @verification_status,
                @verification_error, @verifier_key_id
            )
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("witness_id", verification.WitnessId);
        // default(DateTimeOffset) means "not set by caller": stamp with current UTC time.
        cmd.Parameters.AddWithValue("verified_at", verification.VerifiedAt == default ? DateTimeOffset.UtcNow : verification.VerifiedAt);
        cmd.Parameters.AddWithValue("verified_by", DbValue(verification.VerifiedBy));
        cmd.Parameters.AddWithValue("verification_status", verification.VerificationStatus);
        cmd.Parameters.AddWithValue("verification_error", DbValue(verification.VerificationError));
        cmd.Parameters.AddWithValue("verifier_key_id", DbValue(verification.VerifierKeyId));

        await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("Recorded verification for witness {WitnessId}: {Status}", verification.WitnessId, verification.VerificationStatus);
    }

    // Runs a single-parameter query (@value) and maps at most one row.
    private async Task<WitnessRecord?> QuerySingleAsync(string sql, object value, CancellationToken cancellationToken)
    {
        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("value", value);

        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        return await reader.ReadAsync(cancellationToken).ConfigureAwait(false) ? MapToRecord(reader) : null;
    }

    // Runs a single-parameter query (@value) and maps every row.
    private async Task<IReadOnlyList<WitnessRecord>> QueryListAsync(string sql, object value, CancellationToken cancellationToken)
    {
        await using var conn = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("value", value);

        var results = new List<WitnessRecord>();
        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            results.Add(MapToRecord(reader));
        }

        return results;
    }

    // Converts optional values to DBNull for AddWithValue; empty strings are treated as absent.
    private static object DbValue(string? value) => string.IsNullOrEmpty(value) ? DBNull.Value : value;
    private static object DbValue(Guid? value) => value.HasValue ? value.Value : DBNull.Value;
    private static object DbValue(DateTimeOffset? value) => value.HasValue ? value.Value : DBNull.Value;

    /// <summary>Maps the current reader row (SelectSql column order) to a <see cref="WitnessRecord"/>.</summary>
    private static WitnessRecord MapToRecord(NpgsqlDataReader reader)
    {
        return new WitnessRecord
        {
            WitnessId = reader.GetGuid(0),
            WitnessHash = reader.GetString(1),
            SchemaVersion = reader.GetString(2),
            WitnessType = reader.GetString(3),
            GraphHash = reader.GetString(4),
            ScanId = reader.IsDBNull(5) ? null : reader.GetGuid(5),
            RunId = reader.IsDBNull(6) ? null : reader.GetGuid(6),
            PayloadJson = reader.GetString(7),
            DsseEnvelope = reader.IsDBNull(8) ? null : reader.GetString(8),
            // Read timestamptz directly as DateTimeOffset instead of GetDateTime +
            // implicit DateTime -> DateTimeOffset conversion.
            CreatedAt = reader.GetFieldValue<DateTimeOffset>(9),
            SignedAt = reader.IsDBNull(10) ? null : reader.GetFieldValue<DateTimeOffset>(10),
            SignerKeyId = reader.IsDBNull(11) ? null : reader.GetString(11),
            EntrypointFqn = reader.IsDBNull(12) ? null : reader.GetString(12),
            SinkCve = reader.IsDBNull(13) ? null : reader.GetString(13)
        };
    }
}
|
||||
@@ -0,0 +1,162 @@
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
|
||||
namespace StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
/// <summary>
/// Read-only view representing the current state of a triage case,
/// combining the latest risk, reachability, and VEX data.
/// Mapped as a keyless EF Core entity — presumably backed by a database
/// view; confirm against the migration/DDL.
/// </summary>
[Keyless]
public sealed class TriageCaseCurrent
{
    /// <summary>
    /// The case/finding ID.
    /// </summary>
    [Column("case_id")]
    public Guid CaseId { get; init; }

    /// <summary>
    /// The asset ID.
    /// </summary>
    [Column("asset_id")]
    public Guid AssetId { get; init; }

    /// <summary>
    /// Optional environment ID.
    /// </summary>
    [Column("environment_id")]
    public Guid? EnvironmentId { get; init; }

    /// <summary>
    /// Human-readable asset label.
    /// </summary>
    [Column("asset_label")]
    public string AssetLabel { get; init; } = string.Empty;

    /// <summary>
    /// Package URL of the affected component.
    /// </summary>
    [Column("purl")]
    public string Purl { get; init; } = string.Empty;

    /// <summary>
    /// CVE identifier (if vulnerability finding).
    /// </summary>
    [Column("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Rule identifier (if policy rule finding).
    /// </summary>
    [Column("rule_id")]
    public string? RuleId { get; init; }

    /// <summary>
    /// When this finding was first seen.
    /// </summary>
    [Column("first_seen_at")]
    public DateTimeOffset FirstSeenAt { get; init; }

    /// <summary>
    /// When this finding was last seen.
    /// </summary>
    [Column("last_seen_at")]
    public DateTimeOffset LastSeenAt { get; init; }

    // ---- Latest risk result fields (null when no risk evaluation exists yet) ----

    /// <summary>
    /// Policy ID from latest risk evaluation.
    /// </summary>
    [Column("policy_id")]
    public string? PolicyId { get; init; }

    /// <summary>
    /// Policy version from latest risk evaluation.
    /// </summary>
    [Column("policy_version")]
    public string? PolicyVersion { get; init; }

    /// <summary>
    /// Inputs hash from latest risk evaluation.
    /// </summary>
    [Column("inputs_hash")]
    public string? InputsHash { get; init; }

    /// <summary>
    /// Risk score (0-100).
    /// </summary>
    [Column("score")]
    public int? Score { get; init; }

    /// <summary>
    /// Final verdict.
    /// </summary>
    [Column("verdict")]
    public TriageVerdict? Verdict { get; init; }

    /// <summary>
    /// Current triage lane.
    /// </summary>
    [Column("lane")]
    public TriageLane? Lane { get; init; }

    /// <summary>
    /// Short narrative explaining the current state.
    /// </summary>
    [Column("why")]
    public string? Why { get; init; }

    /// <summary>
    /// When the risk was last computed.
    /// </summary>
    [Column("risk_computed_at")]
    public DateTimeOffset? RiskComputedAt { get; init; }

    // ---- Latest reachability fields ----

    /// <summary>
    /// Reachability determination (non-nullable: defaults to the enum's first member
    /// when the column is absent — confirm the view always populates it).
    /// </summary>
    [Column("reachable")]
    public TriageReachability Reachable { get; init; }

    /// <summary>
    /// Reachability confidence (0-100).
    /// </summary>
    [Column("reach_confidence")]
    public short? ReachConfidence { get; init; }

    // ---- Latest VEX fields (null when no VEX data has been merged) ----

    /// <summary>
    /// VEX status.
    /// </summary>
    [Column("vex_status")]
    public TriageVexStatus? VexStatus { get; init; }

    /// <summary>
    /// VEX issuer.
    /// </summary>
    [Column("vex_issuer")]
    public string? VexIssuer { get; init; }

    /// <summary>
    /// VEX signature reference.
    /// </summary>
    [Column("vex_signature_ref")]
    public string? VexSignatureRef { get; init; }

    /// <summary>
    /// VEX source domain.
    /// </summary>
    [Column("vex_source_domain")]
    public string? VexSourceDomain { get; init; }

    /// <summary>
    /// VEX source reference.
    /// </summary>
    [Column("vex_source_ref")]
    public string? VexSourceRef { get; init; }
}
|
||||
@@ -0,0 +1,120 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
/// <summary>
/// Signed triage decision (mute, ack, exception). Decisions are reversible via
/// revocation: a decision is active while <see cref="RevokedAt"/> is null.
/// </summary>
[Table("triage_decision")]
public sealed class TriageDecision
{
    /// <summary>
    /// Unique identifier.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this decision applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// Type of decision.
    /// </summary>
    [Column("kind")]
    public TriageDecisionKind Kind { get; init; }

    /// <summary>
    /// Reason code for the decision (from a controlled vocabulary).
    /// </summary>
    [Required]
    [Column("reason_code")]
    public required string ReasonCode { get; init; }

    /// <summary>
    /// Optional freeform note from the decision maker.
    /// </summary>
    [Column("note")]
    public string? Note { get; init; }

    /// <summary>
    /// Reference to the policy that allowed this decision.
    /// </summary>
    [Column("policy_ref")]
    public string? PolicyRef { get; init; }

    /// <summary>
    /// Expiry of the decision (null = indefinite). Despite the "ttl" name this is
    /// an absolute instant, not a duration.
    /// </summary>
    [Column("ttl")]
    public DateTimeOffset? Ttl { get; init; }

    /// <summary>
    /// Authority subject (sub) of the actor who made the decision.
    /// </summary>
    [Required]
    [Column("actor_subject")]
    public required string ActorSubject { get; init; }

    /// <summary>
    /// Display name of the actor.
    /// </summary>
    [Column("actor_display")]
    public string? ActorDisplay { get; init; }

    /// <summary>
    /// Reference to DSSE signature.
    /// </summary>
    [Column("signature_ref")]
    public string? SignatureRef { get; init; }

    /// <summary>
    /// Hash of the DSSE envelope.
    /// </summary>
    [Column("dsse_hash")]
    public string? DsseHash { get; init; }

    /// <summary>
    /// When the decision was created.
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// When the decision was revoked (null = active). Settable because revocation
    /// mutates an existing row rather than creating a new one.
    /// </summary>
    [Column("revoked_at")]
    public DateTimeOffset? RevokedAt { get; set; }

    /// <summary>
    /// Reason for revocation.
    /// </summary>
    [Column("revoke_reason")]
    public string? RevokeReason { get; set; }

    /// <summary>
    /// Signature reference for revocation.
    /// </summary>
    [Column("revoke_signature_ref")]
    public string? RevokeSignatureRef { get; set; }

    /// <summary>
    /// DSSE hash for revocation.
    /// </summary>
    [Column("revoke_dsse_hash")]
    public string? RevokeDsseHash { get; set; }

    /// <summary>
    /// Whether this decision is currently active (i.e. not revoked).
    /// Not persisted; computed from <see cref="RevokedAt"/>.
    /// </summary>
    [NotMapped]
    public bool IsActive => RevokedAt is null;

    // Navigation property to the owning finding (loaded by EF Core when included).
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,91 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
/// <summary>
/// Effective VEX status for a finding after merging multiple VEX sources.
/// Preserves provenance pointers (source domain/ref, issuer, signature) for auditability.
/// </summary>
[Table("triage_effective_vex")]
public sealed class TriageEffectiveVex
{
    /// <summary>
    /// Unique identifier.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this VEX status applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// The effective VEX status after merging.
    /// </summary>
    [Column("status")]
    public TriageVexStatus Status { get; init; }

    /// <summary>
    /// Source domain that provided this VEX (e.g., "excititor").
    /// </summary>
    [Required]
    [Column("source_domain")]
    public required string SourceDomain { get; init; }

    /// <summary>
    /// Stable reference string to the source document.
    /// </summary>
    [Required]
    [Column("source_ref")]
    public required string SourceRef { get; init; }

    /// <summary>
    /// JSON array of pruned VEX sources with reasons (for merge transparency).
    /// Stored as jsonb; null when nothing was pruned.
    /// </summary>
    [Column("pruned_sources", TypeName = "jsonb")]
    public string? PrunedSourcesJson { get; init; }

    /// <summary>
    /// Hash of the DSSE envelope if signed.
    /// </summary>
    [Column("dsse_envelope_hash")]
    public string? DsseEnvelopeHash { get; init; }

    /// <summary>
    /// Reference to Rekor/ledger entry for signature verification.
    /// </summary>
    [Column("signature_ref")]
    public string? SignatureRef { get; init; }

    /// <summary>
    /// Issuer of the VEX document.
    /// </summary>
    [Column("issuer")]
    public string? Issuer { get; init; }

    /// <summary>
    /// When this VEX status became valid.
    /// </summary>
    [Column("valid_from")]
    public DateTimeOffset ValidFrom { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// When this VEX status expires (null = indefinite).
    /// </summary>
    [Column("valid_to")]
    public DateTimeOffset? ValidTo { get; init; }

    /// <summary>
    /// When this record was collected.
    /// </summary>
    [Column("collected_at")]
    public DateTimeOffset CollectedAt { get; init; } = DateTimeOffset.UtcNow;

    // Navigation property to the owning finding (loaded by EF Core when included).
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,151 @@
|
||||
namespace StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
/// <summary>
/// Triage lane indicating the current workflow state of a finding.
/// NOTE(review): persisted via EF Core — do not reorder or renumber members
/// without confirming how the column stores enum values.
/// </summary>
public enum TriageLane
{
    /// <summary>Finding is actively being evaluated.</summary>
    Active,

    /// <summary>Finding is blocking shipment.</summary>
    Blocked,

    /// <summary>Finding requires a security exception to proceed.</summary>
    NeedsException,

    /// <summary>Finding is muted due to reachability analysis (not reachable).</summary>
    MutedReach,

    /// <summary>Finding is muted due to VEX status (not affected).</summary>
    MutedVex,

    /// <summary>Finding is mitigated by compensating controls.</summary>
    Compensated
}
|
||||
|
||||
/// <summary>
/// Final verdict for a triage case.
/// </summary>
public enum TriageVerdict
{
    /// <summary>Can ship - no blocking issues.</summary>
    Ship,

    /// <summary>Cannot ship - blocking issues present.</summary>
    Block,

    /// <summary>Exception granted - can ship with documented exception.</summary>
    Exception
}
|
||||
|
||||
/// <summary>
/// Reachability determination result.
/// NOTE(review): <c>Yes</c> is the first member, so a default-initialized value
/// reads as "reachable" — confirm callers always set this explicitly.
/// </summary>
public enum TriageReachability
{
    /// <summary>Vulnerable code is reachable.</summary>
    Yes,

    /// <summary>Vulnerable code is not reachable.</summary>
    No,

    /// <summary>Reachability cannot be determined.</summary>
    Unknown
}
|
||||
|
||||
/// <summary>
/// VEX status per OpenVEX specification (affected / not_affected /
/// under_investigation, plus an explicit Unknown).
/// </summary>
public enum TriageVexStatus
{
    /// <summary>Product is affected by the vulnerability.</summary>
    Affected,

    /// <summary>Product is not affected by the vulnerability.</summary>
    NotAffected,

    /// <summary>Investigation is ongoing.</summary>
    UnderInvestigation,

    /// <summary>Status is unknown.</summary>
    Unknown
}
|
||||
|
||||
/// <summary>
/// Type of triage decision (see <see cref="TriageDecision.Kind"/>).
/// </summary>
public enum TriageDecisionKind
{
    /// <summary>Mute based on reachability analysis.</summary>
    MuteReach,

    /// <summary>Mute based on VEX status.</summary>
    MuteVex,

    /// <summary>Acknowledge the finding without action.</summary>
    Ack,

    /// <summary>Grant a security exception.</summary>
    Exception
}
|
||||
|
||||
/// <summary>
/// Trigger that caused a triage snapshot to be created.
/// </summary>
public enum TriageSnapshotTrigger
{
    /// <summary>Vulnerability feed was updated.</summary>
    FeedUpdate,

    /// <summary>VEX document was updated.</summary>
    VexUpdate,

    /// <summary>SBOM was updated.</summary>
    SbomUpdate,

    /// <summary>Runtime trace was received.</summary>
    RuntimeTrace,

    /// <summary>Policy was updated.</summary>
    PolicyUpdate,

    /// <summary>A triage decision was made.</summary>
    Decision,

    /// <summary>Manual rescan was triggered.</summary>
    Rescan
}
|
||||
|
||||
/// <summary>
/// Type of evidence artifact attached to a finding
/// (see <see cref="TriageEvidenceArtifact.Type"/>).
/// </summary>
public enum TriageEvidenceType
{
    /// <summary>Slice of the SBOM relevant to the finding.</summary>
    SbomSlice,

    /// <summary>VEX document.</summary>
    VexDoc,

    /// <summary>Build provenance attestation.</summary>
    Provenance,

    /// <summary>Callstack or callgraph slice.</summary>
    CallstackSlice,

    /// <summary>Reachability proof document.</summary>
    ReachabilityProof,

    /// <summary>Replay manifest for deterministic reproduction.</summary>
    ReplayManifest,

    /// <summary>Policy document that was applied.</summary>
    Policy,

    /// <summary>Scan log output.</summary>
    ScanLog,

    /// <summary>Other evidence type.</summary>
    Other
}
|
||||
@@ -0,0 +1,103 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
/// <summary>
/// Evidence artifact attached to a finding. Hash-addressed (via
/// <see cref="ContentHash"/>) and optionally signed.
/// </summary>
[Table("triage_evidence_artifact")]
public sealed class TriageEvidenceArtifact
{
    /// <summary>
    /// Unique identifier.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this evidence applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// Type of evidence.
    /// </summary>
    [Column("type")]
    public TriageEvidenceType Type { get; init; }

    /// <summary>
    /// Human-readable title for the evidence.
    /// </summary>
    [Required]
    [Column("title")]
    public required string Title { get; init; }

    /// <summary>
    /// Issuer of the evidence (if applicable).
    /// </summary>
    [Column("issuer")]
    public string? Issuer { get; init; }

    /// <summary>
    /// Whether the evidence is cryptographically signed.
    /// </summary>
    [Column("signed")]
    public bool Signed { get; init; }

    /// <summary>
    /// Entity that signed the evidence; expected to be set when <see cref="Signed"/> is true.
    /// </summary>
    [Column("signed_by")]
    public string? SignedBy { get; init; }

    /// <summary>
    /// Content-addressable hash of the artifact.
    /// </summary>
    [Required]
    [Column("content_hash")]
    public required string ContentHash { get; init; }

    /// <summary>
    /// Reference to the signature.
    /// </summary>
    [Column("signature_ref")]
    public string? SignatureRef { get; init; }

    /// <summary>
    /// MIME type of the artifact.
    /// </summary>
    [Column("media_type")]
    public string? MediaType { get; init; }

    /// <summary>
    /// URI to the artifact (object store, file path, or inline reference).
    /// </summary>
    [Required]
    [Column("uri")]
    public required string Uri { get; init; }

    /// <summary>
    /// Size of the artifact in bytes.
    /// </summary>
    [Column("size_bytes")]
    public long? SizeBytes { get; init; }

    /// <summary>
    /// Additional metadata as a JSON document (persisted as jsonb).
    /// </summary>
    [Column("metadata", TypeName = "jsonb")]
    public string? MetadataJson { get; init; }

    /// <summary>
    /// When this artifact was created.
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;

    // Navigation property to the owning finding (loaded by EF Core when included).
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,78 @@
|
||||
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;

namespace StellaOps.Scanner.Triage.Entities;

/// <summary>
/// Represents a triage finding (case). This is the core entity that ties
/// together all triage-related data for a specific vulnerability/rule
/// on a specific asset. Maps to the "triage_finding" table.
/// </summary>
[Table("triage_finding")]
public sealed class TriageFinding
{
    /// <summary>
    /// Unique identifier for the finding (also serves as the case ID).
    /// Generated client-side at construction time.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The asset this finding applies to.
    /// </summary>
    [Column("asset_id")]
    public Guid AssetId { get; init; }

    /// <summary>
    /// Optional environment identifier (e.g., prod, staging).
    /// </summary>
    [Column("environment_id")]
    public Guid? EnvironmentId { get; init; }

    /// <summary>
    /// Human-readable asset label (e.g., "prod/api-gateway:1.2.3").
    /// </summary>
    [Required]
    [Column("asset_label")]
    public required string AssetLabel { get; init; }

    /// <summary>
    /// Package URL identifying the affected component.
    /// </summary>
    [Required]
    [Column("purl")]
    public required string Purl { get; init; }

    /// <summary>
    /// CVE identifier if this is a vulnerability finding.
    /// </summary>
    [Column("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Rule identifier if this is a policy rule finding.
    /// </summary>
    [Column("rule_id")]
    public string? RuleId { get; init; }

    /// <summary>
    /// When this finding was first observed.
    /// </summary>
    [Column("first_seen_at")]
    public DateTimeOffset FirstSeenAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// When this finding was last observed. Settable so repeated scans can
    /// refresh the timestamp on an existing row.
    /// </summary>
    [Column("last_seen_at")]
    public DateTimeOffset LastSeenAt { get; set; } = DateTimeOffset.UtcNow;

    // Navigation properties. Initialized via C# 12 collection expressions
    // (equivalent to new List<T>()); EF Core populates them when the related
    // rows are loaded/included.

    /// <summary>Effective VEX records merged for this finding.</summary>
    public ICollection<TriageEffectiveVex> EffectiveVexRecords { get; init; } = [];

    /// <summary>Reachability analysis results for this finding.</summary>
    public ICollection<TriageReachabilityResult> ReachabilityResults { get; init; } = [];

    /// <summary>Risk/lattice evaluation results for this finding.</summary>
    public ICollection<TriageRiskResult> RiskResults { get; init; } = [];

    /// <summary>Signed triage decisions taken on this finding.</summary>
    public ICollection<TriageDecision> Decisions { get; init; } = [];

    /// <summary>Evidence artifacts attached to this finding.</summary>
    public ICollection<TriageEvidenceArtifact> EvidenceArtifacts { get; init; } = [];

    /// <summary>Smart-Diff snapshots recorded for this finding.</summary>
    public ICollection<TriageSnapshot> Snapshots { get; init; } = [];
}
|
||||
@@ -0,0 +1,66 @@
|
||||
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;

namespace StellaOps.Scanner.Triage.Entities;

/// <summary>
/// Reachability analysis result for a finding. Maps to the
/// "triage_reachability_result" table; the newest row per finding (by
/// <see cref="ComputedAt"/>) is treated as the current determination.
/// </summary>
[Table("triage_reachability_result")]
public sealed class TriageReachabilityResult
{
    /// <summary>
    /// Unique identifier. Generated client-side at construction time.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this reachability result applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// Reachability determination (Yes, No, Unknown).
    /// </summary>
    [Column("reachable")]
    public TriageReachability Reachable { get; init; }

    /// <summary>
    /// Confidence level (0-100). Range also enforced by a CHECK constraint
    /// in the SQL schema.
    /// </summary>
    [Column("confidence")]
    [Range(0, 100)]
    public short Confidence { get; init; }

    /// <summary>
    /// Reference to static analysis proof (callgraph slice, CFG slice).
    /// </summary>
    [Column("static_proof_ref")]
    public string? StaticProofRef { get; init; }

    /// <summary>
    /// Reference to runtime proof (runtime trace hits).
    /// </summary>
    [Column("runtime_proof_ref")]
    public string? RuntimeProofRef { get; init; }

    /// <summary>
    /// Hash of the inputs used to compute reachability (for caching/diffing).
    /// </summary>
    [Required]
    [Column("inputs_hash")]
    public required string InputsHash { get; init; }

    /// <summary>
    /// When this result was computed. Defaults to construction time (UTC).
    /// </summary>
    [Column("computed_at")]
    public DateTimeOffset ComputedAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// Navigation to the owning finding. Null unless loaded/included by EF Core.
    /// </summary>
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,87 @@
|
||||
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;

namespace StellaOps.Scanner.Triage.Entities;

/// <summary>
/// Risk/lattice result from the scanner's policy evaluation. Maps to the
/// "triage_risk_result" table; the database enforces uniqueness per
/// (finding_id, policy_id, policy_version, inputs_hash).
/// </summary>
[Table("triage_risk_result")]
public sealed class TriageRiskResult
{
    /// <summary>
    /// Unique identifier. Generated client-side at construction time.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this risk result applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// The policy that was applied.
    /// </summary>
    [Required]
    [Column("policy_id")]
    public required string PolicyId { get; init; }

    /// <summary>
    /// Version of the policy that was applied.
    /// </summary>
    [Required]
    [Column("policy_version")]
    public required string PolicyVersion { get; init; }

    /// <summary>
    /// Hash of the inputs used for this evaluation.
    /// </summary>
    [Required]
    [Column("inputs_hash")]
    public required string InputsHash { get; init; }

    /// <summary>
    /// Computed risk score (0-100). Range also enforced by a CHECK constraint
    /// in the SQL schema.
    /// </summary>
    [Column("score")]
    [Range(0, 100)]
    public int Score { get; init; }

    /// <summary>
    /// Final verdict (Ship, Block, Exception).
    /// </summary>
    [Column("verdict")]
    public TriageVerdict Verdict { get; init; }

    /// <summary>
    /// Current lane based on policy evaluation.
    /// </summary>
    [Column("lane")]
    public TriageLane Lane { get; init; }

    /// <summary>
    /// Short narrative explaining the decision.
    /// </summary>
    [Required]
    [Column("why")]
    public required string Why { get; init; }

    /// <summary>
    /// Structured lattice explanation for UI diffing (JSON). Stored as a raw
    /// jsonb string; callers are responsible for (de)serialization.
    /// </summary>
    [Column("explanation", TypeName = "jsonb")]
    public string? ExplanationJson { get; init; }

    /// <summary>
    /// When this result was computed. Defaults to construction time (UTC).
    /// </summary>
    [Column("computed_at")]
    public DateTimeOffset ComputedAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// Navigation to the owning finding. Null unless loaded/included by EF Core.
    /// </summary>
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;

namespace StellaOps.Scanner.Triage.Entities;

/// <summary>
/// Immutable snapshot record for Smart-Diff, capturing input/output changes.
/// Maps to the "triage_snapshot" table; the database enforces uniqueness per
/// (finding_id, to_inputs_hash, created_at).
/// </summary>
[Table("triage_snapshot")]
public sealed class TriageSnapshot
{
    /// <summary>
    /// Unique identifier. Generated client-side at construction time.
    /// </summary>
    [Key]
    [Column("id")]
    public Guid Id { get; init; } = Guid.NewGuid();

    /// <summary>
    /// The finding this snapshot applies to.
    /// </summary>
    [Column("finding_id")]
    public Guid FindingId { get; init; }

    /// <summary>
    /// What triggered this snapshot.
    /// </summary>
    [Column("trigger")]
    public TriageSnapshotTrigger Trigger { get; init; }

    /// <summary>
    /// Previous inputs hash (null for first snapshot).
    /// </summary>
    [Column("from_inputs_hash")]
    public string? FromInputsHash { get; init; }

    /// <summary>
    /// New inputs hash.
    /// </summary>
    [Required]
    [Column("to_inputs_hash")]
    public required string ToInputsHash { get; init; }

    /// <summary>
    /// Human-readable summary of what changed.
    /// </summary>
    [Required]
    [Column("summary")]
    public required string Summary { get; init; }

    /// <summary>
    /// Precomputed diff in JSON format (optional). Stored as a raw jsonb
    /// string; callers are responsible for (de)serialization.
    /// </summary>
    [Column("diff_json", TypeName = "jsonb")]
    public string? DiffJson { get; init; }

    /// <summary>
    /// When this snapshot was created. Defaults to construction time (UTC).
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// Navigation to the owning finding. Null unless loaded/included by EF Core.
    /// </summary>
    [ForeignKey(nameof(FindingId))]
    public TriageFinding? Finding { get; init; }
}
|
||||
@@ -0,0 +1,249 @@
|
||||
-- Stella Ops Triage Schema Migration
-- Generated from docs/db/triage_schema.sql
-- Version: 1.0.0
--
-- Idempotent: all DDL is guarded with IF NOT EXISTS / CREATE OR REPLACE,
-- so re-running the script against an up-to-date database is a no-op.

BEGIN;

-- Extensions
-- pgcrypto provides gen_random_uuid() used for primary-key defaults below.
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- Enums
-- Wrapped in a DO block because CREATE TYPE has no IF NOT EXISTS form.
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_lane') THEN
        CREATE TYPE triage_lane AS ENUM (
            'ACTIVE',
            'BLOCKED',
            'NEEDS_EXCEPTION',
            'MUTED_REACH',
            'MUTED_VEX',
            'COMPENSATED'
        );
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_verdict') THEN
        CREATE TYPE triage_verdict AS ENUM ('SHIP', 'BLOCK', 'EXCEPTION');
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_reachability') THEN
        CREATE TYPE triage_reachability AS ENUM ('YES', 'NO', 'UNKNOWN');
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_vex_status') THEN
        CREATE TYPE triage_vex_status AS ENUM ('affected', 'not_affected', 'under_investigation', 'unknown');
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_decision_kind') THEN
        CREATE TYPE triage_decision_kind AS ENUM ('MUTE_REACH', 'MUTE_VEX', 'ACK', 'EXCEPTION');
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_snapshot_trigger') THEN
        CREATE TYPE triage_snapshot_trigger AS ENUM (
            'FEED_UPDATE',
            'VEX_UPDATE',
            'SBOM_UPDATE',
            'RUNTIME_TRACE',
            'POLICY_UPDATE',
            'DECISION',
            'RESCAN'
        );
    END IF;

    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'triage_evidence_type') THEN
        CREATE TYPE triage_evidence_type AS ENUM (
            'SBOM_SLICE',
            'VEX_DOC',
            'PROVENANCE',
            'CALLSTACK_SLICE',
            'REACHABILITY_PROOF',
            'REPLAY_MANIFEST',
            'POLICY',
            'SCAN_LOG',
            'OTHER'
        );
    END IF;
END $$;

-- Core: finding (caseId == findingId)
-- NOTE(review): environment_id, cve_id and rule_id are nullable and Postgres
-- treats NULLs as distinct inside UNIQUE constraints, so two rows that differ
-- only in NULL key columns can coexist. If strict dedup is required, consider
-- UNIQUE NULLS NOT DISTINCT (PostgreSQL 15+) -- confirm intent.
CREATE TABLE IF NOT EXISTS triage_finding (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    asset_id uuid NOT NULL,
    environment_id uuid NULL,
    asset_label text NOT NULL,
    purl text NOT NULL,
    cve_id text NULL,
    rule_id text NULL,
    first_seen_at timestamptz NOT NULL DEFAULT now(),
    last_seen_at timestamptz NOT NULL DEFAULT now(),
    UNIQUE (asset_id, environment_id, purl, cve_id, rule_id)
);

CREATE INDEX IF NOT EXISTS ix_triage_finding_last_seen ON triage_finding (last_seen_at DESC);
CREATE INDEX IF NOT EXISTS ix_triage_finding_asset_label ON triage_finding (asset_label);
CREATE INDEX IF NOT EXISTS ix_triage_finding_purl ON triage_finding (purl);
CREATE INDEX IF NOT EXISTS ix_triage_finding_cve ON triage_finding (cve_id);

-- Effective VEX (post-merge)
CREATE TABLE IF NOT EXISTS triage_effective_vex (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    status triage_vex_status NOT NULL,
    source_domain text NOT NULL,
    source_ref text NOT NULL,
    pruned_sources jsonb NULL,
    dsse_envelope_hash text NULL,
    signature_ref text NULL,
    issuer text NULL,
    valid_from timestamptz NOT NULL DEFAULT now(),
    valid_to timestamptz NULL,
    collected_at timestamptz NOT NULL DEFAULT now()
);

CREATE INDEX IF NOT EXISTS ix_triage_effective_vex_finding ON triage_effective_vex (finding_id, collected_at DESC);

-- Reachability results
CREATE TABLE IF NOT EXISTS triage_reachability_result (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    reachable triage_reachability NOT NULL,
    confidence smallint NOT NULL CHECK (confidence >= 0 AND confidence <= 100),
    static_proof_ref text NULL,
    runtime_proof_ref text NULL,
    inputs_hash text NOT NULL,
    computed_at timestamptz NOT NULL DEFAULT now()
);

CREATE INDEX IF NOT EXISTS ix_triage_reachability_finding ON triage_reachability_result (finding_id, computed_at DESC);

-- Risk/lattice result
CREATE TABLE IF NOT EXISTS triage_risk_result (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    policy_id text NOT NULL,
    policy_version text NOT NULL,
    inputs_hash text NOT NULL,
    score int NOT NULL CHECK (score >= 0 AND score <= 100),
    verdict triage_verdict NOT NULL,
    lane triage_lane NOT NULL,
    why text NOT NULL,
    explanation jsonb NULL,
    computed_at timestamptz NOT NULL DEFAULT now(),
    UNIQUE (finding_id, policy_id, policy_version, inputs_hash)
);

CREATE INDEX IF NOT EXISTS ix_triage_risk_finding ON triage_risk_result (finding_id, computed_at DESC);
CREATE INDEX IF NOT EXISTS ix_triage_risk_lane ON triage_risk_result (lane, computed_at DESC);

-- Signed Decisions
-- Decisions are never deleted; revocation is recorded via revoked_at plus the
-- revoke_* columns so the audit trail stays intact.
CREATE TABLE IF NOT EXISTS triage_decision (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    kind triage_decision_kind NOT NULL,
    reason_code text NOT NULL,
    note text NULL,
    policy_ref text NULL,
    ttl timestamptz NULL,
    actor_subject text NOT NULL,
    actor_display text NULL,
    signature_ref text NULL,
    dsse_hash text NULL,
    created_at timestamptz NOT NULL DEFAULT now(),
    revoked_at timestamptz NULL,
    revoke_reason text NULL,
    revoke_signature_ref text NULL,
    revoke_dsse_hash text NULL
);

CREATE INDEX IF NOT EXISTS ix_triage_decision_finding ON triage_decision (finding_id, created_at DESC);
CREATE INDEX IF NOT EXISTS ix_triage_decision_kind ON triage_decision (kind, created_at DESC);
-- Partial index: only non-revoked decisions, for "current decision" lookups.
CREATE INDEX IF NOT EXISTS ix_triage_decision_active ON triage_decision (finding_id) WHERE revoked_at IS NULL;

-- Evidence artifacts
CREATE TABLE IF NOT EXISTS triage_evidence_artifact (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    type triage_evidence_type NOT NULL,
    title text NOT NULL,
    issuer text NULL,
    signed boolean NOT NULL DEFAULT false,
    signed_by text NULL,
    content_hash text NOT NULL,
    signature_ref text NULL,
    media_type text NULL,
    uri text NOT NULL,
    size_bytes bigint NULL,
    metadata jsonb NULL,
    created_at timestamptz NOT NULL DEFAULT now(),
    UNIQUE (finding_id, type, content_hash)
);

CREATE INDEX IF NOT EXISTS ix_triage_evidence_finding ON triage_evidence_artifact (finding_id, created_at DESC);
CREATE INDEX IF NOT EXISTS ix_triage_evidence_type ON triage_evidence_artifact (type, created_at DESC);

-- Snapshots for Smart-Diff
CREATE TABLE IF NOT EXISTS triage_snapshot (
    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
    finding_id uuid NOT NULL REFERENCES triage_finding(id) ON DELETE CASCADE,
    trigger triage_snapshot_trigger NOT NULL,
    from_inputs_hash text NULL,
    to_inputs_hash text NOT NULL,
    summary text NOT NULL,
    diff_json jsonb NULL,
    created_at timestamptz NOT NULL DEFAULT now(),
    UNIQUE (finding_id, to_inputs_hash, created_at)
);

CREATE INDEX IF NOT EXISTS ix_triage_snapshot_finding ON triage_snapshot (finding_id, created_at DESC);
CREATE INDEX IF NOT EXISTS ix_triage_snapshot_trigger ON triage_snapshot (trigger, created_at DESC);

-- Current-case view
-- Joins each finding with its most recent risk, reachability and VEX rows
-- (DISTINCT ON + ORDER BY ... DESC picks the latest per finding_id).
CREATE OR REPLACE VIEW v_triage_case_current AS
WITH latest_risk AS (
    SELECT DISTINCT ON (finding_id)
        finding_id, policy_id, policy_version, inputs_hash, score, verdict, lane, why, computed_at
    FROM triage_risk_result
    ORDER BY finding_id, computed_at DESC
),
latest_reach AS (
    SELECT DISTINCT ON (finding_id)
        finding_id, reachable, confidence, static_proof_ref, runtime_proof_ref, computed_at
    FROM triage_reachability_result
    ORDER BY finding_id, computed_at DESC
),
latest_vex AS (
    SELECT DISTINCT ON (finding_id)
        finding_id, status, issuer, signature_ref, source_domain, source_ref, collected_at
    FROM triage_effective_vex
    ORDER BY finding_id, collected_at DESC
)
SELECT
    f.id AS case_id,
    f.asset_id,
    f.environment_id,
    f.asset_label,
    f.purl,
    f.cve_id,
    f.rule_id,
    f.first_seen_at,
    f.last_seen_at,
    r.policy_id,
    r.policy_version,
    r.inputs_hash,
    r.score,
    r.verdict,
    r.lane,
    r.why,
    r.computed_at AS risk_computed_at,
    -- Findings with no reachability row default to UNKNOWN.
    coalesce(re.reachable, 'UNKNOWN'::triage_reachability) AS reachable,
    re.confidence AS reach_confidence,
    v.status AS vex_status,
    v.issuer AS vex_issuer,
    v.signature_ref AS vex_signature_ref,
    v.source_domain AS vex_source_domain,
    v.source_ref AS vex_source_ref
FROM triage_finding f
LEFT JOIN latest_risk r ON r.finding_id = f.id
LEFT JOIN latest_reach re ON re.finding_id = f.id
LEFT JOIN latest_vex v ON v.finding_id = f.id;

COMMIT;
|
||||
@@ -0,0 +1,16 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Scanner.Triage</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <!-- NOTE(review): floating versions ("10.0.0-*") make package restore
         non-deterministic; pin exact versions (or adopt central package
         management with a lock file) before shipping -- confirm intent. -->
    <PackageReference Include="Microsoft.EntityFrameworkCore" Version="10.0.0-*" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="10.0.0-*" />
  </ItemGroup>
</Project>
|
||||
@@ -0,0 +1,228 @@
|
||||
using Microsoft.EntityFrameworkCore;
using StellaOps.Scanner.Triage.Entities;

namespace StellaOps.Scanner.Triage;

/// <summary>
/// Entity Framework Core DbContext for the Triage schema. Mirrors the DDL in
/// the triage SQL migration (tables, indexes, unique constraints and the
/// v_triage_case_current view).
/// </summary>
public sealed class TriageDbContext : DbContext
{
    /// <summary>
    /// Initializes a new instance of the <see cref="TriageDbContext"/> class.
    /// </summary>
    public TriageDbContext(DbContextOptions<TriageDbContext> options)
        : base(options)
    {
    }

    /// <summary>
    /// Triage findings (cases).
    /// </summary>
    public DbSet<TriageFinding> Findings => Set<TriageFinding>();

    /// <summary>
    /// Effective VEX records.
    /// </summary>
    public DbSet<TriageEffectiveVex> EffectiveVex => Set<TriageEffectiveVex>();

    /// <summary>
    /// Reachability analysis results.
    /// </summary>
    public DbSet<TriageReachabilityResult> ReachabilityResults => Set<TriageReachabilityResult>();

    /// <summary>
    /// Risk/lattice evaluation results.
    /// </summary>
    public DbSet<TriageRiskResult> RiskResults => Set<TriageRiskResult>();

    /// <summary>
    /// Triage decisions.
    /// </summary>
    public DbSet<TriageDecision> Decisions => Set<TriageDecision>();

    /// <summary>
    /// Evidence artifacts.
    /// </summary>
    public DbSet<TriageEvidenceArtifact> EvidenceArtifacts => Set<TriageEvidenceArtifact>();

    /// <summary>
    /// Snapshots for Smart-Diff.
    /// </summary>
    public DbSet<TriageSnapshot> Snapshots => Set<TriageSnapshot>();

    /// <summary>
    /// Current case view (read-only; backed by v_triage_case_current).
    /// </summary>
    public DbSet<TriageCaseCurrent> CurrentCases => Set<TriageCaseCurrent>();

    /// <inheritdoc/>
    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        base.OnModelCreating(modelBuilder);

        // Configure PostgreSQL enums.
        // NOTE(review): HasPostgresEnum registers the enum with the model for
        // migrations; recent Npgsql versions also need the enum mapped on the
        // data source (e.g. NpgsqlDataSourceBuilder.MapEnum<T>()) for runtime
        // read/write -- confirm the startup wiring.
        modelBuilder.HasPostgresEnum<TriageLane>("triage_lane");
        modelBuilder.HasPostgresEnum<TriageVerdict>("triage_verdict");
        modelBuilder.HasPostgresEnum<TriageReachability>("triage_reachability");
        modelBuilder.HasPostgresEnum<TriageVexStatus>("triage_vex_status");
        modelBuilder.HasPostgresEnum<TriageDecisionKind>("triage_decision_kind");
        modelBuilder.HasPostgresEnum<TriageSnapshotTrigger>("triage_snapshot_trigger");
        modelBuilder.HasPostgresEnum<TriageEvidenceType>("triage_evidence_type");

        // Configure TriageFinding.
        // Index names match the SQL migration so EF does not try to recreate them.
        modelBuilder.Entity<TriageFinding>(entity =>
        {
            entity.ToTable("triage_finding");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => e.LastSeenAt)
                .IsDescending()
                .HasDatabaseName("ix_triage_finding_last_seen");

            entity.HasIndex(e => e.AssetLabel)
                .HasDatabaseName("ix_triage_finding_asset_label");

            entity.HasIndex(e => e.Purl)
                .HasDatabaseName("ix_triage_finding_purl");

            entity.HasIndex(e => e.CveId)
                .HasDatabaseName("ix_triage_finding_cve");

            // Mirrors the table's composite UNIQUE constraint.
            entity.HasIndex(e => new { e.AssetId, e.EnvironmentId, e.Purl, e.CveId, e.RuleId })
                .IsUnique();
        });

        // Configure TriageEffectiveVex
        modelBuilder.Entity<TriageEffectiveVex>(entity =>
        {
            entity.ToTable("triage_effective_vex");
            entity.HasKey(e => e.Id);

            // (finding_id ASC, collected_at DESC) — latest-record lookups.
            entity.HasIndex(e => new { e.FindingId, e.CollectedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_effective_vex_finding");

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.EffectiveVexRecords)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure TriageReachabilityResult
        modelBuilder.Entity<TriageReachabilityResult>(entity =>
        {
            entity.ToTable("triage_reachability_result");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => new { e.FindingId, e.ComputedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_reachability_finding");

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.ReachabilityResults)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure TriageRiskResult
        modelBuilder.Entity<TriageRiskResult>(entity =>
        {
            entity.ToTable("triage_risk_result");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => new { e.FindingId, e.ComputedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_risk_finding");

            entity.HasIndex(e => new { e.Lane, e.ComputedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_risk_lane");

            // Mirrors UNIQUE (finding_id, policy_id, policy_version, inputs_hash).
            entity.HasIndex(e => new { e.FindingId, e.PolicyId, e.PolicyVersion, e.InputsHash })
                .IsUnique();

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.RiskResults)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure TriageDecision
        modelBuilder.Entity<TriageDecision>(entity =>
        {
            entity.ToTable("triage_decision");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => new { e.FindingId, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_decision_finding");

            entity.HasIndex(e => new { e.Kind, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_decision_kind");

            // Partial index over non-revoked decisions.
            entity.HasIndex(e => e.FindingId)
                .HasFilter("revoked_at IS NULL")
                .HasDatabaseName("ix_triage_decision_active");

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.Decisions)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure TriageEvidenceArtifact
        modelBuilder.Entity<TriageEvidenceArtifact>(entity =>
        {
            entity.ToTable("triage_evidence_artifact");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => new { e.FindingId, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_evidence_finding");

            entity.HasIndex(e => new { e.Type, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_evidence_type");

            // Mirrors UNIQUE (finding_id, type, content_hash).
            entity.HasIndex(e => new { e.FindingId, e.Type, e.ContentHash })
                .IsUnique();

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.EvidenceArtifacts)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure TriageSnapshot
        modelBuilder.Entity<TriageSnapshot>(entity =>
        {
            entity.ToTable("triage_snapshot");
            entity.HasKey(e => e.Id);

            entity.HasIndex(e => new { e.FindingId, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_snapshot_finding");

            entity.HasIndex(e => new { e.Trigger, e.CreatedAt })
                .IsDescending(false, true)
                .HasDatabaseName("ix_triage_snapshot_trigger");

            // Mirrors UNIQUE (finding_id, to_inputs_hash, created_at).
            entity.HasIndex(e => new { e.FindingId, e.ToInputsHash, e.CreatedAt })
                .IsUnique();

            entity.HasOne(e => e.Finding)
                .WithMany(f => f.Snapshots)
                .HasForeignKey(e => e.FindingId)
                .OnDelete(DeleteBehavior.Cascade);
        });

        // Configure the read-only view (keyless: views cannot be tracked/updated).
        modelBuilder.Entity<TriageCaseCurrent>(entity =>
        {
            entity.ToView("v_triage_case_current");
            entity.HasNoKey();
        });
    }
}
|
||||
@@ -0,0 +1,281 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Index.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="OfflineBuildIdIndex"/>.
|
||||
/// </summary>
|
||||
public sealed class OfflineBuildIdIndexTests : IDisposable
|
||||
{
|
||||
// Per-test scratch directory; a fresh unique path per test-class instance
// so parallel test runs cannot collide.
private readonly string _tempDir;

public OfflineBuildIdIndexTests()
{
    // xUnit constructs a new instance per test, so this acts as per-test setup.
    _tempDir = Path.Combine(Path.GetTempPath(), $"buildid-test-{Guid.NewGuid():N}");
    Directory.CreateDirectory(_tempDir);
}
|
||||
|
||||
/// <summary>
/// Per-test teardown: removes the scratch directory and everything in it.
/// </summary>
public void Dispose()
{
    if (Directory.Exists(_tempDir))
    {
        Directory.Delete(_tempDir, recursive: true);
    }
}
|
||||
|
||||
#region Loading Tests
|
||||
|
||||
[Fact]
public async Task LoadAsync_EmptyIndex_WhenNoPathConfigured()
{
    // Arrange: no index path configured at all.
    var options = Options.Create(new BuildIdIndexOptions { IndexPath = null });
    var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);

    // Act
    await index.LoadAsync();

    // Assert: load completes (IsLoaded) but the index is empty — not an error.
    Assert.True(index.IsLoaded);
    Assert.Equal(0, index.Count);
}
|
||||
|
||||
[Fact]
public async Task LoadAsync_EmptyIndex_WhenFileNotFound()
{
    // Arrange: configured path points at a file that does not exist.
    var options = Options.Create(new BuildIdIndexOptions { IndexPath = "/nonexistent/file.ndjson" });
    var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);

    // Act
    await index.LoadAsync();

    // Assert: a missing file degrades to an empty index rather than throwing.
    Assert.True(index.IsLoaded);
    Assert.Equal(0, index.Count);
}
|
||||
|
||||
[Fact]
public async Task LoadAsync_ParsesNdjsonEntries()
{
    // Arrange: three valid NDJSON entries covering the ELF, PE and Mach-O
    // build-id prefixes ("gnu-build-id:", "pe-cv:", "macho-uuid:").
    var indexPath = Path.Combine(_tempDir, "index.ndjson");
    await File.WriteAllTextAsync(indexPath, """
        {"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31","distro":"debian","confidence":"exact","indexed_at":"2025-01-15T10:00:00Z"}
        {"build_id":"pe-cv:12345678-1234-1234-1234-123456789012-1","purl":"pkg:nuget/System.Text.Json@8.0.0","confidence":"inferred"}
        {"build_id":"macho-uuid:fedcba9876543210fedcba9876543210","purl":"pkg:brew/openssl@3.0.0","distro":"macos","confidence":"exact"}
        """);

    var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
    var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);

    // Act
    await index.LoadAsync();

    // Assert: every line became an entry.
    Assert.True(index.IsLoaded);
    Assert.Equal(3, index.Count);
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_SkipsEmptyLines()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index-empty-lines.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
|
||||
{"build_id":"gnu-build-id:def456","purl":"pkg:deb/debian/libssl@1.1"}
|
||||
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.Equal(2, index.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_SkipsCommentLines()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index-comments.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
# This is a comment
|
||||
{"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
// Another comment style
|
||||
{"build_id":"gnu-build-id:def456","purl":"pkg:deb/debian/libssl@1.1"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.Equal(2, index.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_SkipsInvalidJsonLines()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index-invalid.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
not valid json at all
|
||||
{"build_id":"gnu-build-id:def456","purl":"pkg:deb/debian/libssl@1.1"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.Equal(2, index.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Lookup Tests
|
||||
|
||||
[Fact]
|
||||
public async Task LookupAsync_ReturnsNull_WhenNotFound()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
var result = await index.LookupAsync("gnu-build-id:notfound");
|
||||
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LookupAsync_ReturnsNull_ForNullOrEmpty()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:abc123","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.Null(await index.LookupAsync(null!));
|
||||
Assert.Null(await index.LookupAsync(""));
|
||||
Assert.Null(await index.LookupAsync(" "));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LookupAsync_FindsExactMatch()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:abc123def456","purl":"pkg:deb/debian/libc6@2.31","version":"2.31","distro":"debian","confidence":"exact","indexed_at":"2025-01-15T10:00:00Z"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
var result = await index.LookupAsync("gnu-build-id:abc123def456");
|
||||
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("gnu-build-id:abc123def456", result.BuildId);
|
||||
Assert.Equal("pkg:deb/debian/libc6@2.31", result.Purl);
|
||||
Assert.Equal("2.31", result.Version);
|
||||
Assert.Equal("debian", result.SourceDistro);
|
||||
Assert.Equal(BuildIdConfidence.Exact, result.Confidence);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LookupAsync_CaseInsensitive()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:ABC123DEF456","purl":"pkg:deb/debian/libc6@2.31"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
// Query with lowercase
|
||||
var result = await index.LookupAsync("gnu-build-id:abc123def456");
|
||||
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("pkg:deb/debian/libc6@2.31", result.Purl);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Batch Lookup Tests
|
||||
|
||||
[Fact]
|
||||
public async Task BatchLookupAsync_ReturnsFoundEntries()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:aaa","purl":"pkg:deb/debian/liba@1.0"}
|
||||
{"build_id":"gnu-build-id:bbb","purl":"pkg:deb/debian/libb@1.0"}
|
||||
{"build_id":"gnu-build-id:ccc","purl":"pkg:deb/debian/libc@1.0"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
var results = await index.BatchLookupAsync(["gnu-build-id:aaa", "gnu-build-id:notfound", "gnu-build-id:ccc"]);
|
||||
|
||||
Assert.Equal(2, results.Count);
|
||||
Assert.Contains(results, r => r.Purl == "pkg:deb/debian/liba@1.0");
|
||||
Assert.Contains(results, r => r.Purl == "pkg:deb/debian/libc@1.0");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task BatchLookupAsync_SkipsNullAndEmpty()
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
await File.WriteAllTextAsync(indexPath, """
|
||||
{"build_id":"gnu-build-id:aaa","purl":"pkg:deb/debian/liba@1.0"}
|
||||
""");
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
var results = await index.BatchLookupAsync([null!, "", " ", "gnu-build-id:aaa"]);
|
||||
|
||||
Assert.Single(results);
|
||||
Assert.Equal("pkg:deb/debian/liba@1.0", results[0].Purl);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Confidence Parsing Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("exact", BuildIdConfidence.Exact)]
|
||||
[InlineData("EXACT", BuildIdConfidence.Exact)]
|
||||
[InlineData("inferred", BuildIdConfidence.Inferred)]
|
||||
[InlineData("Inferred", BuildIdConfidence.Inferred)]
|
||||
[InlineData("heuristic", BuildIdConfidence.Heuristic)]
|
||||
[InlineData("unknown", BuildIdConfidence.Heuristic)] // Defaults to heuristic
|
||||
[InlineData("", BuildIdConfidence.Heuristic)]
|
||||
public async Task LoadAsync_ParsesConfidenceLevels(string confidenceValue, BuildIdConfidence expected)
|
||||
{
|
||||
var indexPath = Path.Combine(_tempDir, "index.ndjson");
|
||||
var entry = new { build_id = "gnu-build-id:test", purl = "pkg:test/test@1.0", confidence = confidenceValue };
|
||||
await File.WriteAllTextAsync(indexPath, JsonSerializer.Serialize(entry));
|
||||
|
||||
var options = Options.Create(new BuildIdIndexOptions { IndexPath = indexPath, RequireSignature = false });
|
||||
var index = new OfflineBuildIdIndex(options, NullLogger<OfflineBuildIdIndex>.Instance);
|
||||
await index.LoadAsync();
|
||||
|
||||
var result = await index.LookupAsync("gnu-build-id:test");
|
||||
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal(expected, result.Confidence);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,425 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Text;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="MachOReader"/>.
|
||||
/// </summary>
|
||||
/// <summary>
/// Unit tests for <see cref="MachOReader"/>: magic detection, LC_UUID extraction,
/// platform/version parsing from LC_BUILD_VERSION, CPU type mapping, and fat
/// (universal) binary handling.
/// </summary>
public sealed class MachOReaderTests
{
    #region Test Data Builders

    /// <summary>
    /// Builds a 24-byte LC_UUID load command wrapping the given 16-byte UUID.
    /// </summary>
    private static byte[] BuildUuidCommand(byte[] uuid)
    {
        var cmd = new byte[24];
        BinaryPrimitives.WriteUInt32LittleEndian(cmd, 0x1B);          // LC_UUID
        BinaryPrimitives.WriteUInt32LittleEndian(cmd.AsSpan(4), 24);  // cmdsize
        Array.Copy(uuid, 0, cmd, 8, 16);
        return cmd;
    }

    /// <summary>
    /// Concatenates a Mach-O header with its load commands into a single image.
    /// Shared by the 32-bit and 64-bit builders.
    /// </summary>
    private static byte[] Assemble(byte[] header, List<byte[]> loadCommands)
    {
        var result = new byte[header.Length + loadCommands.Sum(c => c.Length)];
        Array.Copy(header, result, header.Length);

        var offset = header.Length;
        foreach (var cmd in loadCommands)
        {
            Array.Copy(cmd, 0, result, offset, cmd.Length);
            offset += cmd.Length;
        }

        return result;
    }

    /// <summary>
    /// Builds a minimal 64-bit Mach-O binary for testing. Always carries an
    /// LC_BUILD_VERSION command; LC_UUID is included only when <paramref name="uuid"/>
    /// is a 16-byte array.
    /// </summary>
    private static byte[] BuildMachO64(
        int cpuType = 0x0100000C, // arm64
        int cpuSubtype = 0,
        byte[]? uuid = null,
        MachOPlatform platform = MachOPlatform.MacOS,
        uint minOs = 0x000E0000, // 14.0
        uint sdk = 0x000E0000)
    {
        var loadCommands = new List<byte[]>();

        if (uuid is { Length: 16 })
        {
            loadCommands.Add(BuildUuidCommand(uuid));
        }

        // LC_BUILD_VERSION: platform, min OS, SDK, zero tool entries.
        var buildVersionCmd = new byte[24];
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd, 0x32); // LC_BUILD_VERSION
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd.AsSpan(4), 24); // cmdsize
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd.AsSpan(8), (uint)platform);
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd.AsSpan(12), minOs);
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd.AsSpan(16), sdk);
        BinaryPrimitives.WriteUInt32LittleEndian(buildVersionCmd.AsSpan(20), 0); // ntools
        loadCommands.Add(buildVersionCmd);

        // mach_header_64 is 32 bytes.
        var header = new byte[32];
        BinaryPrimitives.WriteUInt32LittleEndian(header, 0xFEEDFACF); // MH_MAGIC_64
        BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(4), cpuType);
        BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(8), cpuSubtype);
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(12), 2); // MH_EXECUTE
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(16), (uint)loadCommands.Count);
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(20), (uint)loadCommands.Sum(c => c.Length));
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(24), 0); // flags
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(28), 0); // reserved

        return Assemble(header, loadCommands);
    }

    /// <summary>
    /// Builds a minimal 32-bit Mach-O binary for testing (no LC_BUILD_VERSION command).
    /// </summary>
    private static byte[] BuildMachO32(
        int cpuType = 7, // x86
        int cpuSubtype = 0,
        byte[]? uuid = null)
    {
        var loadCommands = new List<byte[]>();

        if (uuid is { Length: 16 })
        {
            loadCommands.Add(BuildUuidCommand(uuid));
        }

        // mach_header is 28 bytes (no reserved field).
        var header = new byte[28];
        BinaryPrimitives.WriteUInt32LittleEndian(header, 0xFEEDFACE); // MH_MAGIC
        BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(4), cpuType);
        BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(8), cpuSubtype);
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(12), 2); // MH_EXECUTE
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(16), (uint)loadCommands.Count);
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(20), (uint)loadCommands.Sum(c => c.Length));
        BinaryPrimitives.WriteUInt32LittleEndian(header.AsSpan(24), 0); // flags

        return Assemble(header, loadCommands);
    }

    /// <summary>
    /// Builds a fat (universal) binary containing multiple slices, each aligned to 4 KiB.
    /// The per-slice CPU type is read back out of each slice's own header.
    /// </summary>
    private static byte[] BuildFatBinary(params byte[][] slices)
    {
        // Fat header: magic (4) + nfat_arch (4).
        // Fat arch entries: 20 bytes each (cputype, cpusubtype, offset, size, align).
        var headerSize = 8 + (slices.Length * 20);
        var alignedHeaderSize = (headerSize + 0xFFF) & ~0xFFF; // round up to 4 KiB

        var totalSize = alignedHeaderSize + slices.Sum(s => ((s.Length + 0xFFF) & ~0xFFF));
        var result = new byte[totalSize];

        // Fat headers are big-endian regardless of slice endianness.
        BinaryPrimitives.WriteUInt32BigEndian(result, 0xCAFEBABE); // FAT_MAGIC
        BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(4), (uint)slices.Length);

        var currentOffset = alignedHeaderSize;
        for (var i = 0; i < slices.Length; i++)
        {
            var slice = slices[i];
            var archOffset = 8 + (i * 20);

            // CPU type lives at offset 4 of each (little-endian) slice header.
            var cpuType = BinaryPrimitives.ReadUInt32LittleEndian(slice.AsSpan(4));

            // Write the fat_arch entry (big-endian).
            BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(archOffset), cpuType);
            BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(archOffset + 4), 0); // cpusubtype
            BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(archOffset + 8), (uint)currentOffset);
            BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(archOffset + 12), (uint)slice.Length);
            BinaryPrimitives.WriteUInt32BigEndian(result.AsSpan(archOffset + 16), 12); // align = 2^12 = 4096

            Array.Copy(slice, 0, result, currentOffset, slice.Length);
            currentOffset += (slice.Length + 0xFFF) & ~0xFFF; // advance to next 4 KiB boundary
        }

        return result;
    }

    #endregion

    #region Magic Detection Tests

    [Fact]
    public void Parse_Returns_Null_For_Empty_Stream()
    {
        using var stream = new MemoryStream([]);
        var result = MachOReader.Parse(stream, "/test/empty");
        Assert.Null(result);
    }

    [Fact]
    public void Parse_Returns_Null_For_Invalid_Magic()
    {
        var data = new byte[] { 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 };
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/invalid");
        Assert.Null(result);
    }

    [Fact]
    public void Parse_Detects_64Bit_LittleEndian_MachO()
    {
        var data = BuildMachO64();
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/arm64");

        Assert.NotNull(result);
        Assert.Single(result.Identities);
        Assert.Equal("arm64", result.Identities[0].CpuType);
        Assert.False(result.Identities[0].IsFatBinary);
    }

    [Fact]
    public void Parse_Detects_32Bit_MachO()
    {
        var data = BuildMachO32(cpuType: 7); // x86
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/i386");

        Assert.NotNull(result);
        Assert.Single(result.Identities);
        Assert.Equal("i386", result.Identities[0].CpuType);
    }

    #endregion

    #region LC_UUID Tests

    [Fact]
    public void Parse_Extracts_LC_UUID()
    {
        var uuid = new byte[] { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF, 0xFE, 0xDC, 0xBA, 0x98, 0x76, 0x54, 0x32, 0x10 };
        var data = BuildMachO64(uuid: uuid);
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/with-uuid");

        Assert.NotNull(result);
        Assert.Single(result.Identities);
        Assert.Equal("0123456789abcdeffedcba9876543210", result.Identities[0].Uuid);
    }

    [Fact]
    public void Parse_Returns_Null_Uuid_When_Not_Present()
    {
        var data = BuildMachO64(uuid: null);
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/no-uuid");

        Assert.NotNull(result);
        Assert.Single(result.Identities);
        Assert.Null(result.Identities[0].Uuid);
    }

    [Fact]
    public void Parse_UUID_Is_Lowercase_Hex_No_Dashes()
    {
        var uuid = new byte[] { 0xAB, 0xCD, 0xEF, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0x12, 0x34, 0x56, 0x78, 0x9A };
        var data = BuildMachO64(uuid: uuid);
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/uuid-format");

        Assert.NotNull(result);
        var uuidString = result.Identities[0].Uuid;
        Assert.NotNull(uuidString);
        Assert.Equal(32, uuidString.Length);
        Assert.DoesNotContain("-", uuidString);
        Assert.Equal(uuidString.ToLowerInvariant(), uuidString);
    }

    #endregion

    #region Platform Detection Tests

    [Theory]
    [InlineData(MachOPlatform.MacOS)]
    [InlineData(MachOPlatform.iOS)]
    [InlineData(MachOPlatform.TvOS)]
    [InlineData(MachOPlatform.WatchOS)]
    [InlineData(MachOPlatform.MacCatalyst)]
    [InlineData(MachOPlatform.VisionOS)]
    public void Parse_Extracts_Platform_From_LC_BUILD_VERSION(MachOPlatform expectedPlatform)
    {
        var data = BuildMachO64(platform: expectedPlatform);
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/platform");

        Assert.NotNull(result);
        Assert.Single(result.Identities);
        Assert.Equal(expectedPlatform, result.Identities[0].Platform);
    }

    [Fact]
    public void Parse_Extracts_MinOs_Version()
    {
        var data = BuildMachO64(minOs: 0x000E0500); // 14.5.0
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/min-os");

        Assert.NotNull(result);
        Assert.Equal("14.5", result.Identities[0].MinOsVersion);
    }

    [Fact]
    public void Parse_Extracts_SDK_Version()
    {
        var data = BuildMachO64(sdk: 0x000F0000); // 15.0.0
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/sdk");

        Assert.NotNull(result);
        Assert.Equal("15.0", result.Identities[0].SdkVersion);
    }

    [Fact]
    public void Parse_Version_With_Patch()
    {
        var data = BuildMachO64(minOs: 0x000E0501); // 14.5.1
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/version-patch");

        Assert.NotNull(result);
        Assert.Equal("14.5.1", result.Identities[0].MinOsVersion);
    }

    #endregion

    #region CPU Type Tests

    [Theory]
    [InlineData(0x00000007, "i386")]   // CPU_TYPE_X86
    [InlineData(0x01000007, "x86_64")] // CPU_TYPE_X86_64
    [InlineData(0x0000000C, "arm")]    // CPU_TYPE_ARM
    [InlineData(0x0100000C, "arm64")]  // CPU_TYPE_ARM64
    public void Parse_Maps_CpuType_Correctly(int cpuType, string expectedName)
    {
        var data = BuildMachO64(cpuType: cpuType);
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/test/cpu");

        Assert.NotNull(result);
        Assert.Equal(expectedName, result.Identities[0].CpuType);
    }

    #endregion

    #region Fat Binary Tests

    [Fact]
    public void Parse_Handles_Fat_Binary()
    {
        var arm64Slice = BuildMachO64(cpuType: 0x0100000C, uuid: new byte[] { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10 });
        var x64Slice = BuildMachO64(cpuType: 0x01000007, uuid: new byte[] { 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20 });

        var fatData = BuildFatBinary(arm64Slice, x64Slice);
        using var stream = new MemoryStream(fatData);
        var result = MachOReader.Parse(stream, "/test/universal");

        Assert.NotNull(result);
        Assert.Equal(2, result.Identities.Count);

        // Both slices should be marked as fat binary slices.
        Assert.True(result.Identities[0].IsFatBinary);
        Assert.True(result.Identities[1].IsFatBinary);

        // Each slice keeps its own UUID.
        Assert.NotEqual(result.Identities[0].Uuid, result.Identities[1].Uuid);
    }

    [Fact]
    public void ParseFatBinary_Returns_Multiple_Identities()
    {
        var arm64Slice = BuildMachO64(cpuType: 0x0100000C);
        var x64Slice = BuildMachO64(cpuType: 0x01000007);

        var fatData = BuildFatBinary(arm64Slice, x64Slice);
        using var stream = new MemoryStream(fatData);
        var identities = MachOReader.ParseFatBinary(stream);

        Assert.Equal(2, identities.Count);
    }

    #endregion

    #region TryExtractIdentity Tests

    [Fact]
    public void TryExtractIdentity_Returns_True_For_Valid_MachO()
    {
        var data = BuildMachO64();
        using var stream = new MemoryStream(data);

        var success = MachOReader.TryExtractIdentity(stream, out var identity);

        Assert.True(success);
        Assert.NotNull(identity);
        Assert.Equal("arm64", identity.CpuType);
    }

    [Fact]
    public void TryExtractIdentity_Returns_False_For_Invalid_Data()
    {
        var data = new byte[] { 0x00, 0x00, 0x00, 0x00 };
        using var stream = new MemoryStream(data);

        var success = MachOReader.TryExtractIdentity(stream, out var identity);

        Assert.False(success);
        Assert.Null(identity);
    }

    [Fact]
    public void TryExtractIdentity_Returns_First_Slice_For_Fat_Binary()
    {
        var arm64Slice = BuildMachO64(cpuType: 0x0100000C);
        var x64Slice = BuildMachO64(cpuType: 0x01000007);

        var fatData = BuildFatBinary(arm64Slice, x64Slice);
        using var stream = new MemoryStream(fatData);

        var success = MachOReader.TryExtractIdentity(stream, out var identity);

        Assert.True(success);
        Assert.NotNull(identity);
        // Should get first slice.
        Assert.Equal("arm64", identity.CpuType);
    }

    #endregion

    #region Path and LayerDigest Tests

    [Fact]
    public void Parse_Preserves_Path_And_LayerDigest()
    {
        var data = BuildMachO64();
        using var stream = new MemoryStream(data);
        var result = MachOReader.Parse(stream, "/usr/bin/myapp", "sha256:abc123");

        Assert.NotNull(result);
        Assert.Equal("/usr/bin/myapp", result.Path);
        Assert.Equal("sha256:abc123", result.LayerDigest);
    }

    #endregion
}
|
||||
@@ -0,0 +1,361 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.Scanner.Analyzers.Native;
|
||||
using StellaOps.Scanner.Analyzers.Native.Tests.Fixtures;
|
||||
using StellaOps.Scanner.Analyzers.Native.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for PeReader full PE parsing including CodeView GUID, Rich header, and version resources.
|
||||
/// </summary>
|
||||
public class PeReaderTests : NativeTestBase
|
||||
{
|
||||
#region Basic Parsing
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_InvalidData_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var invalidData = new byte[] { 0x00, 0x01, 0x02, 0x03 };
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(invalidData, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse();
|
||||
identity.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_TooShort_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var shortData = new byte[0x20];
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(shortData, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_MissingMzSignature_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var data = new byte[0x100];
|
||||
data[0] = (byte)'X';
|
||||
data[1] = (byte)'Y';
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(data, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_ValidMinimalPe64_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.Is64Bit.Should().BeTrue();
|
||||
identity.Machine.Should().Be("x86_64");
|
||||
identity.Subsystem.Should().Be(PeSubsystem.WindowsConsole);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_ValidMinimalPe32_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var pe = new PeBuilder()
|
||||
.Is64Bit(false)
|
||||
.WithSubsystem(PeSubsystem.WindowsConsole)
|
||||
.Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.Is64Bit.Should().BeFalse();
|
||||
identity.Machine.Should().Be("x86");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_GuiSubsystem_ParsesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var pe = new PeBuilder()
|
||||
.Is64Bit(true)
|
||||
.WithSubsystem(PeSubsystem.WindowsGui)
|
||||
.Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.Subsystem.Should().Be(PeSubsystem.WindowsGui);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Parse Method
|
||||
|
||||
[Fact]
|
||||
public void Parse_ValidPeStream_ReturnsPeParseResult()
|
||||
{
|
||||
// Arrange
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
using var stream = new MemoryStream(pe);
|
||||
|
||||
// Act
|
||||
var result = PeReader.Parse(stream, "test.exe");
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Identity.Should().NotBeNull();
|
||||
result.Identity.Is64Bit.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Parse_InvalidStream_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var invalidData = new byte[] { 0x00, 0x01, 0x02, 0x03 };
|
||||
using var stream = new MemoryStream(invalidData);
|
||||
|
||||
// Act
|
||||
var result = PeReader.Parse(stream, "invalid.exe");
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Parse_ThrowsOnNullStream()
|
||||
{
|
||||
// Act & Assert
|
||||
var action = () => PeReader.Parse(null!, "test.exe");
|
||||
action.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Machine Architecture
|
||||
|
||||
[Theory]
|
||||
[InlineData(PeMachine.I386, "x86", false)]
|
||||
[InlineData(PeMachine.Amd64, "x86_64", true)]
|
||||
[InlineData(PeMachine.Arm64, "arm64", true)]
|
||||
public void TryExtractIdentity_MachineTypes_MapCorrectly(PeMachine machine, string expectedArch, bool is64Bit)
|
||||
{
|
||||
// Arrange
|
||||
var pe = new PeBuilder()
|
||||
.Is64Bit(is64Bit)
|
||||
.WithMachine(machine)
|
||||
.Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.Machine.Should().Be(expectedArch);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Exports
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_NoExports_ReturnsEmptyList()
|
||||
{
|
||||
// Arrange - standard console app has no exports
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.Exports.Should().BeEmpty();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Compiler Hints (Rich Header)
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_NoRichHeader_ReturnsEmptyHints()
|
||||
{
|
||||
// Arrange - builder-generated PEs don't have rich header
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.CompilerHints.Should().BeEmpty();
|
||||
identity.RichHeaderHash.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CodeView Debug Info
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_NoDebugDirectory_ReturnsNullCodeView()
|
||||
{
|
||||
// Arrange - builder-generated PEs don't have debug directory
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.CodeViewGuid.Should().BeNull();
|
||||
identity.CodeViewAge.Should().BeNull();
|
||||
identity.PdbPath.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Version Resources
|
||||
|
||||
[Fact]
|
||||
public void TryExtractIdentity_NoVersionResource_ReturnsNullVersions()
|
||||
{
|
||||
// Arrange - builder-generated PEs don't have version resources
|
||||
var pe = PeBuilder.Console64().Build();
|
||||
|
||||
// Act
|
||||
var result = PeReader.TryExtractIdentity(pe, out var identity);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
identity.Should().NotBeNull();
|
||||
identity!.ProductVersion.Should().BeNull();
|
||||
identity.FileVersion.Should().BeNull();
|
||||
identity.CompanyName.Should().BeNull();
|
||||
identity.ProductName.Should().BeNull();
|
||||
identity.OriginalFilename.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism
|
||||
|
||||
[Fact]
public void TryExtractIdentity_SameInput_ReturnsSameOutput()
{
    // Extraction should be a pure function of the input bytes: two passes
    // over the same image must yield equivalent identities.
    var peBytes = PeBuilder.Console64().Build();

    PeReader.TryExtractIdentity(peBytes, out var firstPass);
    PeReader.TryExtractIdentity(peBytes, out var secondPass);

    firstPass.Should().BeEquivalentTo(secondPass);
}
|
||||
|
||||
[Fact]
public void TryExtractIdentity_DifferentInputs_ReturnsDifferentOutput()
{
    // A 64-bit and a 32-bit image must at minimum disagree on bitness.
    var image64 = PeBuilder.Console64().Build();
    var image32 = new PeBuilder().Is64Bit(false).Build();

    PeReader.TryExtractIdentity(image64, out var identity64);
    PeReader.TryExtractIdentity(image32, out var identity32);

    identity64!.Is64Bit.Should().NotBe(identity32!.Is64Bit);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases
|
||||
|
||||
[Fact]
public void TryExtractIdentity_InvalidPeOffset_ReturnsFalse()
{
    // Arrange - MZ signature present, but the e_lfanew field at 0x3C
    // (little-endian 0x0000FFFF) points past the end of the 0x100-byte buffer.
    var data = new byte[0x100];
    data[0] = (byte)'M';
    data[1] = (byte)'Z';
    data[0x3C] = 0xFF;
    data[0x3D] = 0xFF;
    data[0x3E] = 0x00;
    data[0x3F] = 0x00;

    // Act - the identity output is irrelevant for a rejected image, so use a
    // discard instead of an unused local.
    var result = PeReader.TryExtractIdentity(data, out _);

    // Assert
    result.Should().BeFalse();
}
|
||||
|
||||
[Fact]
public void TryExtractIdentity_MissingPeSignature_ReturnsFalse()
{
    // Arrange - MZ header whose e_lfanew points at 0x80, but no "PE\0\0"
    // signature exists there (buffer is zero-filled).
    var data = new byte[0x100];
    data[0] = (byte)'M';
    data[1] = (byte)'Z';
    data[0x3C] = 0x80; // PE offset at 0x80

    // Act - identity is irrelevant for a rejected image; discard it.
    var result = PeReader.TryExtractIdentity(data, out _);

    // Assert
    result.Should().BeFalse();
}
|
||||
|
||||
[Fact]
public void TryExtractIdentity_InvalidMagic_ReturnsFalse()
{
    // Arrange - valid MZ + "PE\0\0" signature, but a COFF header whose
    // SizeOfOptionalHeader is 0, so there is no optional-header magic to read.
    var data = new byte[0x200];
    data[0] = (byte)'M';
    data[1] = (byte)'Z';
    data[0x3C] = 0x80; // PE offset at 0x80

    // PE signature
    data[0x80] = (byte)'P';
    data[0x81] = (byte)'E';
    data[0x82] = 0;
    data[0x83] = 0;

    // Invalid COFF header with size 0
    data[0x80 + 16] = 0; // SizeOfOptionalHeader = 0

    // Act - identity is irrelevant for a rejected image; discard it.
    var result = PeReader.TryExtractIdentity(data, out _);

    // Assert
    result.Should().BeFalse();
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,387 @@
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
using StellaOps.Scanner.Reachability.Witnesses;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="PathWitnessBuilder"/>: single-path witness
/// construction, batch enumeration across multiple entrypoints, and
/// content-addressed witness-id determinism.
/// </summary>
public sealed class PathWitnessBuilderTests
{
    private readonly ICryptoHash _cryptoHash;
    private readonly TimeProvider _timeProvider;

    public PathWitnessBuilderTests()
    {
        _cryptoHash = DefaultCryptoHash.CreateForTests();
        _timeProvider = TimeProvider.System;
    }

    [Fact]
    public async Task BuildAsync_ReturnsNull_WhenNoPathExists()
    {
        // Arrange - the requested sink symbol does not exist in the graph.
        var graph = CreateSimpleGraph();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
        var request = CreateNewtonsoftRequest(graph, sinkSymbolId: "sym:unreachable");

        // Act
        var result = await builder.BuildAsync(request);

        // Assert - no entrypoint-to-sink path means no witness.
        Assert.Null(result);
    }

    [Fact]
    public async Task BuildAsync_ReturnsWitness_WhenPathExists()
    {
        // Arrange
        var graph = CreateSimpleGraph();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
        var request = CreateNewtonsoftRequest(graph, sinkSymbolId: "sym:sink1");

        // Act
        var result = await builder.BuildAsync(request);

        // Assert
        Assert.NotNull(result);
        Assert.Equal(WitnessSchema.Version, result.WitnessSchema);
        Assert.StartsWith(WitnessSchema.WitnessIdPrefix, result.WitnessId);
        Assert.Equal("CVE-2024-12345", result.Vuln.Id);
        Assert.Equal("sym:entry1", result.Entrypoint.SymbolId);
        Assert.Equal("sym:sink1", result.Sink.SymbolId);
        Assert.NotEmpty(result.Path);
    }

    [Fact]
    public async Task BuildAsync_GeneratesContentAddressedWitnessId()
    {
        // Arrange
        var graph = CreateSimpleGraph();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
        var request = CreateNewtonsoftRequest(graph, sinkSymbolId: "sym:sink1");

        // Act - build twice from identical inputs.
        var result1 = await builder.BuildAsync(request);
        var result2 = await builder.BuildAsync(request);

        // Assert
        Assert.NotNull(result1);
        Assert.NotNull(result2);
        // The witness ID should be deterministic (same input = same hash)
        // Note: ObservedAt differs, but witness ID is computed without it
        Assert.Equal(result1.WitnessId, result2.WitnessId);
    }

    [Fact]
    public async Task BuildAsync_PopulatesArtifactInfo()
    {
        // Arrange
        var graph = CreateSimpleGraph();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);

        var request = new PathWitnessRequest
        {
            SbomDigest = "sha256:sbom123",
            ComponentPurl = "pkg:npm/lodash@4.17.21",
            VulnId = "CVE-2024-99999",
            VulnSource = "GHSA",
            AffectedRange = "<4.17.21",
            EntrypointSymbolId = "sym:entry1",
            EntrypointKind = "grpc",
            EntrypointName = "UserService.GetUser",
            SinkSymbolId = "sym:sink1",
            SinkType = "prototype_pollution",
            CallGraph = graph,
            CallgraphDigest = "blake3:graph456"
        };

        // Act
        var result = await builder.BuildAsync(request);

        // Assert - artifact section mirrors the request's SBOM/component data.
        Assert.NotNull(result);
        Assert.Equal("sha256:sbom123", result.Artifact.SbomDigest);
        Assert.Equal("pkg:npm/lodash@4.17.21", result.Artifact.ComponentPurl);
    }

    [Fact]
    public async Task BuildAsync_PopulatesEvidenceInfo()
    {
        // Arrange
        var graph = CreateSimpleGraph();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);

        var request = new PathWitnessRequest
        {
            SbomDigest = "sha256:abc123",
            ComponentPurl = "pkg:nuget/Test@1.0.0",
            VulnId = "CVE-2024-12345",
            VulnSource = "NVD",
            AffectedRange = "<=1.0.0",
            EntrypointSymbolId = "sym:entry1",
            EntrypointKind = "http",
            EntrypointName = "TestController.Get",
            SinkSymbolId = "sym:sink1",
            SinkType = "sql_injection",
            CallGraph = graph,
            CallgraphDigest = "blake3:callgraph789",
            SurfaceDigest = "sha256:surface123",
            AnalysisConfigDigest = "sha256:config456",
            BuildId = "build:xyz789"
        };

        // Act
        var result = await builder.BuildAsync(request);

        // Assert - every optional evidence digest flows through unchanged.
        Assert.NotNull(result);
        Assert.Equal("blake3:callgraph789", result.Evidence.CallgraphDigest);
        Assert.Equal("sha256:surface123", result.Evidence.SurfaceDigest);
        Assert.Equal("sha256:config456", result.Evidence.AnalysisConfigDigest);
        Assert.Equal("build:xyz789", result.Evidence.BuildId);
    }

    [Fact]
    public async Task BuildAsync_FindsShortestPath()
    {
        // Arrange - graph with multiple paths
        var graph = CreateGraphWithMultiplePaths();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);

        var request = new PathWitnessRequest
        {
            SbomDigest = "sha256:abc123",
            ComponentPurl = "pkg:nuget/Test@1.0.0",
            VulnId = "CVE-2024-12345",
            VulnSource = "NVD",
            AffectedRange = "<=1.0.0",
            EntrypointSymbolId = "sym:start",
            EntrypointKind = "http",
            EntrypointName = "Start",
            SinkSymbolId = "sym:end",
            SinkType = "deserialization",
            CallGraph = graph,
            CallgraphDigest = "blake3:abc123"
        };

        // Act
        var result = await builder.BuildAsync(request);

        // Assert
        Assert.NotNull(result);
        // Short path: start -> direct -> end (3 steps)
        // Long path: start -> long1 -> long2 -> long3 -> end (5 steps)
        Assert.Equal(3, result.Path.Count);
        Assert.Equal("sym:start", result.Path[0].SymbolId);
        Assert.Equal("sym:direct", result.Path[1].SymbolId);
        Assert.Equal("sym:end", result.Path[2].SymbolId);
    }

    [Fact]
    public async Task BuildAllAsync_YieldsMultipleWitnesses_WhenMultipleRootsReachSink()
    {
        // Arrange - two roots both reach the sink through a shared middle node.
        var graph = CreateGraphWithMultipleRoots();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
        var request = CreateBatchRequest(graph, maxWitnesses: 10);

        // Act
        var witnesses = new List<PathWitness>();
        await foreach (var witness in builder.BuildAllAsync(request))
        {
            witnesses.Add(witness);
        }

        // Assert - one witness per reaching entrypoint.
        Assert.Equal(2, witnesses.Count);
        Assert.Contains(witnesses, w => w.Entrypoint.SymbolId == "sym:root1");
        Assert.Contains(witnesses, w => w.Entrypoint.SymbolId == "sym:root2");
    }

    [Fact]
    public async Task BuildAllAsync_RespectsMaxWitnesses()
    {
        // Arrange - same two-root graph, but cap the batch at one witness.
        var graph = CreateGraphWithMultipleRoots();
        var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
        var request = CreateBatchRequest(graph, maxWitnesses: 1);

        // Act
        var witnesses = new List<PathWitness>();
        await foreach (var witness in builder.BuildAllAsync(request))
        {
            witnesses.Add(witness);
        }

        // Assert
        Assert.Single(witnesses);
    }

    #region Test Helpers

    /// <summary>
    /// Canonical single-path request used by the basic BuildAsync tests;
    /// only the sink symbol varies between them.
    /// </summary>
    private static PathWitnessRequest CreateNewtonsoftRequest(RichGraph graph, string sinkSymbolId) => new()
    {
        SbomDigest = "sha256:abc123",
        ComponentPurl = "pkg:nuget/Newtonsoft.Json@12.0.3",
        VulnId = "CVE-2024-12345",
        VulnSource = "NVD",
        AffectedRange = "<=12.0.3",
        EntrypointSymbolId = "sym:entry1",
        EntrypointKind = "http",
        EntrypointName = "GET /api/test",
        SinkSymbolId = sinkSymbolId,
        SinkType = "deserialization",
        CallGraph = graph,
        CallgraphDigest = "blake3:abc123"
    };

    /// <summary>
    /// Canonical batch request used by the BuildAllAsync tests; only the
    /// witness cap varies between them.
    /// </summary>
    private static BatchWitnessRequest CreateBatchRequest(RichGraph graph, int maxWitnesses) => new()
    {
        SbomDigest = "sha256:abc123",
        ComponentPurl = "pkg:nuget/Test@1.0.0",
        VulnId = "CVE-2024-12345",
        VulnSource = "NVD",
        AffectedRange = "<=1.0.0",
        SinkSymbolId = "sym:sink",
        SinkType = "deserialization",
        CallGraph = graph,
        CallgraphDigest = "blake3:abc123",
        MaxWitnesses = maxWitnesses
    };

    /// <summary>Linear chain: entry1 -> middle1 -> sink1, rooted at entry1.</summary>
    private static RichGraph CreateSimpleGraph()
    {
        var nodes = new List<RichGraphNode>
        {
            new("n1", "sym:entry1", null, null, "dotnet", "method", "Entry1", null, null, null, null),
            new("n2", "sym:middle1", null, null, "dotnet", "method", "Middle1", null, null, null, null),
            new("n3", "sym:sink1", null, null, "dotnet", "method", "Sink1", null, null, null, null)
        };

        var edges = new List<RichGraphEdge>
        {
            new("n1", "n2", "call", null, null, null, 1.0, null),
            new("n2", "n3", "call", null, null, null, 1.0, null)
        };

        var roots = new List<RichGraphRoot>
        {
            new("n1", "http", "/api/test")
        };

        return new RichGraph(
            nodes,
            edges,
            roots,
            new RichGraphAnalyzer("test", "1.0.0", null));
    }

    /// <summary>
    /// Diamond-ish graph with a 3-step path (start -> direct -> end) and a
    /// 5-step path (start -> long1 -> long2 -> long3 -> end) to the same sink.
    /// </summary>
    private static RichGraph CreateGraphWithMultiplePaths()
    {
        var nodes = new List<RichGraphNode>
        {
            new("n0", "sym:start", null, null, "dotnet", "method", "Start", null, null, null, null),
            new("n1", "sym:direct", null, null, "dotnet", "method", "Direct", null, null, null, null),
            new("n2", "sym:long1", null, null, "dotnet", "method", "Long1", null, null, null, null),
            new("n3", "sym:long2", null, null, "dotnet", "method", "Long2", null, null, null, null),
            new("n4", "sym:long3", null, null, "dotnet", "method", "Long3", null, null, null, null),
            new("n5", "sym:end", null, null, "dotnet", "method", "End", null, null, null, null)
        };

        var edges = new List<RichGraphEdge>
        {
            // Short path: start -> direct -> end
            new("n0", "n1", "call", null, null, null, 1.0, null),
            new("n1", "n5", "call", null, null, null, 1.0, null),
            // Long path: start -> long1 -> long2 -> long3 -> end
            new("n0", "n2", "call", null, null, null, 1.0, null),
            new("n2", "n3", "call", null, null, null, 1.0, null),
            new("n3", "n4", "call", null, null, null, 1.0, null),
            new("n4", "n5", "call", null, null, null, 1.0, null)
        };

        var roots = new List<RichGraphRoot>
        {
            new("n0", "http", "/api/start")
        };

        return new RichGraph(
            nodes,
            edges,
            roots,
            new RichGraphAnalyzer("test", "1.0.0", null));
    }

    /// <summary>Two roots (root1, root2) converging on one sink via a shared middle node.</summary>
    private static RichGraph CreateGraphWithMultipleRoots()
    {
        var nodes = new List<RichGraphNode>
        {
            new("n1", "sym:root1", null, null, "dotnet", "method", "Root1", null, null, null, null),
            new("n2", "sym:root2", null, null, "dotnet", "method", "Root2", null, null, null, null),
            new("n3", "sym:middle", null, null, "dotnet", "method", "Middle", null, null, null, null),
            new("n4", "sym:sink", null, null, "dotnet", "method", "Sink", null, null, null, null)
        };

        var edges = new List<RichGraphEdge>
        {
            new("n1", "n3", "call", null, null, null, 1.0, null),
            new("n2", "n3", "call", null, null, null, 1.0, null),
            new("n3", "n4", "call", null, null, null, 1.0, null)
        };

        var roots = new List<RichGraphRoot>
        {
            new("n1", "http", "/api/root1"),
            new("n2", "http", "/api/root2")
        };

        return new RichGraph(
            nodes,
            edges,
            roots,
            new RichGraphAnalyzer("test", "1.0.0", null));
    }

    #endregion
}
|
||||
@@ -0,0 +1,320 @@
|
||||
using System.Text.Json;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Reachability.Attestation;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ReachabilityWitnessDsseBuilder"/>: in-toto
/// statement construction, deterministic serialization, and statement hashing.
/// Sprint: SPRINT_3620_0001_0001
/// Task: RWD-011
/// </summary>
public sealed class ReachabilityWitnessDsseBuilderTests
{
    private readonly ReachabilityWitnessDsseBuilder _builder;
    private readonly FakeTimeProvider _clock;

    public ReachabilityWitnessDsseBuilderTests()
    {
        // Pin the clock so GeneratedAt assertions are deterministic.
        _clock = new FakeTimeProvider(new DateTimeOffset(2025, 12, 18, 10, 0, 0, TimeSpan.Zero));
        _builder = new ReachabilityWitnessDsseBuilder(
            CryptoHashFactory.CreateDefault(),
            _clock);
    }

    #region BuildStatement Tests

    [Fact]
    public void BuildStatement_CreatesValidStatement()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        // The envelope must use the in-toto v1 statement and our predicate type.
        Assert.NotNull(stmt);
        Assert.Equal("https://in-toto.io/Statement/v1", stmt.Type);
        Assert.Equal("https://stella.ops/reachabilityWitness/v1", stmt.PredicateType);
        Assert.Single(stmt.Subject);
    }

    [Fact]
    public void BuildStatement_SetsSubjectCorrectly()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:imageabc123");

        // Subject name keeps the algorithm prefix; the digest map holds the
        // bare hex keyed by algorithm.
        var firstSubject = stmt.Subject[0];
        Assert.Equal("sha256:imageabc123", firstSubject.Name);
        Assert.Equal("imageabc123", firstSubject.Digest["sha256"]);
    }

    [Fact]
    public void BuildStatement_ExtractsPredicateCorrectly()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456",
            graphCasUri: "cas://local/blake3:abc123",
            policyHash: "sha256:policy123",
            sourceCommit: "abc123def456");

        // All optional provenance inputs must flow into the predicate untouched.
        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal("stella.ops/reachabilityWitness@v1", witness.Schema);
        Assert.Equal("blake3:abc123", witness.GraphHash);
        Assert.Equal("cas://local/blake3:abc123", witness.GraphCasUri);
        Assert.Equal("sha256:def456", witness.SubjectDigest);
        Assert.Equal("sha256:policy123", witness.PolicyHash);
        Assert.Equal("abc123def456", witness.SourceCommit);
    }

    [Fact]
    public void BuildStatement_CountsNodesAndEdges()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        // The fixture graph has three nodes and two edges.
        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal(3, witness.NodeCount);
        Assert.Equal(2, witness.EdgeCount);
    }

    [Fact]
    public void BuildStatement_CountsEntrypoints()
    {
        var richGraph = CreateTestGraphWithRoots();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        // The rooted fixture declares two entrypoints.
        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal(2, witness.EntrypointCount);
    }

    [Fact]
    public void BuildStatement_UsesProvidedTimestamp()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        // GeneratedAt must come from the injected clock, not the system clock.
        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal(_clock.GetUtcNow(), witness.GeneratedAt);
    }

    [Fact]
    public void BuildStatement_ExtractsAnalyzerVersion()
    {
        var richGraph = CreateTestGraph();

        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal("1.0.0", witness.AnalyzerVersion);
    }

    #endregion

    #region SerializeStatement Tests

    [Fact]
    public void SerializeStatement_ProducesValidJson()
    {
        var richGraph = CreateTestGraph();
        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        var payload = _builder.SerializeStatement(stmt);

        // The serialized form must carry the in-toto envelope markers.
        Assert.NotEmpty(payload);
        var text = System.Text.Encoding.UTF8.GetString(payload);
        Assert.Contains("\"_type\":\"https://in-toto.io/Statement/v1\"", text);
        Assert.Contains("\"predicateType\":\"https://stella.ops/reachabilityWitness/v1\"", text);
    }

    [Fact]
    public void SerializeStatement_IsDeterministic()
    {
        var richGraph = CreateTestGraph();
        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        // Serializing the same statement twice must yield identical bytes.
        var firstPayload = _builder.SerializeStatement(stmt);
        var secondPayload = _builder.SerializeStatement(stmt);

        Assert.Equal(firstPayload, secondPayload);
    }

    #endregion

    #region ComputeStatementHash Tests

    [Fact]
    public void ComputeStatementHash_ReturnsBlake3Hash()
    {
        var richGraph = CreateTestGraph();
        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");
        var payload = _builder.SerializeStatement(stmt);

        var digest = _builder.ComputeStatementHash(payload);

        Assert.StartsWith("blake3:", digest);
        Assert.Equal(64 + 7, digest.Length); // "blake3:" + 64 hex chars
    }

    [Fact]
    public void ComputeStatementHash_IsDeterministic()
    {
        var richGraph = CreateTestGraph();
        var stmt = _builder.BuildStatement(
            richGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");
        var payload = _builder.SerializeStatement(stmt);

        // Hashing the same bytes twice must give the same digest.
        var firstDigest = _builder.ComputeStatementHash(payload);
        var secondDigest = _builder.ComputeStatementHash(payload);

        Assert.Equal(firstDigest, secondDigest);
    }

    #endregion

    #region Edge Cases

    [Fact]
    public void BuildStatement_ThrowsForNullGraph()
    {
        Assert.Throws<ArgumentNullException>(() =>
            _builder.BuildStatement(null!, "blake3:abc", "sha256:def"));
    }

    [Fact]
    public void BuildStatement_ThrowsForEmptyGraphHash()
    {
        var richGraph = CreateTestGraph();
        Assert.Throws<ArgumentException>(() =>
            _builder.BuildStatement(richGraph, "", "sha256:def"));
    }

    [Fact]
    public void BuildStatement_ThrowsForEmptySubjectDigest()
    {
        var richGraph = CreateTestGraph();
        Assert.Throws<ArgumentException>(() =>
            _builder.BuildStatement(richGraph, "blake3:abc", ""));
    }

    [Fact]
    public void BuildStatement_HandlesEmptyGraph()
    {
        // A graph with no nodes/edges/roots still yields a valid statement.
        var emptyGraph = new RichGraph(
            Schema: "richgraph-v1",
            Analyzer: new RichGraphAnalyzer("test", "1.0.0", null),
            Nodes: Array.Empty<RichGraphNode>(),
            Edges: Array.Empty<RichGraphEdge>(),
            Roots: null);

        var stmt = _builder.BuildStatement(
            emptyGraph,
            graphHash: "blake3:abc123",
            subjectDigest: "sha256:def456");

        var witness = stmt.Predicate as ReachabilityWitnessStatement;
        Assert.NotNull(witness);
        Assert.Equal(0, witness.NodeCount);
        Assert.Equal(0, witness.EdgeCount);
        Assert.Equal("unknown", witness.Language);
    }

    #endregion

    #region Test Helpers

    // Shared three-node chain (A -> B -> C, C is a sink) used by both fixtures.
    private static RichGraphNode[] DefaultNodes() => new[]
    {
        new RichGraphNode("n1", "sym:dotnet:A", null, null, "dotnet", "method", "A", null, null, null, null),
        new RichGraphNode("n2", "sym:dotnet:B", null, null, "dotnet", "method", "B", null, null, null, null),
        new RichGraphNode("n3", "sym:dotnet:C", null, null, "dotnet", "sink", "C", null, null, null, null)
    };

    private static RichGraphEdge[] DefaultEdges() => new[]
    {
        new RichGraphEdge("n1", "n2", "call", null, null, null, 0.9, null),
        new RichGraphEdge("n2", "n3", "call", null, null, null, 0.9, null)
    };

    private static RichGraph CreateTestGraph() => new(
        Schema: "richgraph-v1",
        Analyzer: new RichGraphAnalyzer("test-analyzer", "1.0.0", null),
        Nodes: DefaultNodes(),
        Edges: DefaultEdges(),
        Roots: null);

    private static RichGraph CreateTestGraphWithRoots() => new(
        Schema: "richgraph-v1",
        Analyzer: new RichGraphAnalyzer("test-analyzer", "1.0.0", null),
        Nodes: DefaultNodes(),
        Edges: DefaultEdges(),
        Roots: new[]
        {
            new RichGraphRoot("n1", "http", "GET /api"),
            new RichGraphRoot("n2", "grpc", "Service.Method")
        });

    /// <summary>Test clock that always reports the same instant.</summary>
    private sealed class FakeTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;

        public FakeTimeProvider(DateTimeOffset fixedTime) => _now = fixedTime;

        public override DateTimeOffset GetUtcNow() => _now;
    }

    #endregion
}
|
||||
@@ -108,4 +108,30 @@ public class RichGraphWriterTests
|
||||
Assert.Contains("\"type\":\"authRequired\"", json);
|
||||
Assert.Contains("\"guard_symbol\":\"sym:dotnet:B\"", json);
|
||||
}
|
||||
|
||||
[Fact]
public async Task UsesBlake3HashForDefaultProfile()
{
    // WIT-013: the default (world) crypto profile must hash graphs with BLAKE3.
    var graphWriter = new RichGraphWriter(CryptoHashFactory.CreateDefault());
    using var workDir = new TempDir();

    var unionGraph = new ReachabilityUnionGraph(
        Nodes: new[]
        {
            new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method", "A")
        },
        Edges: Array.Empty<ReachabilityUnionEdge>());

    var richGraph = RichGraphBuilder.FromUnion(unionGraph, "test-analyzer", "1.0.0");
    var writeResult = await graphWriter.WriteAsync(richGraph, workDir.Path, "analysis-blake3");

    // The reported hash carries the algorithm prefix plus 64 hex characters.
    Assert.StartsWith("blake3:", writeResult.GraphHash);
    Assert.Equal(64 + 7, writeResult.GraphHash.Length); // "blake3:" (7) + 64 hex chars

    // meta.json must record the same blake3-prefixed hash.
    var metaContents = await File.ReadAllTextAsync(writeResult.MetaPath);
    Assert.Contains("\"graph_hash\":\"blake3:", metaContents);
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,293 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// FindingEvidenceContractsTests.cs
|
||||
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
|
||||
// Description: Unit tests for JSON serialization of evidence API contracts.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
public class FindingEvidenceContractsTests
|
||||
{
|
||||
// Serializer settings matching the evidence API wire format:
// snake_case property names, compact (non-indented) output.
private static readonly JsonSerializerOptions SerializerOptions = new()
{
    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
    WriteIndented = false
};
|
||||
|
||||
[Fact]
public void FindingEvidenceResponse_SerializesToSnakeCase()
{
    // A minimal evidence payload covering the multi-word properties whose
    // wire names we assert on.
    var evidence = new FindingEvidenceResponse
    {
        FindingId = "finding-123",
        Cve = "CVE-2021-44228",
        Component = new ComponentRef
        {
            Purl = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
            Name = "log4j-core",
            Version = "2.14.1",
            Type = "maven"
        },
        ReachablePath = new[] { "com.example.App.main", "org.apache.log4j.Logger.log" },
        LastSeen = new DateTimeOffset(2025, 12, 18, 12, 0, 0, TimeSpan.Zero)
    };

    var serialized = JsonSerializer.Serialize(evidence, SerializerOptions);

    // Pascal-case properties must come out snake_cased on the wire.
    Assert.Contains("\"finding_id\":\"finding-123\"", serialized);
    Assert.Contains("\"cve\":\"CVE-2021-44228\"", serialized);
    Assert.Contains("\"reachable_path\":", serialized);
    Assert.Contains("\"last_seen\":", serialized);
}
|
||||
|
||||
[Fact]
public void FindingEvidenceResponse_RoundTripsCorrectly()
{
    // Arrange - pin the timestamp so the fixture is deterministic across runs
    // (DateTimeOffset.UtcNow produced a different payload every time) and so
    // the value itself can be asserted after the round trip.
    var lastSeen = new DateTimeOffset(2025, 12, 18, 12, 0, 0, TimeSpan.Zero);

    var original = new FindingEvidenceResponse
    {
        FindingId = "finding-456",
        Cve = "CVE-2023-12345",
        Component = new ComponentRef
        {
            Purl = "pkg:npm/lodash@4.17.20",
            Name = "lodash",
            Version = "4.17.20",
            Type = "npm"
        },
        Entrypoint = new EntrypointProof
        {
            Type = "http_handler",
            Route = "/api/v1/users",
            Method = "POST",
            Auth = "required",
            Fqn = "com.example.UserController.createUser"
        },
        ScoreExplain = new ScoreExplanationDto
        {
            Kind = "stellaops_risk_v1",
            RiskScore = 7.5,
            Contributions = new[]
            {
                new ScoreContributionDto
                {
                    Factor = "cvss_base",
                    Weight = 0.4,
                    RawValue = 9.8,
                    Contribution = 3.92,
                    Explanation = "CVSS v4 base score"
                }
            },
            LastSeen = lastSeen
        },
        LastSeen = lastSeen
    };

    // Act - serialize and deserialize with the snake_case wire options.
    var json = JsonSerializer.Serialize(original, SerializerOptions);
    var deserialized = JsonSerializer.Deserialize<FindingEvidenceResponse>(json, SerializerOptions);

    // Assert - nested objects and the timestamp survive the round trip.
    Assert.NotNull(deserialized);
    Assert.Equal(original.FindingId, deserialized.FindingId);
    Assert.Equal(original.Cve, deserialized.Cve);
    Assert.Equal(original.Component?.Purl, deserialized.Component?.Purl);
    Assert.Equal(original.Entrypoint?.Type, deserialized.Entrypoint?.Type);
    Assert.Equal(original.ScoreExplain?.RiskScore, deserialized.ScoreExplain?.RiskScore);
    Assert.Equal(original.LastSeen, deserialized.LastSeen);
}
|
||||
|
||||
[Fact]
public void ComponentRef_SerializesAllFields()
{
    var componentRef = new ComponentRef
    {
        Purl = "pkg:nuget/Newtonsoft.Json@13.0.1",
        Name = "Newtonsoft.Json",
        Version = "13.0.1",
        Type = "nuget"
    };

    var payload = JsonSerializer.Serialize(componentRef, SerializerOptions);

    // Every field should appear under its lower-cased wire name.
    Assert.Contains("\"purl\":\"pkg:nuget/Newtonsoft.Json@13.0.1\"", payload);
    Assert.Contains("\"name\":\"Newtonsoft.Json\"", payload);
    Assert.Contains("\"version\":\"13.0.1\"", payload);
    Assert.Contains("\"type\":\"nuget\"", payload);
}
|
||||
|
||||
[Fact]
public void EntrypointProof_SerializesWithLocation()
{
    // A gRPC entrypoint with a nested source location attached.
    var proof = new EntrypointProof
    {
        Type = "grpc_method",
        Route = "grpc.UserService.GetUser",
        Auth = "required",
        Phase = "runtime",
        Fqn = "com.example.UserServiceImpl.getUser",
        Location = new SourceLocation
        {
            File = "src/main/java/com/example/UserServiceImpl.java",
            Line = 42,
            Column = 5
        }
    };

    var payload = JsonSerializer.Serialize(proof, SerializerOptions);

    // Top-level fields and the nested location object must both serialize.
    Assert.Contains("\"type\":\"grpc_method\"", payload);
    Assert.Contains("\"route\":\"grpc.UserService.GetUser\"", payload);
    Assert.Contains("\"location\":", payload);
    Assert.Contains("\"file\":\"src/main/java/com/example/UserServiceImpl.java\"", payload);
    Assert.Contains("\"line\":42", payload);
}
|
||||
|
||||
[Fact]
public void BoundaryProofDto_SerializesWithControls()
{
    // Arrange: assemble each descriptor separately for readability.
    var surface = new SurfaceDescriptor
    {
        Type = "api",
        Protocol = "https",
        Port = 443
    };

    var exposure = new ExposureDescriptor
    {
        Level = "public",
        InternetFacing = true,
        Zone = "dmz"
    };

    var auth = new AuthDescriptor
    {
        Required = true,
        Type = "jwt",
        Roles = new[] { "admin", "user" }
    };

    var waf = new ControlDescriptor
    {
        Type = "waf",
        Active = true,
        Config = "OWASP-ModSecurity"
    };

    var boundary = new BoundaryProofDto
    {
        Kind = "network",
        Surface = surface,
        Exposure = exposure,
        Auth = auth,
        Controls = new[] { waf },
        LastSeen = DateTimeOffset.UtcNow,
        Confidence = 0.95
    };

    // Act
    var json = JsonSerializer.Serialize(boundary, SerializerOptions);

    // Assert: kind, exposure flag, controls array, and confidence all serialize.
    Assert.Contains("\"kind\":\"network\"", json);
    Assert.Contains("\"internet_facing\":true", json);
    Assert.Contains("\"controls\":[", json);
    Assert.Contains("\"confidence\":0.95", json);
}
|
||||
|
||||
[Fact]
public void VexEvidenceDto_SerializesCorrectly()
{
    // Arrange: a vendor "not_affected" statement valid for exactly one year.
    var issued = new DateTimeOffset(2025, 12, 1, 0, 0, 0, TimeSpan.Zero);

    var vexEvidence = new VexEvidenceDto
    {
        Status = "not_affected",
        Justification = "vulnerable_code_not_in_execute_path",
        Impact = "The vulnerable code path is never executed in our usage",
        AttestationRef = "dsse:sha256:abc123",
        IssuedAt = issued,
        ExpiresAt = issued.AddYears(1),
        Source = "vendor"
    };

    // Act
    var json = JsonSerializer.Serialize(vexEvidence, SerializerOptions);

    // Assert: the key VEX fields use their snake_case wire names.
    Assert.Contains("\"status\":\"not_affected\"", json);
    Assert.Contains("\"justification\":\"vulnerable_code_not_in_execute_path\"", json);
    Assert.Contains("\"attestation_ref\":\"dsse:sha256:abc123\"", json);
    Assert.Contains("\"source\":\"vendor\"", json);
}
|
||||
|
||||
[Fact]
public void ScoreExplanationDto_SerializesContributions()
{
    // Local factory keeps the four-row contribution table compact and aligned.
    static ScoreContributionDto Contribution(
        string factor, double weight, double rawValue, double contribution, string explanation) => new()
    {
        Factor = factor,
        Weight = weight,
        RawValue = rawValue,
        Contribution = contribution,
        Explanation = explanation
    };

    // Arrange
    var explanation = new ScoreExplanationDto
    {
        Kind = "stellaops_risk_v1",
        RiskScore = 6.2,
        Contributions = new[]
        {
            Contribution("cvss_base", 0.4, 9.8, 3.92, "Critical CVSS base score"),
            Contribution("epss", 0.2, 0.45, 0.09, "45% probability of exploitation"),
            Contribution("reachability", 0.3, 1.0, 0.3, "Reachable from HTTP entrypoint"),
            Contribution("gate_multiplier", 1.0, 0.5, -2.11, "Auth gate reduces exposure by 50%")
        },
        LastSeen = DateTimeOffset.UtcNow
    };

    // Act
    var json = JsonSerializer.Serialize(explanation, SerializerOptions);

    // Assert: the envelope and every contributing factor appear in the payload.
    Assert.Contains("\"kind\":\"stellaops_risk_v1\"", json);
    Assert.Contains("\"risk_score\":6.2", json);
    Assert.Contains("\"contributions\":[", json);
    foreach (var factor in new[] { "cvss_base", "epss", "reachability", "gate_multiplier" })
    {
        Assert.Contains($"\"factor\":\"{factor}\"", json);
    }
}
|
||||
|
||||
[Fact]
public void NullOptionalFields_AreOmittedOrNullInJson()
{
    // Arrange: only the required fields are populated; all optional sections stay null.
    var minimal = new FindingEvidenceResponse
    {
        FindingId = "finding-minimal",
        Cve = "CVE-2025-0001",
        LastSeen = DateTimeOffset.UtcNow
    };

    // Act: round-trip through the shared serializer options.
    var roundTripped = JsonSerializer.Deserialize<FindingEvidenceResponse>(
        JsonSerializer.Serialize(minimal, SerializerOptions),
        SerializerOptions);

    // Assert: each optional section survives the round trip as null.
    Assert.NotNull(roundTripped);
    Assert.Null(roundTripped.Component);
    Assert.Null(roundTripped.ReachablePath);
    Assert.Null(roundTripped.Entrypoint);
    Assert.Null(roundTripped.Boundary);
    Assert.Null(roundTripped.Vex);
    Assert.Null(roundTripped.ScoreExplain);
}
|
||||
}
|
||||
@@ -0,0 +1,222 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Persistence;
|
||||
using StellaOps.Signals.Services;
|
||||
using StellaOps.Signals.Storage.Postgres.Repositories;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Signals.Storage.Postgres.Tests;
|
||||
|
||||
/// <summary>
/// Integration tests for callgraph projection to relational tables.
/// Runs against the shared Postgres test container supplied by
/// <c>SignalsPostgresFixture</c>; each test uses a fresh random scan id so
/// tests do not interfere with one another.
/// </summary>
[Collection(SignalsPostgresCollection.Name)]
public sealed class CallGraphProjectionIntegrationTests
{
    // Shared container fixture (connection string + migrations).
    private readonly SignalsPostgresFixture _fixture;
    // xUnit sink for per-test diagnostic output.
    private readonly ITestOutputHelper _output;

    public CallGraphProjectionIntegrationTests(SignalsPostgresFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    /// <summary>
    /// A first-time sync projects every node, edge, and entrypoint of the
    /// document and reports a non-negative duration.
    /// </summary>
    [Fact]
    public async Task SyncAsync_ProjectsNodesToRelationalTable()
    {
        // Arrange
        var dataSource = await CreateDataSourceAsync();
        var repository = new PostgresCallGraphProjectionRepository(
            dataSource,
            NullLogger<PostgresCallGraphProjectionRepository>.Instance);
        var service = new CallGraphSyncService(
            repository,
            TimeProvider.System,
            NullLogger<CallGraphSyncService>.Instance);

        var scanId = Guid.NewGuid();
        var document = CreateSampleDocument();

        // Act
        var result = await service.SyncAsync(scanId, "sha256:test-digest", document);

        // Assert
        Assert.True(result.WasUpdated);
        Assert.Equal(document.Nodes.Count, result.NodesProjected);
        Assert.Equal(document.Edges.Count, result.EdgesProjected);
        Assert.Equal(document.Entrypoints.Count, result.EntrypointsProjected);
        Assert.True(result.DurationMs >= 0);

        _output.WriteLine($"Projected {result.NodesProjected} nodes, {result.EdgesProjected} edges, {result.EntrypointsProjected} entrypoints in {result.DurationMs}ms");
    }

    /// <summary>
    /// Running the same sync twice with an identical document yields the same
    /// projection counts — the upsert path must not create duplicate rows.
    /// </summary>
    [Fact]
    public async Task SyncAsync_IsIdempotent_DoesNotCreateDuplicates()
    {
        // Arrange
        var dataSource = await CreateDataSourceAsync();
        var repository = new PostgresCallGraphProjectionRepository(
            dataSource,
            NullLogger<PostgresCallGraphProjectionRepository>.Instance);
        var service = new CallGraphSyncService(
            repository,
            TimeProvider.System,
            NullLogger<CallGraphSyncService>.Instance);

        var scanId = Guid.NewGuid();
        var document = CreateSampleDocument();

        // Act - project twice
        var result1 = await service.SyncAsync(scanId, "sha256:test-digest", document);
        var result2 = await service.SyncAsync(scanId, "sha256:test-digest", document);

        // Assert - second run should update, not duplicate
        Assert.Equal(result1.NodesProjected, result2.NodesProjected);
        Assert.Equal(result1.EdgesProjected, result2.EdgesProjected);
    }

    /// <summary>
    /// HTTP entrypoints (same route, different verbs) are projected as
    /// distinct entrypoint rows.
    /// </summary>
    [Fact]
    public async Task SyncAsync_WithEntrypoints_ProjectsEntrypointsCorrectly()
    {
        // Arrange
        var dataSource = await CreateDataSourceAsync();
        var repository = new PostgresCallGraphProjectionRepository(
            dataSource,
            NullLogger<PostgresCallGraphProjectionRepository>.Instance);
        var service = new CallGraphSyncService(
            repository,
            TimeProvider.System,
            NullLogger<CallGraphSyncService>.Instance);

        var scanId = Guid.NewGuid();
        // Two controller nodes sharing the route "/api/users" under GET and POST.
        var document = new CallgraphDocument
        {
            Id = Guid.NewGuid().ToString("N"),
            Language = "csharp",
            GraphHash = "test-hash",
            Nodes = new List<CallgraphNode>
            {
                new() { Id = "node-1", Name = "GetUsers", Namespace = "Api.Controllers" },
                new() { Id = "node-2", Name = "CreateUser", Namespace = "Api.Controllers" }
            },
            Edges = new List<CallgraphEdge>(),
            Entrypoints = new List<CallgraphEntrypoint>
            {
                new() { NodeId = "node-1", Kind = EntrypointKind.Http, Route = "/api/users", HttpMethod = "GET", Order = 0 },
                new() { NodeId = "node-2", Kind = EntrypointKind.Http, Route = "/api/users", HttpMethod = "POST", Order = 1 }
            }
        };

        // Act
        var result = await service.SyncAsync(scanId, "sha256:test-digest", document);

        // Assert
        Assert.Equal(2, result.EntrypointsProjected);
        _output.WriteLine($"Projected {result.EntrypointsProjected} HTTP entrypoints");
    }

    /// <summary>
    /// Deleting a scan removes every projected row, so subsequent stats
    /// queries for that scan report zero nodes and edges.
    /// </summary>
    [Fact]
    public async Task DeleteByScanAsync_RemovesAllProjectedData()
    {
        // Arrange
        var dataSource = await CreateDataSourceAsync();
        var repository = new PostgresCallGraphProjectionRepository(
            dataSource,
            NullLogger<PostgresCallGraphProjectionRepository>.Instance);
        var queryRepository = new PostgresCallGraphQueryRepository(
            dataSource,
            NullLogger<PostgresCallGraphQueryRepository>.Instance);
        var service = new CallGraphSyncService(
            repository,
            TimeProvider.System,
            NullLogger<CallGraphSyncService>.Instance);

        var scanId = Guid.NewGuid();
        var document = CreateSampleDocument();

        // Project first
        await service.SyncAsync(scanId, "sha256:test-digest", document);

        // Act
        await service.DeleteByScanAsync(scanId);

        // Assert - query should return empty stats
        var stats = await queryRepository.GetStatsAsync(scanId);
        Assert.Equal(0, stats.NodeCount);
        Assert.Equal(0, stats.EdgeCount);
    }

    /// <summary>
    /// Data written by the projection repository is readable through the
    /// separate query repository with matching counts.
    /// </summary>
    [Fact]
    public async Task QueryRepository_CanQueryProjectedData()
    {
        // Arrange
        var dataSource = await CreateDataSourceAsync();
        var repository = new PostgresCallGraphProjectionRepository(
            dataSource,
            NullLogger<PostgresCallGraphProjectionRepository>.Instance);
        var queryRepository = new PostgresCallGraphQueryRepository(
            dataSource,
            NullLogger<PostgresCallGraphQueryRepository>.Instance);
        var service = new CallGraphSyncService(
            repository,
            TimeProvider.System,
            NullLogger<CallGraphSyncService>.Instance);

        var scanId = Guid.NewGuid();
        var document = CreateSampleDocument();

        // Project
        await service.SyncAsync(scanId, "sha256:test-digest", document);

        // Act
        var stats = await queryRepository.GetStatsAsync(scanId);

        // Assert
        Assert.Equal(document.Nodes.Count, stats.NodeCount);
        Assert.Equal(document.Edges.Count, stats.EdgeCount);
        _output.WriteLine($"Query returned: {stats.NodeCount} nodes, {stats.EdgeCount} edges");
    }

    /// <summary>
    /// Builds a <see cref="SignalsDataSource"/> against the fixture's container
    /// and ensures migrations have been applied before the test touches tables.
    /// </summary>
    private async Task<SignalsDataSource> CreateDataSourceAsync()
    {
        var connectionString = _fixture.GetConnectionString();
        var options = new Microsoft.Extensions.Options.OptionsWrapper<StellaOps.Infrastructure.Postgres.Options.PostgresOptions>(
            new StellaOps.Infrastructure.Postgres.Options.PostgresOptions { ConnectionString = connectionString });
        var dataSource = new SignalsDataSource(options);

        // Run migration
        await _fixture.RunMigrationsAsync();

        return dataSource;
    }

    /// <summary>
    /// A minimal three-node callgraph (Main -> DoWork -> ProcessData) with
    /// one Main entrypoint — the shared fixture document for most tests.
    /// </summary>
    private static CallgraphDocument CreateSampleDocument()
    {
        return new CallgraphDocument
        {
            Id = Guid.NewGuid().ToString("N"),
            Language = "csharp",
            GraphHash = "sha256:sample-graph-hash",
            Nodes = new List<CallgraphNode>
            {
                new() { Id = "node-1", Name = "Main", Kind = "method", Namespace = "Program", Visibility = SymbolVisibility.Public, IsEntrypointCandidate = true },
                new() { Id = "node-2", Name = "DoWork", Kind = "method", Namespace = "Service", Visibility = SymbolVisibility.Internal },
                new() { Id = "node-3", Name = "ProcessData", Kind = "method", Namespace = "Core", Visibility = SymbolVisibility.Private }
            },
            Edges = new List<CallgraphEdge>
            {
                new() { SourceId = "node-1", TargetId = "node-2", Kind = EdgeKind.Static, Reason = EdgeReason.DirectCall, Weight = 1.0 },
                new() { SourceId = "node-2", TargetId = "node-3", Kind = EdgeKind.Static, Reason = EdgeReason.DirectCall, Weight = 1.0 }
            },
            Entrypoints = new List<CallgraphEntrypoint>
            {
                new() { NodeId = "node-1", Kind = EntrypointKind.Main, Phase = EntrypointPhase.AppStart, Order = 0 }
            }
        };
    }
}
|
||||
@@ -0,0 +1,466 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using NpgsqlTypes;
|
||||
using StellaOps.Infrastructure.Postgres.Repositories;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Persistence;
|
||||
|
||||
namespace StellaOps.Signals.Storage.Postgres.Repositories;
|
||||
|
||||
/// <summary>
/// PostgreSQL implementation of <see cref="ICallGraphProjectionRepository"/>.
/// Projects callgraph documents into relational tables for efficient querying.
/// Nodes and edges are upserted in deterministically-sorted batches inside a
/// single transaction per call; entrypoints are upserted row by row.
/// </summary>
public sealed class PostgresCallGraphProjectionRepository : RepositoryBase<SignalsDataSource>, ICallGraphProjectionRepository
{
    // Maximum rows per multi-VALUES INSERT statement (keeps parameter counts bounded).
    private const int BatchSize = 1000;

    // Serializer used for the jsonb "attributes" column (camelCase, compact).
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    public PostgresCallGraphProjectionRepository(
        SignalsDataSource dataSource,
        ILogger<PostgresCallGraphProjectionRepository> logger)
        : base(dataSource, logger)
    {
    }

    /// <inheritdoc />
    /// <remarks>
    /// Inserts or refreshes the scan header row. On conflict, nullable metadata
    /// columns keep their previous value via COALESCE, and a scan already marked
    /// 'completed' stays completed. Returns <c>true</c> when a new row was
    /// inserted (detected via the <c>xmax = 0</c> system-column trick).
    /// </remarks>
    public async Task<bool> UpsertScanAsync(
        Guid scanId,
        string artifactDigest,
        string? sbomDigest = null,
        string? repoUri = null,
        string? commitSha = null,
        CancellationToken cancellationToken = default)
    {
        const string sql = """
            INSERT INTO signals.scans (scan_id, artifact_digest, sbom_digest, repo_uri, commit_sha, status, created_at)
            VALUES (@scan_id, @artifact_digest, @sbom_digest, @repo_uri, @commit_sha, 'processing', NOW())
            ON CONFLICT (scan_id)
            DO UPDATE SET
                artifact_digest = EXCLUDED.artifact_digest,
                sbom_digest = COALESCE(EXCLUDED.sbom_digest, signals.scans.sbom_digest),
                repo_uri = COALESCE(EXCLUDED.repo_uri, signals.scans.repo_uri),
                commit_sha = COALESCE(EXCLUDED.commit_sha, signals.scans.commit_sha),
                status = CASE WHEN signals.scans.status = 'completed' THEN 'completed' ELSE 'processing' END
            RETURNING (xmax = 0) AS was_inserted
            """;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);

        AddParameter(command, "@scan_id", scanId);
        AddParameter(command, "@artifact_digest", artifactDigest);
        AddParameter(command, "@sbom_digest", sbomDigest ?? (object)DBNull.Value);
        AddParameter(command, "@repo_uri", repoUri ?? (object)DBNull.Value);
        AddParameter(command, "@commit_sha", commitSha ?? (object)DBNull.Value);

        var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
        return result is true;
    }

    /// <inheritdoc />
    /// <summary>Marks the scan 'completed' and stamps completed_at.</summary>
    public async Task CompleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
    {
        const string sql = """
            UPDATE signals.scans
            SET status = 'completed', completed_at = NOW()
            WHERE scan_id = @scan_id
            """;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@scan_id", scanId);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    /// <summary>Marks the scan 'failed', recording the error message and completion time.</summary>
    public async Task FailScanAsync(Guid scanId, string errorMessage, CancellationToken cancellationToken = default)
    {
        const string sql = """
            UPDATE signals.scans
            SET status = 'failed', error_message = @error_message, completed_at = NOW()
            WHERE scan_id = @scan_id
            """;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@scan_id", scanId);
        AddParameter(command, "@error_message", errorMessage);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    /// <remarks>
    /// Upserts all nodes for a scan in one transaction, batching at
    /// <see cref="BatchSize"/> rows per statement. Nodes are sorted by Id
    /// (ordinal) first so repeated runs issue statements in a stable order.
    /// Returns the number of rows affected (inserts plus updates).
    /// </remarks>
    public async Task<int> UpsertNodesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphNode> nodes,
        CancellationToken cancellationToken = default)
    {
        if (nodes is not { Count: > 0 })
        {
            return 0;
        }

        // Sort nodes deterministically by Id for stable ordering
        var sortedNodes = nodes.OrderBy(n => n.Id, StringComparer.Ordinal).ToList();

        var totalInserted = 0;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            // Process in batches
            for (var i = 0; i < sortedNodes.Count; i += BatchSize)
            {
                var batch = sortedNodes.Skip(i).Take(BatchSize).ToList();
                totalInserted += await UpsertNodeBatchAsync(connection, transaction, scanId, batch, cancellationToken).ConfigureAwait(false);
            }

            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            return totalInserted;
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }

    /// <summary>
    /// Builds and executes one multi-VALUES INSERT ... ON CONFLICT statement for
    /// a batch of nodes. Positional parameters (@p0, @p1, ...) are generated in
    /// lock-step with the VALUES rows — the two loops must stay in sync.
    /// </summary>
    private async Task<int> UpsertNodeBatchAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        Guid scanId,
        IReadOnlyList<CallgraphNode> nodes,
        CancellationToken cancellationToken)
    {
        var sql = new StringBuilder();
        sql.AppendLine("""
            INSERT INTO signals.cg_nodes (scan_id, node_id, artifact_key, symbol_key, visibility, is_entrypoint_candidate, purl, symbol_digest, flags, attributes)
            VALUES
            """);

        var parameters = new List<NpgsqlParameter>();
        var paramIndex = 0;

        for (var i = 0; i < nodes.Count; i++)
        {
            var node = nodes[i];
            if (i > 0) sql.Append(',');

            // Ten parameters per row, matching the column list above.
            sql.AppendLine($"""
                (@p{paramIndex}, @p{paramIndex + 1}, @p{paramIndex + 2}, @p{paramIndex + 3}, @p{paramIndex + 4}, @p{paramIndex + 5}, @p{paramIndex + 6}, @p{paramIndex + 7}, @p{paramIndex + 8}, @p{paramIndex + 9})
                """);

            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", scanId));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", node.Id));
            // NOTE(review): artifact_key is populated from node.Namespace — confirm that
            // the namespace is the intended artifact key for this schema.
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", node.Namespace ?? (object)DBNull.Value));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", BuildSymbolKey(node)));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", MapVisibility(node)));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", node.IsEntrypointCandidate));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", node.Purl ?? (object)DBNull.Value));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", node.SymbolDigest ?? (object)DBNull.Value));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", MapNodeFlags(node)));
            // jsonb column: explicit NpgsqlDbType so null serializations map to SQL NULL.
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", NpgsqlDbType.Jsonb) { Value = SerializeAttributes(node) ?? DBNull.Value });
        }

        sql.AppendLine("""
            ON CONFLICT (scan_id, node_id)
            DO UPDATE SET
                artifact_key = EXCLUDED.artifact_key,
                symbol_key = EXCLUDED.symbol_key,
                visibility = EXCLUDED.visibility,
                is_entrypoint_candidate = EXCLUDED.is_entrypoint_candidate,
                purl = EXCLUDED.purl,
                symbol_digest = EXCLUDED.symbol_digest,
                flags = EXCLUDED.flags,
                attributes = EXCLUDED.attributes
            """);

        await using var command = new NpgsqlCommand(sql.ToString(), connection, transaction);
        command.Parameters.AddRange(parameters.ToArray());

        return await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    /// <remarks>
    /// Same batching/transaction pattern as <see cref="UpsertNodesAsync"/>,
    /// with edges sorted by (SourceId, TargetId) for determinism.
    /// </remarks>
    public async Task<int> UpsertEdgesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEdge> edges,
        CancellationToken cancellationToken = default)
    {
        if (edges is not { Count: > 0 })
        {
            return 0;
        }

        // Sort edges deterministically by (SourceId, TargetId) for stable ordering
        var sortedEdges = edges
            .OrderBy(e => e.SourceId, StringComparer.Ordinal)
            .ThenBy(e => e.TargetId, StringComparer.Ordinal)
            .ToList();

        var totalInserted = 0;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            // Process in batches
            for (var i = 0; i < sortedEdges.Count; i += BatchSize)
            {
                var batch = sortedEdges.Skip(i).Take(BatchSize).ToList();
                totalInserted += await UpsertEdgeBatchAsync(connection, transaction, scanId, batch, cancellationToken).ConfigureAwait(false);
            }

            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            return totalInserted;
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }

    /// <summary>
    /// Builds and executes one multi-VALUES INSERT ... ON CONFLICT statement for
    /// a batch of edges (eight positional parameters per row).
    /// </summary>
    private async Task<int> UpsertEdgeBatchAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        Guid scanId,
        IReadOnlyList<CallgraphEdge> edges,
        CancellationToken cancellationToken)
    {
        var sql = new StringBuilder();
        sql.AppendLine("""
            INSERT INTO signals.cg_edges (scan_id, from_node_id, to_node_id, kind, reason, weight, is_resolved, provenance)
            VALUES
            """);

        var parameters = new List<NpgsqlParameter>();
        var paramIndex = 0;

        for (var i = 0; i < edges.Count; i++)
        {
            var edge = edges[i];
            if (i > 0) sql.Append(',');

            sql.AppendLine($"""
                (@p{paramIndex}, @p{paramIndex + 1}, @p{paramIndex + 2}, @p{paramIndex + 3}, @p{paramIndex + 4}, @p{paramIndex + 5}, @p{paramIndex + 6}, @p{paramIndex + 7})
                """);

            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", scanId));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", edge.SourceId));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", edge.TargetId));
            // Enum kinds/reasons are stored as smallint ordinals.
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", (short)MapEdgeKind(edge)));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", (short)MapEdgeReason(edge)));
            // NOTE(review): the 'weight' column is filled from edge.Confidence (default 1.0),
            // not from edge.Weight — confirm this mapping is intentional.
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", (float)(edge.Confidence ?? 1.0)));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", edge.IsResolved));
            parameters.Add(new NpgsqlParameter($"@p{paramIndex++}", edge.Provenance ?? (object)DBNull.Value));
        }

        sql.AppendLine("""
            ON CONFLICT (scan_id, from_node_id, to_node_id, kind, reason)
            DO UPDATE SET
                weight = EXCLUDED.weight,
                is_resolved = EXCLUDED.is_resolved,
                provenance = EXCLUDED.provenance
            """);

        await using var command = new NpgsqlCommand(sql.ToString(), connection, transaction);
        command.Parameters.AddRange(parameters.ToArray());

        return await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    /// <remarks>
    /// Upserts entrypoints one row at a time inside a single transaction,
    /// sorted by (NodeId, Order) for determinism. Conflict key is
    /// (scan_id, node_id, kind), so re-syncing updates rather than duplicates.
    /// </remarks>
    public async Task<int> UpsertEntrypointsAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEntrypoint> entrypoints,
        CancellationToken cancellationToken = default)
    {
        if (entrypoints is not { Count: > 0 })
        {
            return 0;
        }

        // Sort entrypoints deterministically by (NodeId, Order) for stable ordering
        var sortedEntrypoints = entrypoints
            .OrderBy(e => e.NodeId, StringComparer.Ordinal)
            .ThenBy(e => e.Order)
            .ToList();

        const string sql = """
            INSERT INTO signals.entrypoints (scan_id, node_id, kind, framework, route, http_method, phase, order_idx)
            VALUES (@scan_id, @node_id, @kind, @framework, @route, @http_method, @phase, @order_idx)
            ON CONFLICT (scan_id, node_id, kind)
            DO UPDATE SET
                framework = EXCLUDED.framework,
                route = EXCLUDED.route,
                http_method = EXCLUDED.http_method,
                phase = EXCLUDED.phase,
                order_idx = EXCLUDED.order_idx
            """;

        var totalInserted = 0;

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            foreach (var entrypoint in sortedEntrypoints)
            {
                await using var command = new NpgsqlCommand(sql, connection, transaction);

                command.Parameters.AddWithValue("@scan_id", scanId);
                command.Parameters.AddWithValue("@node_id", entrypoint.NodeId);
                command.Parameters.AddWithValue("@kind", MapEntrypointKind(entrypoint.Kind));
                command.Parameters.AddWithValue("@framework", entrypoint.Framework.ToString().ToLowerInvariant());
                command.Parameters.AddWithValue("@route", entrypoint.Route ?? (object)DBNull.Value);
                command.Parameters.AddWithValue("@http_method", entrypoint.HttpMethod ?? (object)DBNull.Value);
                command.Parameters.AddWithValue("@phase", MapEntrypointPhase(entrypoint.Phase));
                command.Parameters.AddWithValue("@order_idx", entrypoint.Order);

                totalInserted += await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
            }

            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            return totalInserted;
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }

    /// <inheritdoc />
    /// <summary>Removes a scan and (via FK cascade) all its projected rows.</summary>
    public async Task DeleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
    {
        // Delete from scans cascades to all related tables via FK
        const string sql = "DELETE FROM signals.scans WHERE scan_id = @scan_id";

        await using var connection = await DataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = CreateCommand(sql, connection);
        AddParameter(command, "@scan_id", scanId);

        await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }

    // ===== HELPER METHODS =====

    /// <summary>
    /// Canonical symbol key for a node: "Namespace.Name" when a namespace is
    /// present, otherwise just the name.
    /// </summary>
    private static string BuildSymbolKey(CallgraphNode node)
    {
        // Build canonical symbol key: namespace.name or just name
        if (!string.IsNullOrWhiteSpace(node.Namespace))
        {
            return $"{node.Namespace}.{node.Name}";
        }
        return node.Name;
    }

    /// <summary>Maps the node's visibility enum to its lowercase column value.</summary>
    private static string MapVisibility(CallgraphNode node)
    {
        return node.Visibility switch
        {
            SymbolVisibility.Public => "public",
            SymbolVisibility.Internal => "internal",
            SymbolVisibility.Protected => "protected",
            SymbolVisibility.Private => "private",
            _ => "unknown"
        };
    }

    /// <summary>Node flags pass through unchanged; the parser encodes the bitfield.</summary>
    private static int MapNodeFlags(CallgraphNode node)
    {
        // Use the Flags property directly from the node
        // The Flags bitfield is already encoded by the parser
        return node.Flags;
    }

    /// <summary>
    /// Serializes the node's evidence list into the jsonb attributes payload,
    /// or returns null (→ SQL NULL) when there is no evidence.
    /// </summary>
    private static string? SerializeAttributes(CallgraphNode node)
    {
        // Serialize additional attributes if present
        if (node.Evidence is not { Count: > 0 })
        {
            return null;
        }

        return JsonSerializer.Serialize(new { evidence = node.Evidence }, JsonOptions);
    }

    /// <summary>
    /// Normalizes an edge's kind: known enum values pass through; anything else
    /// falls back to parsing the legacy string Type, defaulting to Static.
    /// </summary>
    private static EdgeKind MapEdgeKind(CallgraphEdge edge)
    {
        return edge.Kind switch
        {
            EdgeKind.Static => EdgeKind.Static,
            EdgeKind.Heuristic => EdgeKind.Heuristic,
            EdgeKind.Runtime => EdgeKind.Runtime,
            _ => edge.Type?.ToLowerInvariant() switch
            {
                "static" => EdgeKind.Static,
                "heuristic" => EdgeKind.Heuristic,
                "runtime" => EdgeKind.Runtime,
                _ => EdgeKind.Static
            }
        };
    }

    /// <summary>
    /// Normalizes an edge's reason: known enum values pass through; anything
    /// else defaults to DirectCall.
    /// </summary>
    private static EdgeReason MapEdgeReason(CallgraphEdge edge)
    {
        return edge.Reason switch
        {
            EdgeReason.DirectCall => EdgeReason.DirectCall,
            EdgeReason.VirtualCall => EdgeReason.VirtualCall,
            EdgeReason.ReflectionString => EdgeReason.ReflectionString,
            EdgeReason.RuntimeMinted => EdgeReason.RuntimeMinted,
            _ => EdgeReason.DirectCall
        };
    }

    /// <summary>Maps an entrypoint kind to its snake_case column value.</summary>
    private static string MapEntrypointKind(EntrypointKind kind)
    {
        return kind switch
        {
            EntrypointKind.Http => "http",
            EntrypointKind.Grpc => "grpc",
            EntrypointKind.Cli => "cli",
            EntrypointKind.Job => "job",
            EntrypointKind.Event => "event",
            EntrypointKind.MessageQueue => "message_queue",
            EntrypointKind.Timer => "timer",
            EntrypointKind.Test => "test",
            EntrypointKind.Main => "main",
            EntrypointKind.ModuleInit => "module_init",
            EntrypointKind.StaticConstructor => "static_constructor",
            _ => "unknown"
        };
    }

    /// <summary>Maps an entrypoint phase to its snake_case column value (default: runtime).</summary>
    private static string MapEntrypointPhase(EntrypointPhase phase)
    {
        return phase switch
        {
            EntrypointPhase.ModuleInit => "module_init",
            EntrypointPhase.AppStart => "app_start",
            EntrypointPhase.Runtime => "runtime",
            EntrypointPhase.Shutdown => "shutdown",
            _ => "runtime"
        };
    }
}
|
||||
@@ -34,6 +34,7 @@ public static class ServiceCollectionExtensions
|
||||
services.AddSingleton<IDeploymentRefsRepository, PostgresDeploymentRefsRepository>();
|
||||
services.AddSingleton<IGraphMetricsRepository, PostgresGraphMetricsRepository>();
|
||||
services.AddSingleton<ICallGraphQueryRepository, PostgresCallGraphQueryRepository>();
|
||||
services.AddSingleton<ICallGraphProjectionRepository, PostgresCallGraphProjectionRepository>();
|
||||
|
||||
return services;
|
||||
}
|
||||
@@ -59,6 +60,7 @@ public static class ServiceCollectionExtensions
|
||||
services.AddSingleton<IDeploymentRefsRepository, PostgresDeploymentRefsRepository>();
|
||||
services.AddSingleton<IGraphMetricsRepository, PostgresGraphMetricsRepository>();
|
||||
services.AddSingleton<ICallGraphQueryRepository, PostgresCallGraphQueryRepository>();
|
||||
services.AddSingleton<ICallGraphProjectionRepository, PostgresCallGraphProjectionRepository>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
192
src/Signals/StellaOps.Signals/Models/ScoreExplanation.cs
Normal file
192
src/Signals/StellaOps.Signals/Models/ScoreExplanation.cs
Normal file
@@ -0,0 +1,192 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreExplanation.cs
|
||||
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
|
||||
// Description: Score explanation model with additive breakdown of risk factors.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Signals.Models;
|
||||
|
||||
/// <summary>
/// Score explanation with additive breakdown of risk factors.
/// Provides transparency into how a risk score was computed: the entries in
/// <see cref="Contributions"/> sum to the final <see cref="RiskScore"/>,
/// optionally adjusted by <see cref="Modifiers"/>.
/// </summary>
public sealed record ScoreExplanation
{
    /// <summary>
    /// Kind of scoring algorithm (stellaops_risk_v1, cvss_v4, custom).
    /// </summary>
    [JsonPropertyName("kind")]
    public string Kind { get; init; } = "stellaops_risk_v1";

    /// <summary>
    /// Final computed risk score (0.0 to 10.0 or custom range).
    /// </summary>
    [JsonPropertyName("risk_score")]
    public double RiskScore { get; init; }

    /// <summary>
    /// Individual score contributions summing to the final score.
    /// Empty when no breakdown is available.
    /// </summary>
    [JsonPropertyName("contributions")]
    public IReadOnlyList<ScoreContribution> Contributions { get; init; } = Array.Empty<ScoreContribution>();

    /// <summary>
    /// When the score was computed.
    /// </summary>
    [JsonPropertyName("last_seen")]
    public DateTimeOffset LastSeen { get; init; }

    /// <summary>
    /// Version of the scoring algorithm. Null when unversioned.
    /// </summary>
    [JsonPropertyName("algorithm_version")]
    public string? AlgorithmVersion { get; init; }

    /// <summary>
    /// Reference to the evidence used for scoring (scan ID, graph hash, etc.).
    /// </summary>
    [JsonPropertyName("evidence_ref")]
    public string? EvidenceRef { get; init; }

    /// <summary>
    /// Human-readable summary of the score.
    /// </summary>
    [JsonPropertyName("summary")]
    public string? Summary { get; init; }

    /// <summary>
    /// Any modifiers applied after base calculation (caps, floors, policy overrides).
    /// Null when no modifiers were applied.
    /// </summary>
    [JsonPropertyName("modifiers")]
    public IReadOnlyList<ScoreModifier>? Modifiers { get; init; }
}
|
||||
|
||||
/// <summary>
/// Individual contribution to the risk score.
/// <see cref="Contribution"/> is the weighted value (<see cref="Weight"/> applied
/// to <see cref="RawValue"/>) that is added into the final score.
/// </summary>
public sealed record ScoreContribution
{
    /// <summary>
    /// Factor name (cvss_base, epss, reachability, gate_multiplier, vex_override, etc.).
    /// See the well-known names in ScoreFactors.
    /// </summary>
    [JsonPropertyName("factor")]
    public string Factor { get; init; } = string.Empty;

    /// <summary>
    /// Weight applied to this factor (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("weight")]
    public double Weight { get; init; }

    /// <summary>
    /// Raw value before weighting.
    /// </summary>
    [JsonPropertyName("raw_value")]
    public double RawValue { get; init; }

    /// <summary>
    /// Weighted contribution to final score.
    /// </summary>
    [JsonPropertyName("contribution")]
    public double Contribution { get; init; }

    /// <summary>
    /// Human-readable explanation of this factor.
    /// </summary>
    [JsonPropertyName("explanation")]
    public string? Explanation { get; init; }

    /// <summary>
    /// Source of the factor value (nvd, first, scan, vex, policy).
    /// </summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }

    /// <summary>
    /// When this factor value was last updated. Null when unknown.
    /// </summary>
    [JsonPropertyName("updated_at")]
    public DateTimeOffset? UpdatedAt { get; init; }

    /// <summary>
    /// Confidence in this factor (0.0 to 1.0). Null when not assessed.
    /// </summary>
    [JsonPropertyName("confidence")]
    public double? Confidence { get; init; }
}
|
||||
|
||||
/// <summary>
/// Modifier applied to the score after base calculation, recording the value
/// transition (<see cref="Before"/> → <see cref="After"/>) for auditability.
/// </summary>
public sealed record ScoreModifier
{
    /// <summary>
    /// Type of modifier (cap, floor, policy_override, vex_reduction, etc.).
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// Original value before modifier.
    /// </summary>
    [JsonPropertyName("before")]
    public double Before { get; init; }

    /// <summary>
    /// Value after modifier.
    /// </summary>
    [JsonPropertyName("after")]
    public double After { get; init; }

    /// <summary>
    /// Reason for the modifier.
    /// </summary>
    [JsonPropertyName("reason")]
    public string? Reason { get; init; }

    /// <summary>
    /// Policy or rule that triggered the modifier.
    /// </summary>
    [JsonPropertyName("policy_ref")]
    public string? PolicyRef { get; init; }
}
|
||||
|
||||
/// <summary>
/// Well-known score factor names, used as <c>ScoreContribution.Factor</c> values.
/// Kept as string constants so explanations remain serialization-stable.
/// </summary>
public static class ScoreFactors
{
    /// <summary>CVSS v4 base score.</summary>
    public const string CvssBase = "cvss_base";

    /// <summary>CVSS v4 environmental score.</summary>
    public const string CvssEnvironmental = "cvss_environmental";

    /// <summary>EPSS probability score.</summary>
    public const string Epss = "epss";

    /// <summary>Reachability analysis result.</summary>
    public const string Reachability = "reachability";

    /// <summary>Gate-based multiplier (auth, feature flags, etc.).</summary>
    public const string GateMultiplier = "gate_multiplier";

    /// <summary>VEX-based status override.</summary>
    public const string VexOverride = "vex_override";

    /// <summary>Time-based decay (older vulnerabilities).</summary>
    public const string TimeDecay = "time_decay";

    /// <summary>Exposure surface multiplier.</summary>
    public const string ExposureSurface = "exposure_surface";

    /// <summary>Known exploitation status (KEV, etc.).</summary>
    public const string KnownExploitation = "known_exploitation";

    /// <summary>Asset criticality multiplier.</summary>
    public const string AssetCriticality = "asset_criticality";
}
|
||||
128
src/Signals/StellaOps.Signals/Options/ScoreExplanationWeights.cs
Normal file
128
src/Signals/StellaOps.Signals/Options/ScoreExplanationWeights.cs
Normal file
@@ -0,0 +1,128 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreExplanationWeights.cs
|
||||
// Sprint: SPRINT_3800_0001_0002_score_explanation_service
|
||||
// Description: Configurable weights for additive score explanation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Signals.Options;
|
||||
|
||||
/// <summary>
/// Configurable weights for the additive score explanation model.
/// Total score is computed as sum of weighted contributions (0-100 range by default).
/// Call <see cref="Validate"/> after binding configuration.
/// </summary>
public sealed class ScoreExplanationWeights
{
    /// <summary>
    /// Multiplier for CVSS base score (10.0 CVSS × 5.0 = 50 points max).
    /// </summary>
    public double CvssMultiplier { get; set; } = 5.0;

    /// <summary>
    /// Points when path reaches entrypoint directly.
    /// </summary>
    public double EntrypointReachability { get; set; } = 25.0;

    /// <summary>
    /// Points for direct reachability (caller directly invokes vulnerable code).
    /// </summary>
    public double DirectReachability { get; set; } = 20.0;

    /// <summary>
    /// Points for runtime-observed reachability.
    /// </summary>
    public double RuntimeReachability { get; set; } = 22.0;

    /// <summary>
    /// Points for unknown reachability status.
    /// </summary>
    public double UnknownReachability { get; set; } = 12.0;

    /// <summary>
    /// Points for unreachable paths (typically 0).
    /// </summary>
    public double UnreachableReachability { get; set; } = 0.0;

    /// <summary>
    /// Points for HTTP/HTTPS exposed entrypoints.
    /// </summary>
    public double HttpExposure { get; set; } = 15.0;

    /// <summary>
    /// Points for gRPC exposed entrypoints.
    /// </summary>
    public double GrpcExposure { get; set; } = 12.0;

    /// <summary>
    /// Points for internal-only exposure (not internet-facing).
    /// </summary>
    public double InternalExposure { get; set; } = 5.0;

    /// <summary>
    /// Points for CLI or scheduled task exposure.
    /// </summary>
    public double CliExposure { get; set; } = 3.0;

    /// <summary>
    /// Discount (negative) when auth gate is detected.
    /// </summary>
    public double AuthGateDiscount { get; set; } = -3.0;

    /// <summary>
    /// Discount (negative) when admin-only gate is detected.
    /// </summary>
    public double AdminGateDiscount { get; set; } = -5.0;

    /// <summary>
    /// Discount (negative) when feature flag gate is detected.
    /// </summary>
    public double FeatureFlagDiscount { get; set; } = -2.0;

    /// <summary>
    /// Discount (negative) when non-default config gate is detected.
    /// </summary>
    public double NonDefaultConfigDiscount { get; set; } = -2.0;

    /// <summary>
    /// Multiplier for EPSS probability (0.0-1.0 → 0-10 points).
    /// </summary>
    public double EpssMultiplier { get; set; } = 10.0;

    /// <summary>
    /// Bonus for known exploited vulnerabilities (KEV).
    /// </summary>
    public double KevBonus { get; set; } = 10.0;

    /// <summary>
    /// Minimum score floor.
    /// </summary>
    public double MinScore { get; set; } = 0.0;

    /// <summary>
    /// Maximum score ceiling.
    /// </summary>
    public double MaxScore { get; set; } = 100.0;

    /// <summary>
    /// Validates the configuration.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">
    /// When <see cref="CvssMultiplier"/> is negative or any discount is positive.
    /// </exception>
    /// <exception cref="ArgumentException">
    /// When <see cref="MinScore"/> is not less than <see cref="MaxScore"/>.
    /// </exception>
    public void Validate()
    {
        if (CvssMultiplier < 0)
            throw new ArgumentOutOfRangeException(nameof(CvssMultiplier), CvssMultiplier, "Must be non-negative.");

        if (MinScore >= MaxScore)
            throw new ArgumentException("MinScore must be less than MaxScore.");

        // Discounts should be negative or zero.
        ThrowIfPositiveDiscount(nameof(AuthGateDiscount), AuthGateDiscount);
        ThrowIfPositiveDiscount(nameof(AdminGateDiscount), AdminGateDiscount);
        ThrowIfPositiveDiscount(nameof(FeatureFlagDiscount), FeatureFlagDiscount);
        // Fix: NonDefaultConfigDiscount was previously omitted from validation,
        // unlike the other three discounts — keep all four symmetric.
        ThrowIfPositiveDiscount(nameof(NonDefaultConfigDiscount), NonDefaultConfigDiscount);
    }

    /// <summary>Throws when a discount value is positive (discounts must be ≤ 0).</summary>
    private static void ThrowIfPositiveDiscount(string name, double value)
    {
        if (value > 0)
            throw new ArgumentOutOfRangeException(name, value, "Discounts should be negative or zero.");
    }
}
|
||||
@@ -12,6 +12,11 @@ public sealed class SignalsScoringOptions
|
||||
/// </summary>
|
||||
public SignalsGateMultiplierOptions GateMultipliers { get; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Score explanation weights for additive risk scoring (Sprint: SPRINT_3800_0001_0002).
|
||||
/// </summary>
|
||||
public ScoreExplanationWeights ExplanationWeights { get; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Confidence assigned when a path exists from entry point to target.
|
||||
/// </summary>
|
||||
@@ -68,6 +73,7 @@ public sealed class SignalsScoringOptions
|
||||
public void Validate()
|
||||
{
|
||||
GateMultipliers.Validate();
|
||||
ExplanationWeights.Validate();
|
||||
|
||||
EnsurePercent(nameof(ReachableConfidence), ReachableConfidence);
|
||||
EnsurePercent(nameof(UnreachableConfidence), UnreachableConfidence);
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Signals.Models;
|
||||
|
||||
namespace StellaOps.Signals.Persistence;
|
||||
|
||||
/// <summary>
/// Repository for projecting callgraph documents into relational tables.
/// Implementations back the JSONB-to-relational projection used for
/// cross-artifact queries.
/// </summary>
public interface ICallGraphProjectionRepository
{
    /// <summary>
    /// Upserts or creates a scan record.
    /// </summary>
    /// <param name="scanId">The scan identifier.</param>
    /// <param name="artifactDigest">The artifact digest.</param>
    /// <param name="sbomDigest">Optional SBOM digest.</param>
    /// <param name="repoUri">Optional repository URI.</param>
    /// <param name="commitSha">Optional commit SHA.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if created, false if already existed.</returns>
    Task<bool> UpsertScanAsync(
        Guid scanId,
        string artifactDigest,
        string? sbomDigest = null,
        string? repoUri = null,
        string? commitSha = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Marks a scan as completed.
    /// </summary>
    Task CompleteScanAsync(Guid scanId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Marks a scan as failed, recording the error message.
    /// </summary>
    Task FailScanAsync(Guid scanId, string errorMessage, CancellationToken cancellationToken = default);

    /// <summary>
    /// Upserts nodes into the relational cg_nodes table.
    /// </summary>
    /// <param name="scanId">The scan identifier.</param>
    /// <param name="nodes">The nodes to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of nodes upserted.</returns>
    Task<int> UpsertNodesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphNode> nodes,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Upserts edges into the relational cg_edges table.
    /// </summary>
    /// <param name="scanId">The scan identifier.</param>
    /// <param name="edges">The edges to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of edges upserted.</returns>
    Task<int> UpsertEdgesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEdge> edges,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Upserts entrypoints into the relational entrypoints table.
    /// </summary>
    /// <param name="scanId">The scan identifier.</param>
    /// <param name="entrypoints">The entrypoints to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entrypoints upserted.</returns>
    Task<int> UpsertEntrypointsAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEntrypoint> entrypoints,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes all relational data for a scan (cascading via FK).
    /// </summary>
    /// <param name="scanId">The scan identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task DeleteScanAsync(Guid scanId, CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,156 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Signals.Models;
|
||||
|
||||
namespace StellaOps.Signals.Persistence;
|
||||
|
||||
/// <summary>
/// In-memory implementation of <see cref="ICallGraphProjectionRepository"/> for testing.
/// Backed by <see cref="ConcurrentDictionary{TKey,TValue}"/> instances; mutation
/// paths use the dictionary's atomic operations rather than check-then-act sequences.
/// </summary>
public sealed class InMemoryCallGraphProjectionRepository : ICallGraphProjectionRepository
{
    private readonly ConcurrentDictionary<Guid, ScanRecord> _scans = new();
    private readonly ConcurrentDictionary<(Guid ScanId, string NodeId), NodeRecord> _nodes = new();
    private readonly ConcurrentDictionary<(Guid ScanId, string FromId, string ToId), EdgeRecord> _edges = new();
    private readonly ConcurrentDictionary<(Guid ScanId, string NodeId, string Kind), EntrypointRecord> _entrypoints = new();

    /// <inheritdoc />
    public Task<bool> UpsertScanAsync(
        Guid scanId,
        string artifactDigest,
        string? sbomDigest = null,
        string? repoUri = null,
        string? commitSha = null,
        CancellationToken cancellationToken = default)
    {
        var record = new ScanRecord(scanId, artifactDigest, sbomDigest, repoUri, commitSha, "processing", null);

        // Fix: the previous ContainsKey + indexer sequence was a check-then-act race
        // on a ConcurrentDictionary; TryAdd gives an atomic insert-or-detect.
        var wasInserted = _scans.TryAdd(scanId, record);
        if (!wasInserted)
        {
            // Re-upsert: replace the existing record (status resets to "processing").
            _scans[scanId] = record;
        }

        return Task.FromResult(wasInserted);
    }

    /// <inheritdoc />
    public Task CompleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
    {
        // Compare-and-swap loop so a concurrent writer's update is not silently lost.
        while (_scans.TryGetValue(scanId, out var scan))
        {
            var updated = scan with { Status = "completed", CompletedAt = DateTimeOffset.UtcNow };
            if (_scans.TryUpdate(scanId, updated, scan))
            {
                break;
            }
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task FailScanAsync(Guid scanId, string errorMessage, CancellationToken cancellationToken = default)
    {
        while (_scans.TryGetValue(scanId, out var scan))
        {
            var updated = scan with { Status = "failed", ErrorMessage = errorMessage, CompletedAt = DateTimeOffset.UtcNow };
            if (_scans.TryUpdate(scanId, updated, scan))
            {
                break;
            }
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<int> UpsertNodesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphNode> nodes,
        CancellationToken cancellationToken = default)
    {
        var count = 0;
        // Ordinal ordering keeps projection deterministic across runs.
        foreach (var node in nodes.OrderBy(n => n.Id, StringComparer.Ordinal))
        {
            _nodes[(scanId, node.Id)] = new NodeRecord(scanId, node.Id, node.Name, node.Namespace, node.Purl);
            count++;
        }

        return Task.FromResult(count);
    }

    /// <inheritdoc />
    public Task<int> UpsertEdgesAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEdge> edges,
        CancellationToken cancellationToken = default)
    {
        var count = 0;
        foreach (var edge in edges.OrderBy(e => e.SourceId, StringComparer.Ordinal)
                                  .ThenBy(e => e.TargetId, StringComparer.Ordinal))
        {
            _edges[(scanId, edge.SourceId, edge.TargetId)] =
                new EdgeRecord(scanId, edge.SourceId, edge.TargetId, edge.Kind.ToString(), edge.Weight);
            count++;
        }

        return Task.FromResult(count);
    }

    /// <inheritdoc />
    public Task<int> UpsertEntrypointsAsync(
        Guid scanId,
        IReadOnlyList<CallgraphEntrypoint> entrypoints,
        CancellationToken cancellationToken = default)
    {
        var count = 0;
        foreach (var ep in entrypoints.OrderBy(e => e.NodeId, StringComparer.Ordinal))
        {
            _entrypoints[(scanId, ep.NodeId, ep.Kind.ToString())] =
                new EntrypointRecord(scanId, ep.NodeId, ep.Kind.ToString(), ep.Route, ep.HttpMethod);
            count++;
        }

        return Task.FromResult(count);
    }

    /// <inheritdoc />
    public Task DeleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
    {
        _scans.TryRemove(scanId, out _);

        // Snapshot keys (.ToList) before removal to avoid mutating while enumerating.
        foreach (var key in _nodes.Keys.Where(k => k.ScanId == scanId).ToList())
        {
            _nodes.TryRemove(key, out _);
        }

        foreach (var key in _edges.Keys.Where(k => k.ScanId == scanId).ToList())
        {
            _edges.TryRemove(key, out _);
        }

        foreach (var key in _entrypoints.Keys.Where(k => k.ScanId == scanId).ToList())
        {
            _entrypoints.TryRemove(key, out _);
        }

        return Task.CompletedTask;
    }

    // Accessors for testing
    public IReadOnlyDictionary<Guid, ScanRecord> Scans => _scans;
    public IReadOnlyDictionary<(Guid ScanId, string NodeId), NodeRecord> Nodes => _nodes;
    public IReadOnlyDictionary<(Guid ScanId, string FromId, string ToId), EdgeRecord> Edges => _edges;
    public IReadOnlyDictionary<(Guid ScanId, string NodeId, string Kind), EntrypointRecord> Entrypoints => _entrypoints;

    /// <summary>Snapshot of a projected scan row.</summary>
    public sealed record ScanRecord(
        Guid ScanId,
        string ArtifactDigest,
        string? SbomDigest,
        string? RepoUri,
        string? CommitSha,
        string Status,
        DateTimeOffset? CompletedAt,
        string? ErrorMessage = null);

    /// <summary>Snapshot of a projected cg_nodes row.</summary>
    public sealed record NodeRecord(
        Guid ScanId,
        string NodeId,
        string Name,
        string? Namespace,
        string? Purl);

    /// <summary>Snapshot of a projected cg_edges row.</summary>
    public sealed record EdgeRecord(
        Guid ScanId,
        string FromId,
        string ToId,
        string Kind,
        double Weight);

    /// <summary>Snapshot of a projected entrypoints row.</summary>
    public sealed record EntrypointRecord(
        Guid ScanId,
        string NodeId,
        string Kind,
        string? Route,
        string? HttpMethod);
}
|
||||
@@ -83,6 +83,7 @@ builder.Services.AddRouting(options => options.LowercaseUrls = true);
|
||||
|
||||
builder.Services.AddSingleton<ICallgraphRepository, InMemoryCallgraphRepository>();
|
||||
builder.Services.AddSingleton<ICallgraphNormalizationService, CallgraphNormalizationService>();
|
||||
builder.Services.AddSingleton<ICallGraphProjectionRepository, InMemoryCallGraphProjectionRepository>();
|
||||
|
||||
// Configure callgraph artifact storage based on driver
|
||||
if (bootstrap.Storage.IsRustFsDriver())
|
||||
@@ -117,6 +118,7 @@ builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("p
|
||||
builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("go"));
|
||||
builder.Services.AddSingleton<ICallgraphParserResolver, CallgraphParserResolver>();
|
||||
builder.Services.AddSingleton<ICallgraphIngestionService, CallgraphIngestionService>();
|
||||
builder.Services.AddSingleton<ICallGraphSyncService, CallGraphSyncService>();
|
||||
builder.Services.AddSingleton<IReachabilityCache>(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<SignalsOptions>>().Value;
|
||||
@@ -197,6 +199,7 @@ builder.Services.AddSingleton<IEventsPublisher>(sp =>
|
||||
eventBuilder);
|
||||
});
|
||||
builder.Services.AddSingleton<IReachabilityScoringService, ReachabilityScoringService>();
|
||||
builder.Services.AddSingleton<IScoreExplanationService, ScoreExplanationService>(); // Sprint: SPRINT_3800_0001_0002
|
||||
builder.Services.AddSingleton<IRuntimeFactsProvenanceNormalizer, RuntimeFactsProvenanceNormalizer>();
|
||||
builder.Services.AddSingleton<IRuntimeFactsIngestionService, RuntimeFactsIngestionService>();
|
||||
builder.Services.AddSingleton<IReachabilityUnionIngestionService, ReachabilityUnionIngestionService>();
|
||||
|
||||
118
src/Signals/StellaOps.Signals/Services/CallGraphSyncService.cs
Normal file
118
src/Signals/StellaOps.Signals/Services/CallGraphSyncService.cs
Normal file
@@ -0,0 +1,118 @@
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Persistence;
|
||||
|
||||
namespace StellaOps.Signals.Services;
|
||||
|
||||
/// <summary>
/// Synchronizes canonical callgraph documents to relational tables by projecting
/// the scan record, nodes, edges, and entrypoints through
/// <see cref="ICallGraphProjectionRepository"/>.
/// </summary>
internal sealed class CallGraphSyncService : ICallGraphSyncService
{
    private readonly ICallGraphProjectionRepository _projectionRepository;
    private readonly ILogger<CallGraphSyncService> _logger;
    // NOTE(review): injected but not read by this class yet — presumably reserved
    // for timestamping projections; kept for constructor/DI compatibility.
    private readonly TimeProvider _timeProvider;

    public CallGraphSyncService(
        ICallGraphProjectionRepository projectionRepository,
        TimeProvider timeProvider,
        ILogger<CallGraphSyncService> logger)
    {
        _projectionRepository = projectionRepository ?? throw new ArgumentNullException(nameof(projectionRepository));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<CallGraphSyncResult> SyncAsync(
        Guid scanId,
        string artifactDigest,
        CallgraphDocument document,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(document);
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactDigest);

        var stopwatch = Stopwatch.StartNew();

        _logger.LogInformation(
            "Starting callgraph projection for scan {ScanId}, artifact {ArtifactDigest}, nodes={NodeCount}, edges={EdgeCount}",
            scanId, artifactDigest, document.Nodes.Count, document.Edges.Count);

        try
        {
            // Step 1: Upsert scan record
            await _projectionRepository.UpsertScanAsync(
                scanId,
                artifactDigest,
                document.GraphHash,
                cancellationToken: cancellationToken).ConfigureAwait(false);

            // Step 2: Project nodes in stable order
            var nodesProjected = await _projectionRepository.UpsertNodesAsync(
                scanId,
                document.Nodes,
                cancellationToken).ConfigureAwait(false);

            // Step 3: Project edges in stable order
            var edgesProjected = await _projectionRepository.UpsertEdgesAsync(
                scanId,
                document.Edges,
                cancellationToken).ConfigureAwait(false);

            // Step 4: Project entrypoints in stable order
            var entrypointsProjected = 0;
            if (document.Entrypoints is { Count: > 0 })
            {
                entrypointsProjected = await _projectionRepository.UpsertEntrypointsAsync(
                    scanId,
                    document.Entrypoints,
                    cancellationToken).ConfigureAwait(false);
            }

            // Step 5: Mark scan as completed
            await _projectionRepository.CompleteScanAsync(scanId, cancellationToken).ConfigureAwait(false);

            stopwatch.Stop();

            _logger.LogInformation(
                "Completed callgraph projection for scan {ScanId}: nodes={NodesProjected}, edges={EdgesProjected}, entrypoints={EntrypointsProjected}, duration={DurationMs}ms",
                scanId, nodesProjected, edgesProjected, entrypointsProjected, stopwatch.ElapsedMilliseconds);

            return new CallGraphSyncResult(
                ScanId: scanId,
                NodesProjected: nodesProjected,
                EdgesProjected: edgesProjected,
                EntrypointsProjected: entrypointsProjected,
                WasUpdated: nodesProjected > 0 || edgesProjected > 0,
                DurationMs: stopwatch.ElapsedMilliseconds);
        }
        catch (Exception ex)
        {
            stopwatch.Stop();

            _logger.LogError(
                ex,
                "Failed callgraph projection for scan {ScanId} after {DurationMs}ms: {ErrorMessage}",
                scanId, stopwatch.ElapsedMilliseconds, ex.Message);

            try
            {
                // Fix: use CancellationToken.None here. If the failure above was a
                // cancellation, the incoming token is already signalled and passing
                // it would prevent the scan from ever being marked failed.
                await _projectionRepository.FailScanAsync(scanId, ex.Message, CancellationToken.None).ConfigureAwait(false);
            }
            catch (Exception markEx)
            {
                // Fix: never let bookkeeping replace the original projection failure.
                _logger.LogWarning(
                    markEx,
                    "Could not mark scan {ScanId} as failed after projection error",
                    scanId);
            }

            throw;
        }
    }

    /// <inheritdoc />
    public async Task DeleteByScanAsync(Guid scanId, CancellationToken cancellationToken = default)
    {
        _logger.LogInformation("Deleting callgraph projection for scan {ScanId}", scanId);

        await _projectionRepository.DeleteScanAsync(scanId, cancellationToken).ConfigureAwait(false);

        _logger.LogInformation("Deleted callgraph projection for scan {ScanId}", scanId);
    }
}
|
||||
@@ -32,6 +32,7 @@ internal sealed class CallgraphIngestionService : ICallgraphIngestionService
|
||||
private readonly ICallgraphRepository repository;
|
||||
private readonly IReachabilityStoreRepository reachabilityStore;
|
||||
private readonly ICallgraphNormalizationService normalizer;
|
||||
private readonly ICallGraphSyncService callGraphSyncService;
|
||||
private readonly ILogger<CallgraphIngestionService> logger;
|
||||
private readonly SignalsOptions options;
|
||||
private readonly TimeProvider timeProvider;
|
||||
@@ -43,6 +44,7 @@ internal sealed class CallgraphIngestionService : ICallgraphIngestionService
|
||||
ICallgraphRepository repository,
|
||||
IReachabilityStoreRepository reachabilityStore,
|
||||
ICallgraphNormalizationService normalizer,
|
||||
ICallGraphSyncService callGraphSyncService,
|
||||
IOptions<SignalsOptions> options,
|
||||
TimeProvider timeProvider,
|
||||
ILogger<CallgraphIngestionService> logger)
|
||||
@@ -52,6 +54,7 @@ internal sealed class CallgraphIngestionService : ICallgraphIngestionService
|
||||
this.repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
this.reachabilityStore = reachabilityStore ?? throw new ArgumentNullException(nameof(reachabilityStore));
|
||||
this.normalizer = normalizer ?? throw new ArgumentNullException(nameof(normalizer));
|
||||
this.callGraphSyncService = callGraphSyncService ?? throw new ArgumentNullException(nameof(callGraphSyncService));
|
||||
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
@@ -161,6 +164,38 @@ internal sealed class CallgraphIngestionService : ICallgraphIngestionService
|
||||
document.Edges,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Project the callgraph into relational tables for cross-artifact queries
|
||||
// This is triggered post-upsert per SPRINT_3104 requirements
|
||||
var scanId = Guid.TryParse(document.Id, out var parsedScanId)
|
||||
? parsedScanId
|
||||
: Guid.NewGuid();
|
||||
var artifactDigest = document.Artifact.Hash ?? document.GraphHash ?? document.Id;
|
||||
|
||||
try
|
||||
{
|
||||
var syncResult = await callGraphSyncService.SyncAsync(
|
||||
scanId,
|
||||
artifactDigest,
|
||||
document,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
logger.LogDebug(
|
||||
"Projected callgraph {Id} to relational tables: nodes={NodesProjected}, edges={EdgesProjected}, entrypoints={EntrypointsProjected}, duration={DurationMs}ms",
|
||||
document.Id,
|
||||
syncResult.NodesProjected,
|
||||
syncResult.EdgesProjected,
|
||||
syncResult.EntrypointsProjected,
|
||||
syncResult.DurationMs);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Log but don't fail the ingest - projection is a secondary operation
|
||||
logger.LogWarning(
|
||||
ex,
|
||||
"Failed to project callgraph {Id} to relational tables. The JSONB document was persisted successfully.",
|
||||
document.Id);
|
||||
}
|
||||
|
||||
logger.LogInformation(
|
||||
"Ingested callgraph {Language}:{Component}:{Version} (id={Id}) with {NodeCount} nodes and {EdgeCount} edges.",
|
||||
document.Language,
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Signals.Models;
|
||||
|
||||
namespace StellaOps.Signals.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Synchronizes canonical callgraph documents to relational tables.
|
||||
/// Enables cross-artifact queries, analytics, and efficient lookups.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This service projects the JSONB <see cref="CallgraphDocument"/> into
|
||||
/// the relational tables defined in signals.* schema (cg_nodes, cg_edges,
|
||||
/// entrypoints, etc.) for efficient querying.
|
||||
/// </remarks>
|
||||
public interface ICallGraphSyncService
|
||||
{
|
||||
/// <summary>
|
||||
/// Projects a callgraph document into relational tables.
|
||||
/// This operation is idempotent—repeated calls with the same
|
||||
/// document will not create duplicates.
|
||||
/// </summary>
|
||||
/// <param name="scanId">The scan identifier.</param>
|
||||
/// <param name="artifactDigest">The artifact digest for the scan context.</param>
|
||||
/// <param name="document">The callgraph document to project.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>A result indicating projection status and statistics.</returns>
|
||||
Task<CallGraphSyncResult> SyncAsync(
|
||||
Guid scanId,
|
||||
string artifactDigest,
|
||||
CallgraphDocument document,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Removes all relational data for a given scan.
|
||||
/// Used for cleanup or re-projection.
|
||||
/// </summary>
|
||||
/// <param name="scanId">The scan identifier to clean up.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
Task DeleteByScanAsync(Guid scanId, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a call graph sync operation.
|
||||
/// </summary>
|
||||
/// <param name="ScanId">The scan identifier.</param>
|
||||
/// <param name="NodesProjected">Number of nodes projected.</param>
|
||||
/// <param name="EdgesProjected">Number of edges projected.</param>
|
||||
/// <param name="EntrypointsProjected">Number of entrypoints projected.</param>
|
||||
/// <param name="WasUpdated">True if any data was inserted/updated.</param>
|
||||
/// <param name="DurationMs">Duration of the sync operation in milliseconds.</param>
|
||||
public sealed record CallGraphSyncResult(
|
||||
Guid ScanId,
|
||||
int NodesProjected,
|
||||
int EdgesProjected,
|
||||
int EntrypointsProjected,
|
||||
bool WasUpdated,
|
||||
long DurationMs);
|
||||
@@ -0,0 +1,92 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IScoreExplanationService.cs
|
||||
// Sprint: SPRINT_3800_0001_0002_score_explanation_service
|
||||
// Description: Interface for computing additive score explanations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Signals.Models;
|
||||
|
||||
namespace StellaOps.Signals.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Service for computing additive score explanations.
|
||||
/// Transforms reachability data, CVSS scores, and gate information into
|
||||
/// human-readable score contributions.
|
||||
/// </summary>
|
||||
public interface IScoreExplanationService
|
||||
{
|
||||
/// <summary>
|
||||
/// Computes a score explanation for a reachability fact.
|
||||
/// </summary>
|
||||
/// <param name="request">The score explanation request containing all input data.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>A score explanation with contributions summing to the risk score.</returns>
|
||||
Task<ScoreExplanation> ComputeExplanationAsync(
|
||||
ScoreExplanationRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Computes a score explanation synchronously.
|
||||
/// </summary>
|
||||
/// <param name="request">The score explanation request.</param>
|
||||
/// <returns>A score explanation with contributions.</returns>
|
||||
ScoreExplanation ComputeExplanation(ScoreExplanationRequest request);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request for computing a score explanation.
|
||||
/// </summary>
|
||||
public sealed record ScoreExplanationRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// CVE identifier.
|
||||
/// </summary>
|
||||
public string? CveId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVSS v4 base score (0.0-10.0).
|
||||
/// </summary>
|
||||
public double? CvssScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// EPSS probability (0.0-1.0).
|
||||
/// </summary>
|
||||
public double? EpssScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reachability bucket (entrypoint, direct, runtime, unknown, unreachable).
|
||||
/// </summary>
|
||||
public string? ReachabilityBucket { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Entrypoint type (http, grpc, cli, internal).
|
||||
/// </summary>
|
||||
public string? EntrypointType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Detected gates protecting the path.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string>? Gates { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the vulnerability is in the KEV list.
|
||||
/// </summary>
|
||||
public bool IsKnownExploited { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the path is internet-facing.
|
||||
/// </summary>
|
||||
public bool? IsInternetFacing { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX status if available.
|
||||
/// </summary>
|
||||
public string? VexStatus { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the evidence source (scan ID, graph hash, etc.).
|
||||
/// </summary>
|
||||
public string? EvidenceRef { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,315 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreExplanationService.cs
|
||||
// Sprint: SPRINT_3800_0001_0002_score_explanation_service
|
||||
// Description: Implementation of additive score explanation computation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Options;
|
||||
|
||||
namespace StellaOps.Signals.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Computes additive score explanations for vulnerability findings.
|
||||
/// The score is computed as a sum of weighted factors, each with a human-readable explanation.
|
||||
/// </summary>
|
||||
public sealed class ScoreExplanationService : IScoreExplanationService
|
||||
{
|
||||
private readonly IOptions<SignalsScoringOptions> _options;
|
||||
private readonly ILogger<ScoreExplanationService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public ScoreExplanationService(
|
||||
IOptions<SignalsScoringOptions> options,
|
||||
ILogger<ScoreExplanationService> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_options = options ?? throw new ArgumentNullException(nameof(options));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ScoreExplanation> ComputeExplanationAsync(
|
||||
ScoreExplanationRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult(ComputeExplanation(request));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ScoreExplanation ComputeExplanation(ScoreExplanationRequest request)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var weights = _options.Value.ExplanationWeights;
|
||||
var contributions = new List<ScoreContribution>();
|
||||
var modifiers = new List<ScoreModifier>();
|
||||
double runningTotal = 0.0;
|
||||
|
||||
// 1. CVSS Base Score Contribution
|
||||
if (request.CvssScore.HasValue)
|
||||
{
|
||||
var cvssContribution = request.CvssScore.Value * weights.CvssMultiplier;
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.CvssBase,
|
||||
Weight = weights.CvssMultiplier,
|
||||
RawValue = request.CvssScore.Value,
|
||||
Contribution = cvssContribution,
|
||||
Explanation = $"CVSS base score {request.CvssScore.Value:F1} × {weights.CvssMultiplier:F1} weight",
|
||||
Source = "nvd"
|
||||
});
|
||||
runningTotal += cvssContribution;
|
||||
}
|
||||
|
||||
// 2. EPSS Contribution
|
||||
if (request.EpssScore.HasValue)
|
||||
{
|
||||
var epssContribution = request.EpssScore.Value * weights.EpssMultiplier;
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.Epss,
|
||||
Weight = weights.EpssMultiplier,
|
||||
RawValue = request.EpssScore.Value,
|
||||
Contribution = epssContribution,
|
||||
Explanation = $"EPSS probability {request.EpssScore.Value:P1} indicates exploitation likelihood",
|
||||
Source = "first"
|
||||
});
|
||||
runningTotal += epssContribution;
|
||||
}
|
||||
|
||||
// 3. Reachability Contribution
|
||||
if (!string.IsNullOrEmpty(request.ReachabilityBucket))
|
||||
{
|
||||
var (reachabilityContribution, reachabilityExplanation) = ComputeReachabilityContribution(
|
||||
request.ReachabilityBucket, weights);
|
||||
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.Reachability,
|
||||
Weight = 1.0,
|
||||
RawValue = reachabilityContribution,
|
||||
Contribution = reachabilityContribution,
|
||||
Explanation = reachabilityExplanation,
|
||||
Source = "scan"
|
||||
});
|
||||
runningTotal += reachabilityContribution;
|
||||
}
|
||||
|
||||
// 4. Exposure Surface Contribution
|
||||
if (!string.IsNullOrEmpty(request.EntrypointType))
|
||||
{
|
||||
var (exposureContribution, exposureExplanation) = ComputeExposureContribution(
|
||||
request.EntrypointType, request.IsInternetFacing, weights);
|
||||
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.ExposureSurface,
|
||||
Weight = 1.0,
|
||||
RawValue = exposureContribution,
|
||||
Contribution = exposureContribution,
|
||||
Explanation = exposureExplanation,
|
||||
Source = "scan"
|
||||
});
|
||||
runningTotal += exposureContribution;
|
||||
}
|
||||
|
||||
// 5. Gate Multipliers (Discounts)
|
||||
if (request.Gates is { Count: > 0 })
|
||||
{
|
||||
var (gateDiscount, gateExplanation) = ComputeGateDiscounts(request.Gates, weights);
|
||||
|
||||
if (gateDiscount != 0)
|
||||
{
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.GateMultiplier,
|
||||
Weight = 1.0,
|
||||
RawValue = gateDiscount,
|
||||
Contribution = gateDiscount,
|
||||
Explanation = gateExplanation,
|
||||
Source = "scan"
|
||||
});
|
||||
runningTotal += gateDiscount;
|
||||
}
|
||||
}
|
||||
|
||||
// 6. Known Exploitation Bonus
|
||||
if (request.IsKnownExploited)
|
||||
{
|
||||
contributions.Add(new ScoreContribution
|
||||
{
|
||||
Factor = ScoreFactors.KnownExploitation,
|
||||
Weight = 1.0,
|
||||
RawValue = weights.KevBonus,
|
||||
Contribution = weights.KevBonus,
|
||||
Explanation = "Vulnerability is in CISA KEV list (known exploited)",
|
||||
Source = "cisa_kev"
|
||||
});
|
||||
runningTotal += weights.KevBonus;
|
||||
}
|
||||
|
||||
// 7. VEX Override (if not_affected, reduce to near-zero)
|
||||
if (string.Equals(request.VexStatus, "not_affected", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
var vexReduction = -(runningTotal * 0.9); // Reduce by 90%
|
||||
modifiers.Add(new ScoreModifier
|
||||
{
|
||||
Type = "vex_reduction",
|
||||
Before = runningTotal,
|
||||
After = runningTotal + vexReduction,
|
||||
Reason = "VEX statement indicates vulnerability is not exploitable in this context",
|
||||
PolicyRef = "vex:not_affected"
|
||||
});
|
||||
runningTotal += vexReduction;
|
||||
}
|
||||
|
||||
// Apply floor/ceiling
|
||||
var originalTotal = runningTotal;
|
||||
runningTotal = Math.Clamp(runningTotal, weights.MinScore, weights.MaxScore);
|
||||
|
||||
if (runningTotal != originalTotal)
|
||||
{
|
||||
modifiers.Add(new ScoreModifier
|
||||
{
|
||||
Type = runningTotal < originalTotal ? "cap" : "floor",
|
||||
Before = originalTotal,
|
||||
After = runningTotal,
|
||||
Reason = $"Score clamped to {weights.MinScore:F0}-{weights.MaxScore:F0} range"
|
||||
});
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Computed score explanation: {Score:F2} with {ContributionCount} contributions for {CveId}",
|
||||
runningTotal, contributions.Count, request.CveId ?? "unknown");
|
||||
|
||||
return new ScoreExplanation
|
||||
{
|
||||
Kind = "stellaops_risk_v1",
|
||||
RiskScore = runningTotal,
|
||||
Contributions = contributions,
|
||||
LastSeen = _timeProvider.GetUtcNow(),
|
||||
AlgorithmVersion = "1.0.0",
|
||||
EvidenceRef = request.EvidenceRef,
|
||||
Summary = GenerateSummary(runningTotal, contributions),
|
||||
Modifiers = modifiers.Count > 0 ? modifiers : null
|
||||
};
|
||||
}
|
||||
|
||||
private static (double contribution, string explanation) ComputeReachabilityContribution(
|
||||
string bucket, ScoreExplanationWeights weights)
|
||||
{
|
||||
return bucket.ToLowerInvariant() switch
|
||||
{
|
||||
"entrypoint" => (weights.EntrypointReachability,
|
||||
"Vulnerable code is directly reachable from application entrypoint"),
|
||||
"direct" => (weights.DirectReachability,
|
||||
"Vulnerable code is directly called from application code"),
|
||||
"runtime" => (weights.RuntimeReachability,
|
||||
"Vulnerable code execution observed at runtime"),
|
||||
"unknown" => (weights.UnknownReachability,
|
||||
"Reachability could not be determined; assuming partial exposure"),
|
||||
"unreachable" => (weights.UnreachableReachability,
|
||||
"No path found from entrypoints to vulnerable code"),
|
||||
_ => (weights.UnknownReachability,
|
||||
$"Unknown reachability bucket '{bucket}'; assuming partial exposure")
|
||||
};
|
||||
}
|
||||
|
||||
private static (double contribution, string explanation) ComputeExposureContribution(
|
||||
string entrypointType, bool? isInternetFacing, ScoreExplanationWeights weights)
|
||||
{
|
||||
var baseContribution = entrypointType.ToLowerInvariant() switch
|
||||
{
|
||||
"http" or "https" or "http_handler" => weights.HttpExposure,
|
||||
"grpc" or "grpc_method" => weights.GrpcExposure,
|
||||
"cli" or "cli_command" or "scheduled" => weights.CliExposure,
|
||||
"internal" or "library" => weights.InternalExposure,
|
||||
_ => weights.InternalExposure
|
||||
};
|
||||
|
||||
var exposureType = entrypointType.ToLowerInvariant() switch
|
||||
{
|
||||
"http" or "https" or "http_handler" => "HTTP/HTTPS",
|
||||
"grpc" or "grpc_method" => "gRPC",
|
||||
"cli" or "cli_command" => "CLI",
|
||||
"scheduled" => "scheduled task",
|
||||
"internal" or "library" => "internal",
|
||||
_ => entrypointType
|
||||
};
|
||||
|
||||
var internetSuffix = isInternetFacing == true ? " (internet-facing)" : "";
|
||||
return (baseContribution, $"Exposed via {exposureType} entrypoint{internetSuffix}");
|
||||
}
|
||||
|
||||
private static (double discount, string explanation) ComputeGateDiscounts(
|
||||
IReadOnlyList<string> gates, ScoreExplanationWeights weights)
|
||||
{
|
||||
double totalDiscount = 0;
|
||||
var gateDescriptions = new List<string>();
|
||||
|
||||
foreach (var gate in gates)
|
||||
{
|
||||
var normalizedGate = gate.ToLowerInvariant();
|
||||
|
||||
if (normalizedGate.Contains("auth") || normalizedGate.Contains("authorize"))
|
||||
{
|
||||
totalDiscount += weights.AuthGateDiscount;
|
||||
gateDescriptions.Add("authentication required");
|
||||
}
|
||||
else if (normalizedGate.Contains("admin") || normalizedGate.Contains("role"))
|
||||
{
|
||||
totalDiscount += weights.AdminGateDiscount;
|
||||
gateDescriptions.Add("admin/role restriction");
|
||||
}
|
||||
else if (normalizedGate.Contains("feature") || normalizedGate.Contains("flag"))
|
||||
{
|
||||
totalDiscount += weights.FeatureFlagDiscount;
|
||||
gateDescriptions.Add("feature flag protection");
|
||||
}
|
||||
else if (normalizedGate.Contains("config") || normalizedGate.Contains("default"))
|
||||
{
|
||||
totalDiscount += weights.NonDefaultConfigDiscount;
|
||||
gateDescriptions.Add("non-default configuration");
|
||||
}
|
||||
}
|
||||
|
||||
if (gateDescriptions.Count == 0)
|
||||
{
|
||||
return (0, "No protective gates detected");
|
||||
}
|
||||
|
||||
return (totalDiscount, $"Protected by: {string.Join(", ", gateDescriptions)}");
|
||||
}
|
||||
|
||||
private static string GenerateSummary(double score, IReadOnlyList<ScoreContribution> contributions)
|
||||
{
|
||||
var severity = score switch
|
||||
{
|
||||
>= 80 => "Critical",
|
||||
>= 60 => "High",
|
||||
>= 40 => "Medium",
|
||||
>= 20 => "Low",
|
||||
_ => "Minimal"
|
||||
};
|
||||
|
||||
var topFactors = contributions
|
||||
.OrderByDescending(c => Math.Abs(c.Contribution))
|
||||
.Take(2)
|
||||
.Select(c => c.Factor)
|
||||
.ToList();
|
||||
|
||||
var factorSummary = topFactors.Count > 0
|
||||
? $" driven by {string.Join(" and ", topFactors)}"
|
||||
: "";
|
||||
|
||||
return $"{severity} risk ({score:F0}/100){factorSummary}";
|
||||
}
|
||||
}
|
||||
@@ -12,3 +12,7 @@ This file mirrors sprint work for the Signals module.
|
||||
| `GATE-3405-011` | `docs/implplan/SPRINT_3405_0001_0001_gate_multipliers.md` | DONE (2025-12-18) | Applied gate multipliers in `ReachabilityScoringService` using path gate evidence from callgraph edges. |
|
||||
| `GATE-3405-012` | `docs/implplan/SPRINT_3405_0001_0001_gate_multipliers.md` | DONE (2025-12-18) | Extended reachability fact evidence contract + digest to include `GateMultiplierBps` and `Gates`. |
|
||||
| `GATE-3405-016` | `docs/implplan/SPRINT_3405_0001_0001_gate_multipliers.md` | DONE (2025-12-18) | Added deterministic parser/normalizer/scoring coverage for gate propagation + multiplier effect. |
|
||||
| `SIG-CG-3104-001` | `docs/implplan/SPRINT_3104_0001_0001_signals_callgraph_projection_completion.md` | DONE (2025-12-18) | Defined `ICallGraphSyncService` contract for projecting callgraphs into relational tables. |
|
||||
| `SIG-CG-3104-002` | `docs/implplan/SPRINT_3104_0001_0001_signals_callgraph_projection_completion.md` | DONE (2025-12-18) | Implemented `CallGraphSyncService` with idempotent, transactional batch projection. |
|
||||
| `SIG-CG-3104-003` | `docs/implplan/SPRINT_3104_0001_0001_signals_callgraph_projection_completion.md` | DONE (2025-12-18) | Wired projection trigger in `CallgraphIngestionService` post-upsert. |
|
||||
| `SIG-CG-3104-004` | `docs/implplan/SPRINT_3104_0001_0001_signals_callgraph_projection_completion.md` | DONE (2025-12-18) | Added unit tests (`CallGraphSyncServiceTests.cs`) and integration tests (`CallGraphProjectionIntegrationTests.cs`). |
|
||||
|
||||
@@ -0,0 +1,271 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Persistence;
|
||||
using StellaOps.Signals.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Signals.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="CallGraphSyncService"/>.
|
||||
/// </summary>
|
||||
public sealed class CallGraphSyncServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task SyncAsync_WithValidDocument_ReturnsSuccessResult()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = CreateSampleDocument();
|
||||
|
||||
// Act
|
||||
var result = await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(scanId, result.ScanId);
|
||||
Assert.Equal(3, result.NodesProjected);
|
||||
Assert.Equal(2, result.EdgesProjected);
|
||||
Assert.Equal(1, result.EntrypointsProjected);
|
||||
Assert.True(result.WasUpdated);
|
||||
Assert.True(result.DurationMs >= 0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_ProjectsToRepository()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = CreateSampleDocument();
|
||||
|
||||
// Act
|
||||
await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Assert - check repository state
|
||||
Assert.Single(repository.Scans);
|
||||
Assert.Equal(3, repository.Nodes.Count);
|
||||
Assert.Equal(2, repository.Edges.Count);
|
||||
Assert.Single(repository.Entrypoints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_SetsScanStatusToCompleted()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = CreateSampleDocument();
|
||||
|
||||
// Act
|
||||
await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Assert
|
||||
Assert.True(repository.Scans.ContainsKey(scanId));
|
||||
Assert.Equal("completed", repository.Scans[scanId].Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_WithEmptyDocument_ReturnsZeroCounts()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = new CallgraphDocument
|
||||
{
|
||||
Id = Guid.NewGuid().ToString("N"),
|
||||
Language = "csharp",
|
||||
GraphHash = "test-hash",
|
||||
Nodes = new List<CallgraphNode>(),
|
||||
Edges = new List<CallgraphEdge>(),
|
||||
Entrypoints = new List<CallgraphEntrypoint>()
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(0, result.NodesProjected);
|
||||
Assert.Equal(0, result.EdgesProjected);
|
||||
Assert.Equal(0, result.EntrypointsProjected);
|
||||
Assert.False(result.WasUpdated);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_WithNullDocument_ThrowsArgumentNullException()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<ArgumentNullException>(() =>
|
||||
service.SyncAsync(Guid.NewGuid(), "sha256:test-digest", null!));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_WithEmptyArtifactDigest_ThrowsArgumentException()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var document = CreateSampleDocument();
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<ArgumentException>(() =>
|
||||
service.SyncAsync(Guid.NewGuid(), "", document));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DeleteByScanAsync_RemovesScanFromRepository()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new InMemoryCallGraphProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = CreateSampleDocument();
|
||||
await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Act
|
||||
await service.DeleteByScanAsync(scanId);
|
||||
|
||||
// Assert
|
||||
Assert.Empty(repository.Scans);
|
||||
Assert.Empty(repository.Nodes);
|
||||
Assert.Empty(repository.Edges);
|
||||
Assert.Empty(repository.Entrypoints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SyncAsync_OrdersNodesAndEdgesDeterministically()
|
||||
{
|
||||
// Arrange
|
||||
var repository = new TrackingProjectionRepository();
|
||||
var service = new CallGraphSyncService(
|
||||
repository,
|
||||
TimeProvider.System,
|
||||
NullLogger<CallGraphSyncService>.Instance);
|
||||
|
||||
var scanId = Guid.NewGuid();
|
||||
var document = new CallgraphDocument
|
||||
{
|
||||
Id = Guid.NewGuid().ToString("N"),
|
||||
Language = "csharp",
|
||||
GraphHash = "test-hash",
|
||||
Nodes = new List<CallgraphNode>
|
||||
{
|
||||
new() { Id = "z-node", Name = "Last" },
|
||||
new() { Id = "a-node", Name = "First" },
|
||||
new() { Id = "m-node", Name = "Middle" }
|
||||
},
|
||||
Edges = new List<CallgraphEdge>
|
||||
{
|
||||
new() { SourceId = "z-node", TargetId = "a-node" },
|
||||
new() { SourceId = "a-node", TargetId = "m-node" }
|
||||
},
|
||||
Entrypoints = new List<CallgraphEntrypoint>()
|
||||
};
|
||||
|
||||
// Act
|
||||
await service.SyncAsync(scanId, "sha256:test-digest", document);
|
||||
|
||||
// Assert - nodes should be processed in sorted order by Id
|
||||
Assert.Equal(3, repository.ProjectedNodes.Count);
|
||||
Assert.Equal("a-node", repository.ProjectedNodes[0].Id);
|
||||
Assert.Equal("m-node", repository.ProjectedNodes[1].Id);
|
||||
Assert.Equal("z-node", repository.ProjectedNodes[2].Id);
|
||||
}
|
||||
|
||||
private static CallgraphDocument CreateSampleDocument()
|
||||
{
|
||||
return new CallgraphDocument
|
||||
{
|
||||
Id = Guid.NewGuid().ToString("N"),
|
||||
Language = "csharp",
|
||||
GraphHash = "sha256:sample-graph-hash",
|
||||
Nodes = new List<CallgraphNode>
|
||||
{
|
||||
new() { Id = "node-1", Name = "Main", Kind = "method", Namespace = "Program", Visibility = SymbolVisibility.Public, IsEntrypointCandidate = true },
|
||||
new() { Id = "node-2", Name = "DoWork", Kind = "method", Namespace = "Service", Visibility = SymbolVisibility.Internal },
|
||||
new() { Id = "node-3", Name = "ProcessData", Kind = "method", Namespace = "Core", Visibility = SymbolVisibility.Private }
|
||||
},
|
||||
Edges = new List<CallgraphEdge>
|
||||
{
|
||||
new() { SourceId = "node-1", TargetId = "node-2", Kind = EdgeKind.Static, Reason = EdgeReason.DirectCall, Weight = 1.0 },
|
||||
new() { SourceId = "node-2", TargetId = "node-3", Kind = EdgeKind.Static, Reason = EdgeReason.DirectCall, Weight = 1.0 }
|
||||
},
|
||||
Entrypoints = new List<CallgraphEntrypoint>
|
||||
{
|
||||
new() { NodeId = "node-1", Kind = EntrypointKind.Main, Phase = EntrypointPhase.AppStart, Order = 0 }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Test repository that tracks the order of projected nodes.
|
||||
/// </summary>
|
||||
private sealed class TrackingProjectionRepository : ICallGraphProjectionRepository
|
||||
{
|
||||
public List<CallgraphNode> ProjectedNodes { get; } = new();
|
||||
|
||||
public Task<bool> UpsertScanAsync(Guid scanId, string artifactDigest, string? sbomDigest = null, string? repoUri = null, string? commitSha = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(true);
|
||||
|
||||
public Task CompleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task FailScanAsync(Guid scanId, string errorMessage, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task<int> UpsertNodesAsync(Guid scanId, IReadOnlyList<CallgraphNode> nodes, CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Store in the order received - the service should have sorted them
|
||||
ProjectedNodes.AddRange(nodes);
|
||||
return Task.FromResult(nodes.Count);
|
||||
}
|
||||
|
||||
public Task<int> UpsertEdgesAsync(Guid scanId, IReadOnlyList<CallgraphEdge> edges, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(edges.Count);
|
||||
|
||||
public Task<int> UpsertEntrypointsAsync(Guid scanId, IReadOnlyList<CallgraphEntrypoint> entrypoints, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(entrypoints.Count);
|
||||
|
||||
public Task DeleteScanAsync(Guid scanId, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
@@ -33,12 +33,14 @@ public class CallgraphIngestionServiceTests
|
||||
var resolver = new StubParserResolver(parser);
|
||||
var options = Microsoft.Extensions.Options.Options.Create(new SignalsOptions());
|
||||
var reachabilityStore = new InMemoryReachabilityStoreRepository(_timeProvider);
|
||||
var callGraphSyncService = new StubCallGraphSyncService();
|
||||
var service = new CallgraphIngestionService(
|
||||
resolver,
|
||||
_artifactStore,
|
||||
_repository,
|
||||
reachabilityStore,
|
||||
_normalizer,
|
||||
callGraphSyncService,
|
||||
options,
|
||||
_timeProvider,
|
||||
NullLogger<CallgraphIngestionService>.Instance);
|
||||
@@ -189,4 +191,33 @@ public class CallgraphIngestionServiceTests
|
||||
return Task.FromResult(document);
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class StubCallGraphSyncService : ICallGraphSyncService
|
||||
{
|
||||
public CallGraphSyncResult? LastSyncResult { get; private set; }
|
||||
public CallgraphDocument? LastSyncedDocument { get; private set; }
|
||||
|
||||
public Task<CallGraphSyncResult> SyncAsync(
|
||||
Guid scanId,
|
||||
string artifactDigest,
|
||||
CallgraphDocument document,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
LastSyncedDocument = document;
|
||||
var result = new CallGraphSyncResult(
|
||||
ScanId: scanId,
|
||||
NodesProjected: document.Nodes.Count,
|
||||
EdgesProjected: document.Edges.Count,
|
||||
EntrypointsProjected: document.Entrypoints.Count,
|
||||
WasUpdated: true,
|
||||
DurationMs: 1);
|
||||
LastSyncResult = result;
|
||||
return Task.FromResult(result);
|
||||
}
|
||||
|
||||
public Task DeleteByScanAsync(Guid scanId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,287 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreExplanationServiceTests.cs
|
||||
// Sprint: SPRINT_3800_0001_0002_score_explanation_service
|
||||
// Description: Unit tests for ScoreExplanationService.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Signals.Models;
|
||||
using StellaOps.Signals.Options;
|
||||
using StellaOps.Signals.Services;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Signals.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ScoreExplanationService"/>: per-factor contributions,
/// gate discounts, KEV bonus, VEX reduction, clamping, and determinism.
/// </summary>
public class ScoreExplanationServiceTests
{
    private readonly ScoreExplanationService _service;
    private readonly SignalsScoringOptions _options;

    public ScoreExplanationServiceTests()
    {
        // Default options: every test below exercises the documented default weights.
        _options = new SignalsScoringOptions();
        _service = new ScoreExplanationService(
            Options.Create(_options),
            NullLogger<ScoreExplanationService>.Instance);
    }

    [Fact]
    public void ComputeExplanation_WithCvssOnly_ReturnsCorrectContribution()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CveId = "CVE-2021-44228",
            CvssScore = 10.0
        });

        Assert.Equal("stellaops_risk_v1", explanation.Kind);

        var cvss = Assert.Single(explanation.Contributions);
        Assert.Equal(ScoreFactors.CvssBase, cvss.Factor);
        Assert.Equal(10.0, cvss.RawValue);
        Assert.Equal(50.0, cvss.Contribution); // 10.0 * 5.0 default multiplier
        Assert.Equal(50.0, explanation.RiskScore);
    }

    [Fact]
    public void ComputeExplanation_WithEpss_ReturnsCorrectContribution()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CveId = "CVE-2023-12345",
            EpssScore = 0.5 // 50% probability
        });

        var epss = explanation.Contributions.Single(c => c.Factor == ScoreFactors.Epss);
        Assert.Equal(0.5, epss.RawValue);
        Assert.Equal(5.0, epss.Contribution); // 0.5 * 10.0 default multiplier
    }

    [Theory]
    [InlineData("entrypoint", 25.0)]
    [InlineData("direct", 20.0)]
    [InlineData("runtime", 22.0)]
    [InlineData("unknown", 12.0)]
    [InlineData("unreachable", 0.0)]
    public void ComputeExplanation_WithReachabilityBucket_ReturnsCorrectContribution(
        string bucket, double expectedContribution)
    {
        var explanation = _service.ComputeExplanation(
            new ScoreExplanationRequest { ReachabilityBucket = bucket });

        var reachability = explanation.Contributions.Single(c => c.Factor == ScoreFactors.Reachability);
        Assert.Equal(expectedContribution, reachability.Contribution);
    }

    [Theory]
    [InlineData("http", 15.0)]
    [InlineData("https", 15.0)]
    [InlineData("http_handler", 15.0)]
    [InlineData("grpc", 12.0)]
    [InlineData("cli", 3.0)]
    [InlineData("internal", 5.0)]
    public void ComputeExplanation_WithEntrypointType_ReturnsCorrectExposure(
        string entrypointType, double expectedContribution)
    {
        var explanation = _service.ComputeExplanation(
            new ScoreExplanationRequest { EntrypointType = entrypointType });

        var exposure = explanation.Contributions.Single(c => c.Factor == ScoreFactors.ExposureSurface);
        Assert.Equal(expectedContribution, exposure.Contribution);
    }

    [Fact]
    public void ComputeExplanation_WithAuthGate_AppliesDiscount()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 8.0,
            Gates = new[] { "auth_required" }
        });

        var gate = explanation.Contributions.Single(c => c.Factor == ScoreFactors.GateMultiplier);
        Assert.Equal(-3.0, gate.Contribution); // Default auth discount
        Assert.Equal(37.0, explanation.RiskScore); // 8.0 * 5.0 - 3.0
    }

    [Fact]
    public void ComputeExplanation_WithMultipleGates_CombinesDiscounts()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 10.0,
            Gates = new[] { "auth_required", "admin_role", "feature_flag" }
        });

        var gate = explanation.Contributions.Single(c => c.Factor == ScoreFactors.GateMultiplier);
        // auth: -3, admin: -5, feature_flag: -2 = -10 total
        Assert.Equal(-10.0, gate.Contribution);
        Assert.Equal(40.0, explanation.RiskScore); // 50 - 10
    }

    [Fact]
    public void ComputeExplanation_WithKev_AppliesBonus()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 7.0,
            IsKnownExploited = true
        });

        var kev = explanation.Contributions.Single(c => c.Factor == ScoreFactors.KnownExploitation);
        Assert.Equal(10.0, kev.Contribution);
        Assert.Equal(45.0, explanation.RiskScore); // 7.0 * 5.0 + 10.0
    }

    [Fact]
    public void ComputeExplanation_WithVexNotAffected_ReducesScore()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 10.0,
            VexStatus = "not_affected"
        });

        Assert.NotNull(explanation.Modifiers);
        Assert.Contains(explanation.Modifiers, m => m.Type == "vex_reduction");
        Assert.True(explanation.RiskScore < 50.0); // Should be significantly reduced
    }

    [Fact]
    public void ComputeExplanation_ClampsToMaxScore()
    {
        // Stack every positive factor so the raw sum would exceed the cap.
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 10.0,
            EpssScore = 0.95,
            ReachabilityBucket = "entrypoint",
            EntrypointType = "http",
            IsKnownExploited = true
        });

        Assert.Equal(100.0, explanation.RiskScore); // Clamped to max
        Assert.NotNull(explanation.Modifiers);
        Assert.Contains(explanation.Modifiers, m => m.Type == "cap");
    }

    [Fact]
    public void ComputeExplanation_ContributionsSumToTotal()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 8.5,
            EpssScore = 0.3,
            ReachabilityBucket = "direct",
            EntrypointType = "grpc"
        });

        // The headline score must equal the sum of the per-factor contributions.
        Assert.Equal(explanation.Contributions.Sum(c => c.Contribution), explanation.RiskScore, precision: 5);
    }

    [Fact]
    public void ComputeExplanation_GeneratesSummary()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 9.8,
            ReachabilityBucket = "entrypoint"
        });

        Assert.NotNull(explanation.Summary);
        Assert.Contains("risk", explanation.Summary, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void ComputeExplanation_SetsAlgorithmVersion()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest { CvssScore = 5.0 });

        Assert.Equal("1.0.0", explanation.AlgorithmVersion);
    }

    [Fact]
    public void ComputeExplanation_PreservesEvidenceRef()
    {
        var explanation = _service.ComputeExplanation(new ScoreExplanationRequest
        {
            CvssScore = 5.0,
            EvidenceRef = "scan:abc123"
        });

        Assert.Equal("scan:abc123", explanation.EvidenceRef);
    }

    [Fact]
    public async Task ComputeExplanationAsync_ReturnsSameAsSync()
    {
        var request = new ScoreExplanationRequest
        {
            CvssScore = 7.5,
            ReachabilityBucket = "runtime"
        };

        var fromSync = _service.ComputeExplanation(request);
        var fromAsync = await _service.ComputeExplanationAsync(request);

        Assert.Equal(fromSync.RiskScore, fromAsync.RiskScore);
        Assert.Equal(fromSync.Contributions.Count, fromAsync.Contributions.Count);
    }

    [Fact]
    public void ComputeExplanation_IsDeterministic()
    {
        var request = new ScoreExplanationRequest
        {
            CvssScore = 8.0,
            EpssScore = 0.4,
            ReachabilityBucket = "entrypoint",
            EntrypointType = "http",
            Gates = new[] { "auth_required" }
        };

        var first = _service.ComputeExplanation(request);
        var second = _service.ComputeExplanation(request);

        Assert.Equal(first.RiskScore, second.RiskScore);
        Assert.Equal(first.Contributions.Count, second.Contributions.Count);

        // Pairwise compare factor order and values across both runs.
        foreach (var (a, b) in first.Contributions.Zip(second.Contributions))
        {
            Assert.Equal(a.Factor, b.Factor);
            Assert.Equal(a.Contribution, b.Contribution);
        }
    }
}
|
||||
@@ -0,0 +1,312 @@
|
||||
/**
|
||||
* Attestation Chain API Client
|
||||
* Sprint: SPRINT_4100_0001_0001_triage_models
|
||||
* Provides API client for verifying and fetching attestation chains.
|
||||
*/
|
||||
|
||||
import { Injectable, InjectionToken, inject, signal } from '@angular/core';
|
||||
import { HttpClient, HttpParams } from '@angular/common/http';
|
||||
import { Observable, of, map, shareReplay, catchError, throwError } from 'rxjs';
|
||||
|
||||
import {
|
||||
AttestationChain,
|
||||
AttestationNode,
|
||||
AttestationVerifyRequest,
|
||||
AttestationVerifyResult,
|
||||
DsseEnvelope,
|
||||
InTotoStatement,
|
||||
RekorLogEntry,
|
||||
SignerInfo,
|
||||
} from './attestation-chain.models';
|
||||
import { TenantActivationService } from '../auth/tenant-activation.service';
|
||||
import { generateTraceId } from './trace.util';
|
||||
|
||||
/**
 * Attestation Chain API interface.
 * Implemented by {@link AttestationChainHttpClient} (real backend) and
 * {@link AttestationChainMockClient} (tests/dev); consumers inject via
 * {@link ATTESTATION_CHAIN_API}.
 */
export interface AttestationChainApi {
  /** Verify a DSSE envelope. */
  verify(request: AttestationVerifyRequest): Observable<AttestationVerifyResult>;

  /** Get attestation chain for a digest. */
  getChain(digest: string, options?: AttestationQueryOptions): Observable<AttestationChain>;

  /** Get single attestation node by ID. */
  getNode(nodeId: string, options?: AttestationQueryOptions): Observable<AttestationNode>;

  /** List attestations for a subject digest. */
  listBySubject(
    subjectDigest: string,
    options?: AttestationQueryOptions
  ): Observable<AttestationNode[]>;

  /** Fetch Rekor log entry for an attestation. */
  getRekorEntry(uuid: string): Observable<RekorLogEntry>;

  /** Download raw DSSE envelope. */
  downloadEnvelope(nodeId: string): Observable<DsseEnvelope>;
}

/** Per-call query options for attestation chain requests. */
export interface AttestationQueryOptions {
  /** Tenant override; the HTTP client falls back to the active tenant when omitted. */
  readonly tenantId?: string;
  /** Trace ID propagated as X-Trace-Id; a fresh one is generated when omitted. */
  readonly traceId?: string;
  /** When true, ask the backend to include the Rekor transparency-log entry. */
  readonly include_rekor?: boolean;
  /** When true, ask the backend to include the signer's X.509 certificate chain. */
  readonly include_cert_chain?: boolean;
}

/** DI token so the HTTP or mock implementation can be swapped at bootstrap. */
export const ATTESTATION_CHAIN_API = new InjectionToken<AttestationChainApi>(
  'ATTESTATION_CHAIN_API'
);
|
||||
|
||||
/**
|
||||
* HTTP implementation of the Attestation Chain API.
|
||||
*/
|
||||
@Injectable()
|
||||
export class AttestationChainHttpClient implements AttestationChainApi {
|
||||
private readonly http = inject(HttpClient);
|
||||
private readonly tenantService = inject(TenantActivationService, { optional: true });
|
||||
|
||||
private readonly baseUrl = signal('/api/v1/attestor');
|
||||
private readonly rekorUrl = signal('https://rekor.sigstore.dev');
|
||||
|
||||
// Cache for verified chains
|
||||
private readonly chainCache = new Map<string, Observable<AttestationChain>>();
|
||||
private readonly cacheMaxAge = 300_000; // 5 minutes
|
||||
|
||||
verify(request: AttestationVerifyRequest): Observable<AttestationVerifyResult> {
|
||||
const url = `${this.baseUrl()}/verify`;
|
||||
const headers = this.buildHeaders();
|
||||
|
||||
return this.http.post<AttestationVerifyResult>(url, request, { headers }).pipe(
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
getChain(digest: string, options?: AttestationQueryOptions): Observable<AttestationChain> {
|
||||
const cacheKey = `chain:${digest}`;
|
||||
|
||||
if (this.chainCache.has(cacheKey)) {
|
||||
return this.chainCache.get(cacheKey)!;
|
||||
}
|
||||
|
||||
const url = `${this.baseUrl()}/chains/${encodeURIComponent(digest)}`;
|
||||
const params = this.buildParams(options);
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
const request$ = this.http.get<AttestationChain>(url, { params, headers }).pipe(
|
||||
shareReplay({ bufferSize: 1, refCount: true }),
|
||||
catchError(this.handleError)
|
||||
);
|
||||
|
||||
this.chainCache.set(cacheKey, request$);
|
||||
setTimeout(() => this.chainCache.delete(cacheKey), this.cacheMaxAge);
|
||||
|
||||
return request$;
|
||||
}
|
||||
|
||||
getNode(nodeId: string, options?: AttestationQueryOptions): Observable<AttestationNode> {
|
||||
const url = `${this.baseUrl()}/nodes/${encodeURIComponent(nodeId)}`;
|
||||
const params = this.buildParams(options);
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
return this.http.get<AttestationNode>(url, { params, headers }).pipe(
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
listBySubject(
|
||||
subjectDigest: string,
|
||||
options?: AttestationQueryOptions
|
||||
): Observable<AttestationNode[]> {
|
||||
const url = `${this.baseUrl()}/subjects/${encodeURIComponent(subjectDigest)}/attestations`;
|
||||
const params = this.buildParams(options);
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
return this.http.get<{ items: AttestationNode[] }>(url, { params, headers }).pipe(
|
||||
map((response) => response.items),
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
getRekorEntry(uuid: string): Observable<RekorLogEntry> {
|
||||
const url = `${this.rekorUrl()}/api/v1/log/entries/${encodeURIComponent(uuid)}`;
|
||||
|
||||
return this.http.get<Record<string, unknown>>(url).pipe(
|
||||
map((response) => this.parseRekorResponse(uuid, response)),
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
downloadEnvelope(nodeId: string): Observable<DsseEnvelope> {
|
||||
const url = `${this.baseUrl()}/nodes/${encodeURIComponent(nodeId)}/envelope`;
|
||||
const headers = this.buildHeaders();
|
||||
|
||||
return this.http.get<DsseEnvelope>(url, { headers }).pipe(catchError(this.handleError));
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate cached chain for a digest.
|
||||
*/
|
||||
invalidateCache(digest?: string): void {
|
||||
if (digest) {
|
||||
this.chainCache.delete(`chain:${digest}`);
|
||||
} else {
|
||||
this.chainCache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
private parseRekorResponse(uuid: string, response: Record<string, unknown>): RekorLogEntry {
|
||||
// Rekor returns { uuid: { body, integratedTime, logIndex, ... } }
|
||||
const entry = response[uuid] as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
uuid,
|
||||
log_index: entry['logIndex'] as number,
|
||||
log_id: entry['logID'] as string,
|
||||
integrated_time: new Date((entry['integratedTime'] as number) * 1000).toISOString(),
|
||||
signed_entry_timestamp: entry['verification'] as string,
|
||||
inclusion_proof: entry['inclusionProof']
|
||||
? {
|
||||
log_index: (entry['inclusionProof'] as Record<string, unknown>)['logIndex'] as number,
|
||||
root_hash: (entry['inclusionProof'] as Record<string, unknown>)['rootHash'] as string,
|
||||
tree_size: (entry['inclusionProof'] as Record<string, unknown>)['treeSize'] as number,
|
||||
hashes: (entry['inclusionProof'] as Record<string, unknown>)['hashes'] as string[],
|
||||
}
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
private buildParams(options?: AttestationQueryOptions): HttpParams {
|
||||
let params = new HttpParams();
|
||||
|
||||
if (options?.include_rekor) {
|
||||
params = params.set('include_rekor', 'true');
|
||||
}
|
||||
if (options?.include_cert_chain) {
|
||||
params = params.set('include_cert_chain', 'true');
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
private buildHeaders(options?: AttestationQueryOptions): Record<string, string> {
|
||||
const headers: Record<string, string> = {};
|
||||
|
||||
const tenantId = options?.tenantId ?? this.tenantService?.activeTenantId();
|
||||
if (tenantId) {
|
||||
headers['X-Tenant-Id'] = tenantId;
|
||||
}
|
||||
|
||||
const traceId = options?.traceId ?? generateTraceId();
|
||||
headers['X-Trace-Id'] = traceId;
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
private handleError(error: unknown): Observable<never> {
|
||||
console.error('[AttestationChainClient] API error:', error);
|
||||
return throwError(() => error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Mock implementation for testing and development.
 *
 * Serves a canned two-node chain (SBOM -> scan) from memory; no HTTP calls.
 * NOTE(review): timestamps are captured at instance construction via
 * `new Date()`, so values differ between instances/runs — fine for a mock,
 * but not suitable for snapshot-style assertions.
 */
@Injectable()
export class AttestationChainMockClient implements AttestationChainApi {
  // Canned data returned (in whole or part) by every method below.
  private readonly mockChain: AttestationChain = {
    chain_id: 'chain-mock-001',
    nodes: [
      // Root node: SBOM attestation.
      {
        node_id: 'node-001',
        type: 'sbom',
        predicate_type: 'https://spdx.dev/Document',
        subjects: [
          {
            name: 'myapp:1.0.0',
            digest: { sha256: 'abc123def456...' },
          },
        ],
        signer: {
          key_id: 'keyid:abc123',
          identity: 'build@example.com',
          algorithm: 'ecdsa-p256',
          trusted: true,
        },
        created_at: new Date().toISOString(),
      },
      // Child node: vulnerability-scan attestation linked to the SBOM via parent_id.
      {
        node_id: 'node-002',
        type: 'scan',
        predicate_type: 'https://stellaops.io/attestation/vuln-scan/v1',
        subjects: [
          {
            name: 'myapp:1.0.0',
            digest: { sha256: 'abc123def456...' },
          },
        ],
        signer: {
          key_id: 'keyid:scanner001',
          identity: 'scanner@stellaops.io',
          algorithm: 'ecdsa-p256',
          trusted: true,
        },
        created_at: new Date().toISOString(),
        parent_id: 'node-001',
      },
    ],
    status: 'verified',
    verified_at: new Date().toISOString(),
    rekor_entry: {
      log_index: 12345678,
      log_id: 'c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d',
      uuid: 'mock-uuid-12345',
      integrated_time: new Date().toISOString(),
    },
  };

  // Always succeeds; the request is ignored.
  verify(request: AttestationVerifyRequest): Observable<AttestationVerifyResult> {
    return of({
      valid: true,
      status: 'verified' as const,
      signer: {
        key_id: 'keyid:mock',
        identity: 'mock@example.com',
        trusted: true,
      },
    });
  }

  // Returns the canned chain, rebranded with the requested digest.
  getChain(digest: string, options?: AttestationQueryOptions): Observable<AttestationChain> {
    return of({ ...this.mockChain, chain_id: `chain:${digest}` });
  }

  // Errors (like the HTTP client would) when the node ID is not in the canned chain.
  getNode(nodeId: string, options?: AttestationQueryOptions): Observable<AttestationNode> {
    const node = this.mockChain.nodes.find((n) => n.node_id === nodeId);
    return node ? of(node) : throwError(() => new Error(`Node not found: ${nodeId}`));
  }

  // Returns every canned node regardless of the digest queried.
  listBySubject(
    subjectDigest: string,
    options?: AttestationQueryOptions
  ): Observable<AttestationNode[]> {
    return of(this.mockChain.nodes);
  }

  // Returns the canned Rekor entry regardless of UUID (always present on mockChain).
  getRekorEntry(uuid: string): Observable<RekorLogEntry> {
    return of(this.mockChain.rekor_entry!);
  }

  // Returns a minimal, syntactically valid DSSE envelope with a placeholder signature.
  downloadEnvelope(nodeId: string): Observable<DsseEnvelope> {
    return of({
      payloadType: 'YXBwbGljYXRpb24vdm5kLmluLXRvdG8ranNvbg==', // application/vnd.in-toto+json
      payload: btoa(JSON.stringify({ _type: 'mock', subject: [], predicateType: 'mock' })),
      signatures: [
        {
          keyid: 'keyid:mock',
          sig: 'mock-signature-base64',
        },
      ],
    });
  }
}
|
||||
@@ -0,0 +1,291 @@
|
||||
/**
|
||||
* Attestation Chain Models
|
||||
* Sprint: SPRINT_4100_0001_0001_triage_models
|
||||
* DSSE (Dead Simple Signing Envelope) and in-toto model types.
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// DSSE Envelope Types
|
||||
// ============================================================================
|
||||
|
||||
/**
 * DSSE (Dead Simple Signing Envelope) structure.
 * @see https://github.com/secure-systems-lab/dsse
 */
export interface DsseEnvelope {
  /**
   * Payload type URI.
   * NOTE(review): the DSSE spec defines payloadType as a *plaintext* media-type
   * string (e.g. "application/vnd.in-toto+json"), not base64 — yet the mock
   * client in this codebase supplies a base64-encoded value. Confirm which
   * convention the backend actually emits before relying on this field.
   */
  readonly payloadType: string;
  /** Base64-encoded payload (typically an in-toto Statement as JSON). */
  readonly payload: string;
  /** Array of signatures; one per signing key. */
  readonly signatures: readonly DsseSignature[];
}

/**
 * DSSE signature structure.
 */
export interface DsseSignature {
  /** Key identifier (fingerprint, URI, or key ID). */
  readonly keyid: string;
  /** Base64-encoded signature over the DSSE PAE of payloadType + payload. */
  readonly sig: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// in-toto Statement Types
|
||||
// ============================================================================
|
||||
|
||||
/**
 * in-toto Statement wrapper (v1.0) — the decoded form of a DSSE payload.
 * @see https://github.com/in-toto/attestation
 */
export interface InTotoStatement<T = unknown> {
  /** Schema version, should be "https://in-toto.io/Statement/v1". */
  readonly _type: string;
  /** Subject artifacts this statement is about. */
  readonly subject: readonly InTotoSubject[];
  /** Predicate type URI (see {@link PredicateTypes} for well-known values). */
  readonly predicateType: string;
  /** Predicate payload (shape depends on predicateType; hence the type parameter). */
  readonly predicate: T;
}

/**
 * in-toto Subject (artifact reference).
 */
export interface InTotoSubject {
  /** Artifact name or identifier. */
  readonly name: string;
  /** Digest map (algorithm name, e.g. "sha256" → lowercase hex value). */
  readonly digest: Record<string, string>;
}
|
||||
|
||||
// ============================================================================
|
||||
// Attestation Chain Types
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Attestation chain representing linked evidence.
 * Nodes reference their predecessor via {@link AttestationNode.parent_id}.
 */
export interface AttestationChain {
  /** Chain identifier (root envelope digest). */
  readonly chain_id: string;
  /** Ordered list of attestation nodes in the chain. */
  readonly nodes: readonly AttestationNode[];
  /** Chain verification status. */
  readonly status: AttestationChainStatus;
  /** When the chain was verified (RFC 3339 timestamp). */
  readonly verified_at: string;
  /** Rekor log entry if transparency-logged. */
  readonly rekor_entry?: RekorLogEntry;
}

/**
 * Single node in an attestation chain.
 */
export interface AttestationNode {
  /** Node identifier (envelope digest). */
  readonly node_id: string;
  /** Node type (sbom, scan, vex, policy, witness, ...). */
  readonly type: AttestationNodeType;
  /** Predicate type URI from the statement. */
  readonly predicate_type: string;
  /** Subject digests this node attests. */
  readonly subjects: readonly InTotoSubject[];
  /** Key that signed this node. */
  readonly signer: SignerInfo;
  /** When this attestation was created (RFC 3339 timestamp). */
  readonly created_at: string;
  /** Parent node ID (for chain ordering); absent for the root node. */
  readonly parent_id?: string;
  /** Node-specific metadata. */
  readonly metadata?: Record<string, unknown>;
}

/**
 * Attestation node types.
 */
export type AttestationNodeType =
  | 'sbom'
  | 'scan'
  | 'vex'
  | 'policy'
  | 'witness'
  | 'provenance'
  | 'custom';

/**
 * Signer information.
 */
export interface SignerInfo {
  /** Key identifier. */
  readonly key_id: string;
  /** Signer identity (email, URI, etc.). */
  readonly identity?: string;
  /** Key algorithm (ecdsa-p256, ed25519, rsa-pss). */
  readonly algorithm?: string;
  /** Whether the key is from a trusted root. */
  readonly trusted: boolean;
  /** Certificate chain if using X.509. */
  readonly cert_chain?: readonly string[];
}

/**
 * Chain verification status.
 * Also used for single-envelope results in {@link AttestationVerifyResult}.
 */
export type AttestationChainStatus =
  | 'verified'
  | 'signature_invalid'
  | 'chain_broken'
  | 'expired'
  | 'untrusted_signer'
  | 'pending';
|
||||
|
||||
// ============================================================================
|
||||
// Rekor Integration
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Rekor transparency log entry (normalized from Rekor's keyed response shape
 * by AttestationChainHttpClient.parseRekorResponse).
 */
export interface RekorLogEntry {
  /** Log index. */
  readonly log_index: number;
  /** Log ID (tree ID). */
  readonly log_id: string;
  /** Entry UUID. */
  readonly uuid: string;
  /** Integrated timestamp (RFC 3339; converted from Rekor's Unix seconds). */
  readonly integrated_time: string;
  /** Inclusion proof, when the server returned one. */
  readonly inclusion_proof?: RekorInclusionProof;
  /** Signed entry timestamp. */
  readonly signed_entry_timestamp?: string;
}

/**
 * Rekor Merkle tree inclusion proof.
 */
export interface RekorInclusionProof {
  /** Log index. */
  readonly log_index: number;
  /** Root hash. */
  readonly root_hash: string;
  /** Tree size at time of inclusion. */
  readonly tree_size: number;
  /** Merkle proof hashes, leaf to root. */
  readonly hashes: readonly string[];
}
|
||||
|
||||
// ============================================================================
|
||||
// Verification Types
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Attestation verification request (POST body for the attestor /verify endpoint).
 */
export interface AttestationVerifyRequest {
  /** DSSE envelope to verify. */
  readonly envelope: DsseEnvelope;
  /** Expected predicate type (optional validation). */
  readonly expected_predicate_type?: string;
  /** Whether to verify Rekor inclusion. */
  readonly verify_rekor?: boolean;
  /** Trusted key IDs for signature verification. */
  readonly trusted_keys?: readonly string[];
}

/**
 * Attestation verification result.
 */
export interface AttestationVerifyResult {
  /** Whether verification succeeded. */
  readonly valid: boolean;
  /** Verification status (shares the chain status vocabulary). */
  readonly status: AttestationChainStatus;
  /** Parsed statement (present only if the signature was valid). */
  readonly statement?: InTotoStatement;
  /** Signer information. */
  readonly signer?: SignerInfo;
  /** Rekor entry (if Rekor inclusion was verified). */
  readonly rekor_entry?: RekorLogEntry;
  /** Error message (if verification failed). */
  readonly error?: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// Predicate Types
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Well-known predicate type URIs, matching values seen in
 * {@link InTotoStatement.predicateType} / {@link AttestationNode.predicate_type}.
 */
export const PredicateTypes = {
  /** SPDX SBOM. */
  Spdx: 'https://spdx.dev/Document',
  /** CycloneDX SBOM. */
  CycloneDx: 'https://cyclonedx.org/bom',
  /** SLSA Provenance v1. */
  SlsaProvenance: 'https://slsa.dev/provenance/v1',
  /** StellaOps Vulnerability Scan. */
  VulnScan: 'https://stellaops.io/attestation/vuln-scan/v1',
  /** StellaOps Reachability Witness. */
  Witness: 'https://stellaops.io/attestation/witness/v1',
  /** StellaOps Policy Decision. */
  PolicyDecision: 'https://stellaops.io/attestation/policy-decision/v1',
  /** OpenVEX. */
  OpenVex: 'https://openvex.dev/ns/v0.2.0',
} as const;

/** Union of the known predicate URI string literals above. */
export type PredicateType = typeof PredicateTypes[keyof typeof PredicateTypes];
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Decodes base64-encoded DSSE payload.
|
||||
*/
|
||||
export function decodeDssePayload<T>(envelope: DsseEnvelope): T {
|
||||
const decoded = atob(envelope.payload);
|
||||
return JSON.parse(decoded) as T;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the digest from a subject by algorithm preference.
|
||||
*/
|
||||
export function getSubjectDigest(
|
||||
subject: InTotoSubject,
|
||||
preferredAlgorithm: string = 'sha256'
|
||||
): string | undefined {
|
||||
return subject.digest[preferredAlgorithm] ?? Object.values(subject.digest)[0];
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a chain is fully verified.
|
||||
*/
|
||||
export function isChainVerified(chain: AttestationChain): boolean {
|
||||
return chain.status === 'verified';
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets human-readable status label.
|
||||
*/
|
||||
export function getChainStatusLabel(status: AttestationChainStatus): string {
|
||||
switch (status) {
|
||||
case 'verified':
|
||||
return 'Verified';
|
||||
case 'signature_invalid':
|
||||
return 'Invalid Signature';
|
||||
case 'chain_broken':
|
||||
return 'Chain Broken';
|
||||
case 'expired':
|
||||
return 'Expired';
|
||||
case 'untrusted_signer':
|
||||
return 'Untrusted Signer';
|
||||
case 'pending':
|
||||
return 'Pending Verification';
|
||||
default:
|
||||
return 'Unknown';
|
||||
}
|
||||
}
|
||||
351
src/Web/StellaOps.Web/src/app/core/api/triage-evidence.client.ts
Normal file
351
src/Web/StellaOps.Web/src/app/core/api/triage-evidence.client.ts
Normal file
@@ -0,0 +1,351 @@
|
||||
/**
|
||||
* Triage Evidence API Client
|
||||
* Sprint: SPRINT_4100_0001_0001_triage_models
|
||||
* Provides API client for fetching finding evidence from Scanner service.
|
||||
*/
|
||||
|
||||
import { Injectable, InjectionToken, inject, signal } from '@angular/core';
|
||||
import { HttpClient, HttpParams } from '@angular/common/http';
|
||||
import { Observable, of, map, shareReplay, catchError, throwError } from 'rxjs';
|
||||
|
||||
import {
|
||||
FindingEvidenceResponse,
|
||||
FindingEvidenceRequest,
|
||||
FindingEvidenceListResponse,
|
||||
ComponentRef,
|
||||
ScoreExplanation,
|
||||
VexEvidence,
|
||||
BoundaryProof,
|
||||
EntrypointProof,
|
||||
} from './triage-evidence.models';
|
||||
import { TenantActivationService } from '../auth/tenant-activation.service';
|
||||
import { generateTraceId } from './trace.util';
|
||||
|
||||
/**
|
||||
* Triage Evidence API interface.
|
||||
*/
|
||||
export interface TriageEvidenceApi {
|
||||
/** Get evidence for a specific finding. */
|
||||
getFindingEvidence(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceResponse>;
|
||||
|
||||
/** Get evidence by CVE ID. */
|
||||
getEvidenceByCve(
|
||||
cve: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceListResponse>;
|
||||
|
||||
/** Get evidence by component PURL. */
|
||||
getEvidenceByComponent(
|
||||
purl: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceListResponse>;
|
||||
|
||||
/** List all evidence with pagination. */
|
||||
list(
|
||||
options?: TriageEvidenceQueryOptions & PaginationOptions
|
||||
): Observable<FindingEvidenceListResponse>;
|
||||
|
||||
/** Get score explanation for a finding. */
|
||||
getScoreExplanation(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<ScoreExplanation>;
|
||||
|
||||
/** Get VEX evidence for a finding. */
|
||||
getVexEvidence(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<VexEvidence | null>;
|
||||
}
|
||||
|
||||
/**
 * Options accepted by all Triage Evidence queries.
 *
 * `tenantId` and `traceId` travel as request headers (see the HTTP client's
 * buildHeaders); the remaining keys are forwarded as query parameters.
 * snake_case keys mirror the backend wire format.
 */
export interface TriageEvidenceQueryOptions {
  /** Overrides the active tenant for the X-Tenant-Id header. */
  readonly tenantId?: string;
  /** Sent as a `projectId` query parameter. */
  readonly projectId?: string;
  /** Overrides the generated trace id for the X-Trace-Id header. */
  readonly traceId?: string;
  /** Include the reachable call path in the response. */
  readonly include_path?: boolean;
  /** Include boundary proof in the response. */
  readonly include_boundary?: boolean;
  /** Include VEX evidence in the response. */
  readonly include_vex?: boolean;
  /** Include the score explanation in the response. */
  readonly include_score?: boolean;
}
|
||||
|
||||
/**
 * Pagination controls for list queries.
 * snake_case mirrors the backend wire format.
 */
export interface PaginationOptions {
  /** Page number; the mock defaults this to 1 when omitted. */
  readonly page?: number;
  /** Items per page; the mock defaults this to 20 when omitted. */
  readonly page_size?: number;
}
|
||||
|
||||
/** DI token for swapping the real HTTP client for the mock in tests/dev. */
export const TRIAGE_EVIDENCE_API = new InjectionToken<TriageEvidenceApi>('TRIAGE_EVIDENCE_API');
|
||||
|
||||
/**
|
||||
* HTTP implementation of the Triage Evidence API.
|
||||
*/
|
||||
@Injectable()
|
||||
export class TriageEvidenceHttpClient implements TriageEvidenceApi {
|
||||
private readonly http = inject(HttpClient);
|
||||
private readonly tenantService = inject(TenantActivationService, { optional: true });
|
||||
|
||||
private readonly baseUrl = signal('/api/v1/scanner');
|
||||
|
||||
// Cache for frequently accessed evidence
|
||||
private readonly evidenceCache = new Map<string, Observable<FindingEvidenceResponse>>();
|
||||
private readonly cacheMaxAge = 60_000; // 1 minute
|
||||
|
||||
getFindingEvidence(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceResponse> {
|
||||
const cacheKey = this.buildCacheKey('finding', findingId, options);
|
||||
|
||||
if (this.evidenceCache.has(cacheKey)) {
|
||||
return this.evidenceCache.get(cacheKey)!;
|
||||
}
|
||||
|
||||
const url = `${this.baseUrl()}/evidence/${encodeURIComponent(findingId)}`;
|
||||
const params = this.buildParams(options);
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
const request$ = this.http.get<FindingEvidenceResponse>(url, { params, headers }).pipe(
|
||||
shareReplay({ bufferSize: 1, refCount: true }),
|
||||
catchError(this.handleError)
|
||||
);
|
||||
|
||||
this.evidenceCache.set(cacheKey, request$);
|
||||
setTimeout(() => this.evidenceCache.delete(cacheKey), this.cacheMaxAge);
|
||||
|
||||
return request$;
|
||||
}
|
||||
|
||||
getEvidenceByCve(
|
||||
cve: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceListResponse> {
|
||||
const url = `${this.baseUrl()}/evidence`;
|
||||
const params = this.buildParams({ ...options, cve });
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
return this.http.get<FindingEvidenceListResponse>(url, { params, headers }).pipe(
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
getEvidenceByComponent(
|
||||
purl: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<FindingEvidenceListResponse> {
|
||||
const url = `${this.baseUrl()}/evidence`;
|
||||
const params = this.buildParams({ ...options, component_purl: purl });
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
return this.http.get<FindingEvidenceListResponse>(url, { params, headers }).pipe(
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
list(
|
||||
options?: TriageEvidenceQueryOptions & PaginationOptions
|
||||
): Observable<FindingEvidenceListResponse> {
|
||||
const url = `${this.baseUrl()}/evidence`;
|
||||
const params = this.buildParams(options);
|
||||
const headers = this.buildHeaders(options);
|
||||
|
||||
return this.http.get<FindingEvidenceListResponse>(url, { params, headers }).pipe(
|
||||
catchError(this.handleError)
|
||||
);
|
||||
}
|
||||
|
||||
getScoreExplanation(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<ScoreExplanation> {
|
||||
return this.getFindingEvidence(findingId, { ...options, include_score: true }).pipe(
|
||||
map((evidence) => {
|
||||
if (!evidence.score_explain) {
|
||||
throw new Error(`No score explanation available for finding ${findingId}`);
|
||||
}
|
||||
return evidence.score_explain;
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
getVexEvidence(
|
||||
findingId: string,
|
||||
options?: TriageEvidenceQueryOptions
|
||||
): Observable<VexEvidence | null> {
|
||||
return this.getFindingEvidence(findingId, { ...options, include_vex: true }).pipe(
|
||||
map((evidence) => evidence.vex ?? null)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate cached evidence for a finding.
|
||||
*/
|
||||
invalidateCache(findingId?: string): void {
|
||||
if (findingId) {
|
||||
// Remove all cache entries for this finding
|
||||
for (const key of this.evidenceCache.keys()) {
|
||||
if (key.includes(findingId)) {
|
||||
this.evidenceCache.delete(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.evidenceCache.clear();
|
||||
}
|
||||
}
|
||||
|
||||
private buildParams(options?: Record<string, unknown>): HttpParams {
|
||||
let params = new HttpParams();
|
||||
|
||||
if (options) {
|
||||
for (const [key, value] of Object.entries(options)) {
|
||||
if (value !== undefined && value !== null && key !== 'tenantId' && key !== 'traceId') {
|
||||
params = params.set(key, String(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
private buildHeaders(options?: TriageEvidenceQueryOptions): Record<string, string> {
|
||||
const headers: Record<string, string> = {};
|
||||
|
||||
const tenantId = options?.tenantId ?? this.tenantService?.activeTenantId();
|
||||
if (tenantId) {
|
||||
headers['X-Tenant-Id'] = tenantId;
|
||||
}
|
||||
|
||||
const traceId = options?.traceId ?? generateTraceId();
|
||||
headers['X-Trace-Id'] = traceId;
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
private buildCacheKey(type: string, id: string, options?: TriageEvidenceQueryOptions): string {
|
||||
const opts = JSON.stringify(options ?? {});
|
||||
return `${type}:${id}:${opts}`;
|
||||
}
|
||||
|
||||
private handleError(error: unknown): Observable<never> {
|
||||
console.error('[TriageEvidenceClient] API error:', error);
|
||||
return throwError(() => error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Mock implementation for testing and development.
 *
 * Serves a fixed Log4Shell (CVE-2021-44228) fixture from memory; the
 * fixture's timestamps are captured once, at instance construction.
 */
@Injectable()
export class TriageEvidenceMockClient implements TriageEvidenceApi {
  // Canned evidence returned (with minor per-call substitutions) by every method.
  private readonly mockEvidence: FindingEvidenceResponse = {
    finding_id: 'finding-mock-001',
    cve: 'CVE-2021-44228',
    component: {
      purl: 'pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1',
      name: 'log4j-core',
      version: '2.14.1',
      type: 'maven',
    },
    // Call chain from the application entrypoint down to the vulnerable logger.
    reachable_path: [
      'com.example.App.main',
      'com.example.Service.process',
      'org.apache.logging.log4j.Logger.log',
    ],
    entrypoint: {
      type: 'http_handler',
      route: '/api/v1/process',
      method: 'POST',
      auth: 'required',
      fqn: 'com.example.Controller.process',
    },
    // Additive breakdown: 50 (cvss_base) + 25 (reachability) = 75 total.
    score_explain: {
      kind: 'stellaops_risk_v1',
      risk_score: 75.0,
      contributions: [
        {
          factor: 'cvss_base',
          weight: 5.0,
          raw_value: 10.0,
          contribution: 50.0,
          explanation: 'Critical CVSS base score',
          source: 'nvd',
        },
        {
          factor: 'reachability',
          weight: 1.0,
          raw_value: 25.0,
          contribution: 25.0,
          explanation: 'Reachable from HTTP entrypoint',
          source: 'scan',
        },
      ],
      last_seen: new Date().toISOString(),
      algorithm_version: '1.0.0',
      summary: 'High risk (75/100) driven by cvss_base and reachability',
    },
    last_seen: new Date().toISOString(),
    attestation_refs: ['dsse:sha256:mock123'],
  };

  /** Returns the fixture with the requested finding id substituted in. */
  getFindingEvidence(
    findingId: string,
    options?: TriageEvidenceQueryOptions
  ): Observable<FindingEvidenceResponse> {
    return of({ ...this.mockEvidence, finding_id: findingId });
  }

  /** Returns a one-item page with the requested CVE substituted in. */
  getEvidenceByCve(
    cve: string,
    options?: TriageEvidenceQueryOptions
  ): Observable<FindingEvidenceListResponse> {
    return of({
      items: [{ ...this.mockEvidence, cve }],
      total: 1,
      page: 1,
      page_size: 20,
    });
  }

  /** Returns a one-item page with the requested PURL substituted in. */
  getEvidenceByComponent(
    purl: string,
    options?: TriageEvidenceQueryOptions
  ): Observable<FindingEvidenceListResponse> {
    return of({
      items: [
        {
          ...this.mockEvidence,
          // Fixture always has a component, hence the non-null assertion.
          component: { ...this.mockEvidence.component!, purl },
        },
      ],
      total: 1,
      page: 1,
      page_size: 20,
    });
  }

  /** Returns a one-item page honoring the caller's pagination options. */
  list(
    options?: TriageEvidenceQueryOptions & PaginationOptions
  ): Observable<FindingEvidenceListResponse> {
    return of({
      items: [this.mockEvidence],
      total: 1,
      page: options?.page ?? 1,
      page_size: options?.page_size ?? 20,
    });
  }

  /** Returns the fixture's score explanation (always present on the mock). */
  getScoreExplanation(
    findingId: string,
    options?: TriageEvidenceQueryOptions
  ): Observable<ScoreExplanation> {
    return of(this.mockEvidence.score_explain!);
  }

  /** The mock carries no VEX evidence; always emits null. */
  getVexEvidence(
    findingId: string,
    options?: TriageEvidenceQueryOptions
  ): Observable<VexEvidence | null> {
    return of(null);
  }
}
|
||||
265
src/Web/StellaOps.Web/src/app/core/api/triage-evidence.models.ts
Normal file
265
src/Web/StellaOps.Web/src/app/core/api/triage-evidence.models.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
/**
|
||||
* Triage Evidence Models
|
||||
* Sprint: SPRINT_4100_0001_0001_triage_models
|
||||
* Mirrors backend contracts from Scanner.WebService/Contracts/FindingEvidenceContracts.cs
|
||||
*/
|
||||
|
||||
// ============================================================================
|
||||
// Core Evidence Response
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Unified evidence response for a finding, combining reachability, boundary,
 * VEX evidence, and score explanation.
 *
 * Mirrors Scanner.WebService/Contracts/FindingEvidenceContracts.cs; field
 * names are snake_case to match the wire format.
 */
export interface FindingEvidenceResponse {
  /** Identifier of the finding this evidence belongs to. */
  readonly finding_id: string;
  /** CVE identifier, e.g. "CVE-2021-44228". */
  readonly cve: string;
  /** Affected component (package), when resolved. */
  readonly component?: ComponentRef;
  /** Call chain from an entrypoint to the vulnerable symbol (entrypoint first, per the mock fixture). */
  readonly reachable_path?: readonly string[];
  /** Proof of how the vulnerable code is exposed as an entrypoint. */
  readonly entrypoint?: EntrypointProof;
  /** Surface / exposure / controls proof for the finding's boundary. */
  readonly boundary?: BoundaryProof;
  /** VEX statement evidence, when one applies to this finding. */
  readonly vex?: VexEvidence;
  /** Additive risk-score breakdown. */
  readonly score_explain?: ScoreExplanation;
  /** When the evidence was last observed. */
  readonly last_seen: string; // ISO 8601
  /** Optional expiry of the evidence (ISO 8601). */
  readonly expires_at?: string;
  /** Attestation references backing this evidence, e.g. "dsse:sha256:...". */
  readonly attestation_refs?: readonly string[];
}

/**
 * Reference to a component (package) by PURL and version.
 */
export interface ComponentRef {
  /** Package URL uniquely identifying the component. */
  readonly purl: string;
  /** Package name, e.g. "log4j-core". */
  readonly name: string;
  /** Package version string. */
  readonly version: string;
  /** Package ecosystem/type, e.g. "maven". */
  readonly type: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// Entrypoint Proof
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Proof of how code is exposed as an entrypoint.
 */
export interface EntrypointProof {
  /** Entrypoint kind. */
  readonly type: string; // http_handler, grpc_method, cli_command, etc.
  /** Route/path for network entrypoints, e.g. "/api/v1/process". */
  readonly route?: string;
  /** HTTP method for http_handler entrypoints, e.g. "POST". */
  readonly method?: string;
  /** Authentication requirement at the entrypoint. */
  readonly auth?: string; // none, optional, required
  /** Lifecycle phase in which the entrypoint runs. */
  readonly phase?: string; // startup, runtime, shutdown
  /** Fully qualified name of the entrypoint symbol. */
  readonly fqn: string;
  /** Source location of the entrypoint, when known. */
  readonly location?: SourceLocation;
}

/**
 * Source file location reference.
 */
export interface SourceLocation {
  /** File path. */
  readonly file: string;
  /** Line within the file, when known. */
  readonly line?: number;
  /** Column within the line, when known. */
  readonly column?: number;
}
|
||||
|
||||
// ============================================================================
|
||||
// Boundary Proof
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Boundary proof describing surface exposure and controls.
 */
export interface BoundaryProof {
  /** Discriminator for the boundary proof kind. */
  readonly kind: string;
  /** What attack surface is exposed. */
  readonly surface?: SurfaceDescriptor;
  /** How the surface is exposed (level, zone, internet-facing). */
  readonly exposure?: ExposureDescriptor;
  /** Authentication requirements guarding the surface. */
  readonly auth?: AuthDescriptor;
  /** Security controls observed on the boundary. */
  readonly controls?: readonly ControlDescriptor[];
  /** When this proof was last observed (ISO 8601). */
  readonly last_seen: string;
  /** Confidence in the proof. Scale not defined here — confirm against the backend contract. */
  readonly confidence: number;
}

/**
 * Describes what attack surface is exposed.
 */
export interface SurfaceDescriptor {
  /** Surface kind. */
  readonly type: string;
  /** Network protocol, when applicable. */
  readonly protocol?: string;
  /** Listening port, when applicable. */
  readonly port?: number;
}

/**
 * Describes how the surface is exposed.
 */
export interface ExposureDescriptor {
  /** Exposure level. */
  readonly level: string; // public, internal, private
  /** True when the surface is reachable from the internet. */
  readonly internet_facing: boolean;
  /** Network zone label, when known. */
  readonly zone?: string;
}

/**
 * Describes authentication requirements.
 */
export interface AuthDescriptor {
  /** Whether authentication is required to reach the surface. */
  readonly required: boolean;
  /** Authentication mechanism, when known. */
  readonly type?: string;
  /** Roles permitted, when role-based access applies. */
  readonly roles?: readonly string[];
}

/**
 * Describes a security control.
 */
export interface ControlDescriptor {
  /** Control kind. */
  readonly type: string;
  /** Whether the control is currently active. */
  readonly active: boolean;
  /** Free-form configuration detail, when available. */
  readonly config?: string;
}
|
||||
|
||||
// ============================================================================
|
||||
// VEX Evidence
|
||||
// ============================================================================
|
||||
|
||||
/**
 * VEX (Vulnerability Exploitability eXchange) evidence.
 */
export interface VexEvidence {
  /** Exploitability status per the OpenVEX vocabulary. */
  readonly status: VexStatus;
  /** Justification for the status (typically accompanies `not_affected`). */
  readonly justification?: string;
  /** Impact statement, when provided. */
  readonly impact?: string;
  /** Recommended action, when provided. */
  readonly action?: string;
  /** Reference to the attestation backing this statement. */
  readonly attestation_ref?: string;
  /** When the statement was issued (ISO 8601). */
  readonly issued_at?: string;
  /** Expiry of the statement (ISO 8601); consumed by isVexValid(). */
  readonly expires_at?: string;
  /** Origin of the statement. */
  readonly source?: string;
}

/**
 * VEX status values per OpenVEX specification.
 */
export type VexStatus = 'not_affected' | 'affected' | 'fixed' | 'under_investigation';
|
||||
|
||||
// ============================================================================
|
||||
// Score Explanation
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Score explanation with additive breakdown of risk factors.
 */
export interface ScoreExplanation {
  /** Algorithm discriminator, e.g. "stellaops_risk_v1". */
  readonly kind: string;
  /** Final risk score; getSeverityLabel() maps it onto severity bands. */
  readonly risk_score: number;
  /** Per-factor contributions that sum toward the score. */
  readonly contributions?: readonly ScoreContribution[];
  /** When the score was last computed/observed (ISO 8601). */
  readonly last_seen: string;
  /** Version of the scoring algorithm, e.g. "1.0.0". */
  readonly algorithm_version?: string;
  /** Reference to the underlying evidence record. */
  readonly evidence_ref?: string;
  /** Human-readable one-line summary of the score. */
  readonly summary?: string;
  /** Adjustments applied after the base calculation. */
  readonly modifiers?: readonly ScoreModifier[];
}

/**
 * Individual contribution to the risk score.
 */
export interface ScoreContribution {
  /** Factor name; see ScoreFactors for well-known values. */
  readonly factor: string;
  /** Weight applied to the raw value. */
  readonly weight: number;
  /** Raw factor value before weighting. */
  readonly raw_value: number;
  /** Points added to the score (appears to be weight * raw_value per the mock fixture — confirm with backend). */
  readonly contribution: number;
  /** Human-readable explanation of the contribution. */
  readonly explanation?: string;
  /** Data source of the factor, e.g. "nvd" or "scan". */
  readonly source?: string;
  /** When the factor value was last updated (ISO 8601). */
  readonly updated_at?: string;
  /** Confidence in the factor value, when provided. */
  readonly confidence?: number;
}

/**
 * Modifier applied to the score after base calculation.
 */
export interface ScoreModifier {
  /** Modifier kind. */
  readonly type: string;
  /** Score before the modifier was applied. */
  readonly before: number;
  /** Score after the modifier was applied. */
  readonly after: number;
  /** Human-readable reason for the modifier. */
  readonly reason?: string;
  /** Policy that mandated the modifier, if any. */
  readonly policy_ref?: string;
}

/**
 * Well-known score factor names (wire values used in ScoreContribution.factor).
 */
export const ScoreFactors = {
  CvssBase: 'cvss_base',
  CvssEnvironmental: 'cvss_environmental',
  Epss: 'epss',
  Reachability: 'reachability',
  GateMultiplier: 'gate_multiplier',
  VexOverride: 'vex_override',
  TimeDecay: 'time_decay',
  ExposureSurface: 'exposure_surface',
  KnownExploitation: 'known_exploitation',
  AssetCriticality: 'asset_criticality',
} as const;

/** Union of the well-known factor string values. */
export type ScoreFactor = typeof ScoreFactors[keyof typeof ScoreFactors];
|
||||
|
||||
// ============================================================================
|
||||
// Query Interfaces
|
||||
// ============================================================================
|
||||
|
||||
/**
 * Request for finding evidence: selector fields (finding_id / cve /
 * component_purl) plus include_* toggles controlling which evidence
 * sections the backend returns.
 */
export interface FindingEvidenceRequest {
  /** Select by finding identifier. */
  readonly finding_id?: string;
  /** Select by CVE identifier. */
  readonly cve?: string;
  /** Select by component Package URL. */
  readonly component_purl?: string;
  /** Include the reachable call path. */
  readonly include_path?: boolean;
  /** Include boundary proof. */
  readonly include_boundary?: boolean;
  /** Include VEX evidence. */
  readonly include_vex?: boolean;
  /** Include the score explanation. */
  readonly include_score?: boolean;
}

/**
 * List response for multiple findings.
 */
export interface FindingEvidenceListResponse {
  /** Evidence items for the current page. */
  readonly items: readonly FindingEvidenceResponse[];
  /** Total matching items across all pages. */
  readonly total: number;
  /** Current page number. */
  readonly page: number;
  /** Page size used for this response. */
  readonly page_size: number;
}
|
||||
|
||||
// ============================================================================
|
||||
// Severity Helpers
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Returns severity label based on score.
|
||||
*/
|
||||
export function getSeverityLabel(score: number): 'critical' | 'high' | 'medium' | 'low' | 'minimal' {
|
||||
if (score >= 80) return 'critical';
|
||||
if (score >= 60) return 'high';
|
||||
if (score >= 40) return 'medium';
|
||||
if (score >= 20) return 'low';
|
||||
return 'minimal';
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns CSS class for severity.
|
||||
*/
|
||||
export function getSeverityClass(score: number): string {
|
||||
return `severity-${getSeverityLabel(score)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if VEX status indicates non-exploitability.
|
||||
*/
|
||||
export function isVexNotAffected(vex?: VexEvidence): boolean {
|
||||
return vex?.status === 'not_affected';
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if VEX evidence is still valid (not expired).
|
||||
*/
|
||||
export function isVexValid(vex?: VexEvidence): boolean {
|
||||
if (!vex) return false;
|
||||
if (!vex.expires_at) return true;
|
||||
return new Date(vex.expires_at) > new Date();
|
||||
}
|
||||
Reference in New Issue
Block a user