Add Scanner advisory library (Concelier client and offline bundle store), runtime-capture timeline builder, and binary call-graph dynamic-load/string-literal analysis.

This commit is contained in:
master
2025-12-22 19:06:31 +02:00
parent dfaa2079aa
commit 4602ccc3a3
1444 changed files with 109919 additions and 8058 deletions

View File

@@ -0,0 +1,33 @@
# AGENTS - Scanner Advisory Library
## Mission
Provide advisory feed integration and offline bundles for CVE-to-symbol mapping used by reachability slices.
## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (deterministic tests; offline fixtures).
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/modules/concelier/architecture.md`
- `docs/reachability/slice-schema.md`
## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Advisory/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Advisory.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.
## Determinism & Offline Rules
- Prefer offline advisory bundles; no network access in tests.
- Cache advisory data deterministically with stable ordering and TTL control.
## Testing Expectations
- Unit tests for HTTP client shape and offline fallback.
- Deterministic serialization and cache hit/miss behavior.
## Workflow
- Update sprint status on task transitions.
- Record notable decisions in the sprint Execution Log.

View File

@@ -0,0 +1,74 @@
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Scanner.Advisory;
/// <summary>
/// Lookup of CVE-to-symbol mappings sourced from an offline advisory bundle.
/// </summary>
public interface IAdvisoryBundleStore
{
/// <summary>
/// Resolves the symbol mapping for <paramref name="cveId"/>, or <c>null</c>
/// when the bundle has no entry for that CVE.
/// </summary>
Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default);
}
/// <summary>
/// No-op bundle store used when no offline advisory bundle is configured.
/// Every lookup resolves to <c>null</c>.
/// </summary>
public sealed class NullAdvisoryBundleStore : IAdvisoryBundleStore
{
    // One shared, already-completed task; avoids allocating per lookup.
    private static readonly Task<AdvisorySymbolMapping?> NullResult =
        Task.FromResult<AdvisorySymbolMapping?>(null);

    /// <summary>Always resolves to <c>null</c>; no bundle is consulted.</summary>
    public Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default)
    {
        return NullResult;
    }
}
/// <summary>
/// Loads CVE-to-symbol mappings from a JSON bundle file on disk. The bundle is parsed
/// once and the resulting immutable index is cached for the lifetime of the store.
/// </summary>
public sealed class FileAdvisoryBundleStore : IAdvisoryBundleStore
{
    private readonly string _bundlePath;
    private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
    private ImmutableDictionary<string, AdvisorySymbolMapping>? _cache;

    /// <summary>Creates a store reading from <paramref name="bundlePath"/>; the file may legitimately not exist.</summary>
    public FileAdvisoryBundleStore(string bundlePath)
    {
        _bundlePath = bundlePath ?? throw new ArgumentNullException(nameof(bundlePath));
    }

    /// <summary>
    /// Resolves the mapping for <paramref name="cveId"/> (trimmed, upper-cased),
    /// or <c>null</c> when the id is blank or absent from the bundle.
    /// </summary>
    public async Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(cveId))
        {
            return null;
        }

        var normalized = cveId.Trim().ToUpperInvariant();
        var cache = await LoadAsync(cancellationToken).ConfigureAwait(false);
        return cache.TryGetValue(normalized, out var mapping) ? mapping : null;
    }

    /// <summary>
    /// Lazily loads and indexes the bundle. Concurrent first calls may each parse the file;
    /// because the published value is immutable, that race only costs duplicate work.
    /// </summary>
    private async Task<ImmutableDictionary<string, AdvisorySymbolMapping>> LoadAsync(CancellationToken cancellationToken)
    {
        var cached = Volatile.Read(ref _cache);
        if (cached is not null)
        {
            return cached;
        }

        ImmutableDictionary<string, AdvisorySymbolMapping> loaded;
        if (!File.Exists(_bundlePath))
        {
            // A missing bundle is a supported offline configuration, not an error.
            loaded = ImmutableDictionary<string, AdvisorySymbolMapping>.Empty;
        }
        else
        {
            // Open with async + sequential-scan hints; File.OpenRead yields a synchronous stream.
            await using var stream = new FileStream(
                _bundlePath,
                FileMode.Open,
                FileAccess.Read,
                FileShare.Read,
                bufferSize: 4096,
                FileOptions.Asynchronous | FileOptions.SequentialScan);
            var bundle = await JsonSerializer.DeserializeAsync<AdvisoryBundleDocument>(stream, _serializerOptions, cancellationToken)
                .ConfigureAwait(false);
            var builder = ImmutableDictionary.CreateBuilder<string, AdvisorySymbolMapping>(StringComparer.OrdinalIgnoreCase);
            foreach (var item in bundle?.Items ?? Array.Empty<AdvisorySymbolMapping>())
            {
                // Guard against JSON nulls in the array and blank ids; later entries win on duplicate CVEs.
                if (item is null || string.IsNullOrWhiteSpace(item.CveId))
                {
                    continue;
                }
                builder[item.CveId.Trim().ToUpperInvariant()] = item;
            }
            loaded = builder.ToImmutable();
        }

        Volatile.Write(ref _cache, loaded);
        return loaded;
    }

    /// <summary>Bundle file shape: a single top-level "items" array.</summary>
    private sealed record AdvisoryBundleDocument(IReadOnlyList<AdvisorySymbolMapping> Items);
}

View File

@@ -0,0 +1,196 @@
using System.Collections.Immutable;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Advisory;
/// <summary>
/// Resolves CVE-to-symbol mappings: first from the Concelier linkset API (when enabled and
/// configured), then from the offline advisory bundle, caching successful lookups in memory.
/// </summary>
public sealed class AdvisoryClient : IAdvisoryClient
{
    private readonly HttpClient _httpClient;
    private readonly IMemoryCache _cache;
    private readonly AdvisoryClientOptions _options;
    private readonly IAdvisoryBundleStore _bundleStore;
    private readonly ILogger<AdvisoryClient> _logger;
    private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    public AdvisoryClient(
        HttpClient httpClient,
        IMemoryCache cache,
        IOptions<AdvisoryClientOptions> options,
        IAdvisoryBundleStore bundleStore,
        ILogger<AdvisoryClient> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value;
        _bundleStore = bundleStore ?? throw new ArgumentNullException(nameof(bundleStore));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _httpClient.Timeout = TimeSpan.FromSeconds(Math.Max(1, _options.TimeoutSeconds));
        // BaseAddress must be set before the first request and never mutated afterwards
        // (HttpClient throws InvalidOperationException otherwise); the previous per-call
        // assignment was both racy and illegal after the first send.
        if (!string.IsNullOrWhiteSpace(_options.BaseUrl) && _httpClient.BaseAddress is null)
        {
            _httpClient.BaseAddress = new Uri(_options.BaseUrl.TrimEnd('/') + "/", UriKind.Absolute);
        }
    }

    /// <summary>
    /// Resolves the mapping for <paramref name="cveId"/> (trimmed, upper-cased), or <c>null</c>
    /// when the id is blank or unknown. Only non-null results are cached.
    /// </summary>
    public async Task<AdvisorySymbolMapping?> GetCveSymbolsAsync(string cveId, CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(cveId))
        {
            return null;
        }

        var normalized = cveId.Trim().ToUpperInvariant();
        var cacheKey = $"advisory:cve:{normalized}";
        if (_cache.TryGetValue(cacheKey, out AdvisorySymbolMapping? cached) && cached is not null)
        {
            return cached;
        }

        AdvisorySymbolMapping? mapping = null;
        if (_options.Enabled && !string.IsNullOrWhiteSpace(_options.BaseUrl))
        {
            mapping = await FetchFromConcelierAsync(normalized, cancellationToken).ConfigureAwait(false);
        }

        // The offline bundle is the fallback when the network path is disabled or fails.
        mapping ??= await _bundleStore.TryGetAsync(normalized, cancellationToken).ConfigureAwait(false);

        if (mapping is not null)
        {
            var ttl = TimeSpan.FromMinutes(Math.Max(1, _options.CacheTtlMinutes));
            _cache.Set(cacheKey, mapping, ttl);
        }

        return mapping;
    }

    /// <summary>
    /// Queries Concelier for the purls linked to <paramref name="cveId"/>. Returns <c>null</c>
    /// on transport/parse failures (logged) so the caller can fall back to the offline bundle.
    /// Symbols are left empty here; linkset responses carry purls only.
    /// </summary>
    private async Task<AdvisorySymbolMapping?> FetchFromConcelierAsync(string cveId, CancellationToken cancellationToken)
    {
        try
        {
            var purls = _options.UseSearchEndpoint
                ? await FetchPurlsFromSearchAsync(cveId, cancellationToken).ConfigureAwait(false)
                : await FetchPurlsFromLinksetAsync(cveId, cancellationToken).ConfigureAwait(false);
            if (purls.IsDefaultOrEmpty)
            {
                return null;
            }

            // Stable ordering keeps the mapping deterministic across runs.
            var packages = purls
                .Distinct(StringComparer.OrdinalIgnoreCase)
                .OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
                .Select(p => new AdvisoryPackageSymbols { Purl = p, Symbols = ImmutableArray<string>.Empty })
                .ToImmutableArray();
            return new AdvisorySymbolMapping
            {
                CveId = cveId,
                Packages = packages,
                Source = "concelier"
            };
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Caller-requested cancellation must propagate; it is not a fetch failure.
            throw;
        }
        catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to fetch advisory mapping from Concelier for {CveId}", cveId);
            return null;
        }
    }

    /// <summary>GET the per-CVE linkset endpoint; empty on non-success or empty payload.</summary>
    private async Task<ImmutableArray<string>> FetchPurlsFromLinksetAsync(string cveId, CancellationToken cancellationToken)
    {
        var path = _options.LinksetEndpointTemplate.Replace("{cveId}", Uri.EscapeDataString(cveId), StringComparison.OrdinalIgnoreCase);
        using var request = CreateRequest(HttpMethod.Get, path);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            return ImmutableArray<string>.Empty;
        }
        var payload = await response.Content.ReadFromJsonAsync<LnmLinksetResponse>(_serializerOptions, cancellationToken)
            .ConfigureAwait(false);
        if (payload is null)
        {
            return ImmutableArray<string>.Empty;
        }
        return CollectPurls(payload).ToImmutableArray();
    }

    /// <summary>POST the linkset search endpoint; returns distinct, ordered purls from the first page.</summary>
    private async Task<ImmutableArray<string>> FetchPurlsFromSearchAsync(string cveId, CancellationToken cancellationToken)
    {
        using var request = CreateRequest(HttpMethod.Post, _options.SearchEndpoint);
        request.Content = JsonContent.Create(new LnmLinksetSearchRequest(cveId), options: _serializerOptions);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            return ImmutableArray<string>.Empty;
        }
        var payload = await response.Content.ReadFromJsonAsync<LnmLinksetPage>(_serializerOptions, cancellationToken)
            .ConfigureAwait(false);
        if (payload?.Items is null)
        {
            return ImmutableArray<string>.Empty;
        }
        var purls = payload.Items.SelectMany(CollectPurls)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        return purls;
    }

    /// <summary>
    /// Builds a request carrying tenant/API-key headers per message. The shared
    /// <see cref="HttpClient"/> is never mutated after construction, which keeps this client
    /// safe for concurrent use (DefaultRequestHeaders is not thread-safe to mutate).
    /// </summary>
    private HttpRequestMessage CreateRequest(HttpMethod method, string path)
    {
        var request = new HttpRequestMessage(method, path);
        if (!string.IsNullOrWhiteSpace(_options.Tenant))
        {
            request.Headers.TryAddWithoutValidation(_options.TenantHeaderName, _options.Tenant);
        }
        if (!string.IsNullOrWhiteSpace(_options.ApiKey))
        {
            var header = string.IsNullOrWhiteSpace(_options.ApiKeyHeader) ? "Authorization" : _options.ApiKeyHeader;
            request.Headers.TryAddWithoutValidation(header, _options.ApiKey);
        }
        return request;
    }

    /// <summary>Prefers normalized purls when present; falls back to the raw purl list.</summary>
    private static IEnumerable<string> CollectPurls(LnmLinksetResponse response)
    {
        if (response.Normalized?.Purl is { Count: > 0 } normalizedPurls)
        {
            return normalizedPurls;
        }
        return response.Purl ?? Array.Empty<string>();
    }

    /// <summary>Wire shape of a single linkset response.</summary>
    private sealed record LnmLinksetResponse(
        string AdvisoryId,
        string Source,
        IReadOnlyList<string>? Purl,
        LnmLinksetNormalized? Normalized);

    private sealed record LnmLinksetNormalized(
        IReadOnlyList<string>? Purl,
        IReadOnlyList<string>? Aliases);

    private sealed record LnmLinksetPage(IReadOnlyList<LnmLinksetResponse> Items);

    /// <summary>Search request body; only the first page (up to 100 items) is requested.</summary>
    private sealed record LnmLinksetSearchRequest(
        [property: JsonPropertyName("cve")] string Cve,
        [property: JsonPropertyName("page")] int Page = 1,
        [property: JsonPropertyName("pageSize")] int PageSize = 100);
}

View File

@@ -0,0 +1,26 @@
namespace StellaOps.Scanner.Advisory;
/// <summary>
/// Configuration for the advisory client: Concelier endpoint, auth headers,
/// timeout/caching, and offline behavior.
/// </summary>
public sealed class AdvisoryClientOptions
{
/// <summary>When false, network lookups are skipped and only the offline bundle is consulted.</summary>
public bool Enabled { get; set; } = true;
/// <summary>Base URL of the Concelier service; when unset, network lookups are skipped.</summary>
public string? BaseUrl { get; set; }
/// <summary>Tenant identifier sent via <see cref="TenantHeaderName"/> when present.</summary>
public string? Tenant { get; set; }
/// <summary>Header name carrying the tenant id.</summary>
public string TenantHeaderName { get; set; } = "X-Stella-Tenant";
/// <summary>API key sent via <see cref="ApiKeyHeader"/> when present.</summary>
public string? ApiKey { get; set; }
/// <summary>Header name carrying the API key; blank falls back to "Authorization".</summary>
public string ApiKeyHeader { get; set; } = "Authorization";
/// <summary>HTTP timeout; values below 1 are clamped to 1 second by the client.</summary>
public int TimeoutSeconds { get; set; } = 30;
/// <summary>Cache TTL for resolved mappings; values below 1 are clamped to 1 minute.</summary>
public int CacheTtlMinutes { get; set; } = 60;
/// <summary>Relative path template for per-CVE linkset lookup; "{cveId}" is substituted.</summary>
public string LinksetEndpointTemplate { get; set; } = "/v1/lnm/linksets/{cveId}";
/// <summary>Relative path of the POST search endpoint.</summary>
public string SearchEndpoint { get; set; } = "/v1/lnm/linksets/search";
/// <summary>When true, uses the POST search endpoint instead of the per-CVE GET endpoint.</summary>
public bool UseSearchEndpoint { get; set; } = false;
}

View File

@@ -0,0 +1,25 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Advisory;
/// <summary>
/// CVE-to-affected-packages mapping with optional per-package symbols. The serialized
/// shape matches both the offline bundle and Concelier-derived mappings.
/// </summary>
public sealed record AdvisorySymbolMapping
{
/// <summary>CVE identifier, e.g. "CVE-2024-12345".</summary>
[JsonPropertyName("cveId")]
public required string CveId { get; init; }
/// <summary>Affected packages; empty when none are known.</summary>
[JsonPropertyName("packages")]
public ImmutableArray<AdvisoryPackageSymbols> Packages { get; init; } = ImmutableArray<AdvisoryPackageSymbols>.Empty;
/// <summary>Origin of the mapping, e.g. "concelier" for API-derived results.</summary>
[JsonPropertyName("source")]
public required string Source { get; init; }
}
/// <summary>Vulnerable symbols associated with a single package.</summary>
public sealed record AdvisoryPackageSymbols
{
/// <summary>Package URL (purl) identifying the package.</summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>Affected symbol names; empty when symbol-level data is unavailable.</summary>
[JsonPropertyName("symbols")]
public ImmutableArray<string> Symbols { get; init; } = ImmutableArray<string>.Empty;
}

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Scanner.Advisory;
/// <summary>
/// Resolves CVE identifiers to affected packages/symbols for reachability slicing.
/// </summary>
public interface IAdvisoryClient
{
/// <summary>
/// Resolves the mapping for <paramref name="cveId"/>, or <c>null</c> when unknown.
/// </summary>
Task<AdvisorySymbolMapping?> GetCveSymbolsAsync(string cveId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,184 @@
namespace StellaOps.Scanner.Analyzers.Native.RuntimeCapture.Timeline;
/// <summary>
/// Runtime observation timeline for a finding.
/// </summary>
public sealed record RuntimeTimeline
{
/// <summary>
/// Finding this timeline is for.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// Vulnerable component being tracked.
/// </summary>
public required string ComponentPurl { get; init; }
/// <summary>
/// Time window start.
/// </summary>
public required DateTimeOffset WindowStart { get; init; }
/// <summary>
/// Time window end.
/// </summary>
public required DateTimeOffset WindowEnd { get; init; }
/// <summary>
/// Overall posture based on observations.
/// </summary>
public required RuntimePosture Posture { get; init; }
/// <summary>
/// Posture explanation.
/// </summary>
public required string PostureExplanation { get; init; }
/// <summary>
/// Time buckets with observation summaries.
/// </summary>
public required IReadOnlyList<TimelineBucket> Buckets { get; init; }
/// <summary>
/// Significant events in the timeline.
/// </summary>
public required IReadOnlyList<TimelineEvent> Events { get; init; }
/// <summary>
/// Total observation count.
/// </summary>
/// <remarks>Derived value: recomputed from <see cref="Buckets"/> on each access.</remarks>
public int TotalObservations => Buckets.Sum(b => b.ObservationCount);
/// <summary>
/// Capture session digests.
/// </summary>
public required IReadOnlyList<string> SessionDigests { get; init; }
}
/// <summary>
/// Overall judgment of how the runtime evidence relates to the finding's verdict.
/// </summary>
public enum RuntimePosture
{
/// <summary>No runtime data available.</summary>
Unknown,
/// <summary>Runtime evidence supports the verdict.</summary>
Supports,
/// <summary>Runtime evidence contradicts the verdict.</summary>
Contradicts,
/// <summary>Runtime evidence is inconclusive.</summary>
Inconclusive
}
/// <summary>
/// A time bucket in the timeline.
/// </summary>
public sealed record TimelineBucket
{
/// <summary>
/// Bucket start time.
/// </summary>
public required DateTimeOffset Start { get; init; }
/// <summary>
/// Bucket end time.
/// </summary>
public required DateTimeOffset End { get; init; }
/// <summary>
/// Number of observations in this bucket.
/// </summary>
public required int ObservationCount { get; init; }
/// <summary>
/// Observation types in this bucket.
/// </summary>
public required IReadOnlyList<ObservationTypeSummary> ByType { get; init; }
/// <summary>
/// Whether component was loaded in this bucket.
/// </summary>
public required bool ComponentLoaded { get; init; }
/// <summary>
/// Whether vulnerable code was executed.
/// </summary>
/// <remarks>Null when the component was not loaded in this bucket, so execution could not be assessed.</remarks>
public bool? VulnerableCodeExecuted { get; init; }
}
/// <summary>
/// Summary of observations by type.
/// </summary>
public sealed record ObservationTypeSummary
{
/// <summary>Observation category.</summary>
public required ObservationType Type { get; init; }
/// <summary>Number of observations of this type in the bucket.</summary>
public required int Count { get; init; }
}
/// <summary>
/// Category of a runtime observation, mapped from the raw observation type string
/// (see the timeline builder's classification).
/// </summary>
public enum ObservationType
{
/// <summary>Shared library / module load. Also the fallback for unrecognized type strings.</summary>
LibraryLoad,
/// <summary>System call invocation.</summary>
Syscall,
/// <summary>Network connection activity.</summary>
NetworkConnection,
/// <summary>File access.</summary>
FileAccess,
/// <summary>Process fork/exec.</summary>
ProcessSpawn,
/// <summary>Dynamic symbol resolution.</summary>
SymbolResolution
}
/// <summary>
/// A significant event in the timeline.
/// </summary>
public sealed record TimelineEvent
{
/// <summary>
/// Event timestamp.
/// </summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Event type.
/// </summary>
public required TimelineEventType Type { get; init; }
/// <summary>
/// Event description.
/// </summary>
public required string Description { get; init; }
/// <summary>
/// Significance level.
/// </summary>
public required EventSignificance Significance { get; init; }
/// <summary>
/// Related evidence digest.
/// </summary>
public string? EvidenceDigest { get; init; }
/// <summary>
/// Additional details.
/// </summary>
/// <remarks>Never null; defaults to an empty dictionary.</remarks>
public IReadOnlyDictionary<string, string> Details { get; init; }
= new Dictionary<string, string>();
}
/// <summary>Kind of significant event surfaced on the timeline.</summary>
public enum TimelineEventType
{
/// <summary>The tracked component was loaded into a process.</summary>
ComponentLoaded,
/// <summary>The tracked component was unloaded.</summary>
ComponentUnloaded,
/// <summary>A known-vulnerable function was invoked.</summary>
VulnerableFunctionCalled,
/// <summary>Network activity on a privileged port (&lt; 1024) was observed.</summary>
NetworkExposure,
/// <summary>A system call was blocked.</summary>
SyscallBlocked,
/// <summary>The process forked.</summary>
ProcessForked,
/// <summary>A capture session began.</summary>
CaptureStarted,
/// <summary>A capture session ended.</summary>
CaptureStopped
}
/// <summary>Relative importance of a timeline event.</summary>
public enum EventSignificance
{
/// <summary>Informational (e.g. capture session boundaries).</summary>
Low,
/// <summary>Noteworthy but not decisive.</summary>
Medium,
/// <summary>Strong signal (e.g. component load).</summary>
High,
/// <summary>Decisive signal (e.g. privileged-port network exposure).</summary>
Critical
}

View File

@@ -0,0 +1,257 @@
namespace StellaOps.Scanner.Analyzers.Native.RuntimeCapture.Timeline;
/// <summary>
/// Builds a <see cref="RuntimeTimeline"/> from captured runtime evidence for one component.
/// </summary>
public interface ITimelineBuilder
{
/// <summary>
/// Builds the timeline for <paramref name="componentPurl"/> over the window described by
/// <paramref name="options"/> (null window bounds default to the evidence's observation extents).
/// </summary>
RuntimeTimeline Build(
RuntimeEvidence evidence,
string componentPurl,
TimelineOptions options);
}
/// <summary>
/// Default <see cref="ITimelineBuilder"/>: buckets runtime observations over the analysis
/// window, extracts significant events, and derives an overall posture for the component.
/// </summary>
public sealed class TimelineBuilder : ITimelineBuilder
{
    /// <summary>
    /// Builds the timeline. <see cref="RuntimeTimeline.FindingId"/> is left as
    /// <see cref="Guid.Empty"/> and must be assigned by the caller.
    /// </summary>
    /// <exception cref="ArgumentNullException">Any argument is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><see cref="TimelineOptions.BucketSize"/> is not positive.</exception>
    public RuntimeTimeline Build(
        RuntimeEvidence evidence,
        string componentPurl,
        TimelineOptions options)
    {
        ArgumentNullException.ThrowIfNull(evidence);
        ArgumentNullException.ThrowIfNull(componentPurl);
        ArgumentNullException.ThrowIfNull(options);
        if (options.BucketSize <= TimeSpan.Zero)
        {
            // A non-positive bucket size would never advance the cursor in BuildBuckets,
            // turning the bucketing loop into an infinite loop.
            throw new ArgumentOutOfRangeException(nameof(options), "BucketSize must be positive.");
        }

        var windowStart = options.WindowStart ?? evidence.FirstObservation;
        var windowEnd = options.WindowEnd ?? evidence.LastObservation;

        var buckets = BuildBuckets(evidence, componentPurl, windowStart, windowEnd, options.BucketSize);
        var events = ExtractEvents(evidence, componentPurl);
        var (posture, explanation) = DeterminePosture(buckets, events, componentPurl);

        return new RuntimeTimeline
        {
            FindingId = Guid.Empty, // Assigned by the caller.
            ComponentPurl = componentPurl,
            WindowStart = windowStart,
            WindowEnd = windowEnd,
            Posture = posture,
            PostureExplanation = explanation,
            Buckets = buckets,
            Events = events.OrderBy(e => e.Timestamp).ToList(),
            SessionDigests = evidence.SessionDigests.ToList()
        };
    }

    /// <summary>
    /// Splits [start, end) into fixed-size buckets (the final bucket is clipped to the window)
    /// and summarizes the observations falling into each. Observations exactly at the window
    /// end are excluded (half-open bucket intervals).
    /// </summary>
    private static List<TimelineBucket> BuildBuckets(
        RuntimeEvidence evidence,
        string componentPurl,
        DateTimeOffset start,
        DateTimeOffset end,
        TimeSpan bucketSize)
    {
        // Hoisted: loop-invariant (previously recomputed inside the predicate per observation).
        var componentName = ExtractComponentName(componentPurl);
        var buckets = new List<TimelineBucket>();
        var current = start;
        while (current < end)
        {
            var bucketEnd = current + bucketSize;
            if (bucketEnd > end)
            {
                bucketEnd = end;
            }

            var observations = evidence.Observations
                .Where(o => o.Timestamp >= current && o.Timestamp < bucketEnd)
                .ToList();

            var byType = observations
                .GroupBy(ClassifyObservation)
                .Select(g => new ObservationTypeSummary
                {
                    Type = g.Key,
                    Count = g.Count()
                })
                .ToList();

            // Ordinal, case-sensitive substring match on the load path (made explicit).
            var componentLoaded = observations.Any(o =>
                o.Type == "library_load" &&
                o.Path?.Contains(componentName, StringComparison.Ordinal) == true);

            buckets.Add(new TimelineBucket
            {
                Start = current,
                End = bucketEnd,
                ObservationCount = observations.Count,
                ByType = byType,
                ComponentLoaded = componentLoaded,
                // Execution can only be assessed when the component was loaded; null otherwise.
                VulnerableCodeExecuted = componentLoaded ? DetectVulnerableExecution(observations) : null
            });

            current = bucketEnd;
        }

        return buckets;
    }

    /// <summary>
    /// Extracts significant events: component loads, privileged-port network activity,
    /// and capture session boundaries.
    /// </summary>
    private static List<TimelineEvent> ExtractEvents(RuntimeEvidence evidence, string componentPurl)
    {
        var events = new List<TimelineEvent>();
        var componentName = ExtractComponentName(componentPurl);
        foreach (var obs in evidence.Observations)
        {
            if (obs.Type == "library_load" && obs.Path?.Contains(componentName, StringComparison.Ordinal) == true)
            {
                events.Add(new TimelineEvent
                {
                    Timestamp = obs.Timestamp,
                    Type = TimelineEventType.ComponentLoaded,
                    Description = $"Component {componentName} loaded",
                    Significance = EventSignificance.High,
                    EvidenceDigest = obs.Digest,
                    Details = new Dictionary<string, string>
                    {
                        ["path"] = obs.Path ?? "",
                        ["process_id"] = obs.ProcessId.ToString()
                    }
                });
            }

            // Ports below 1024 are well-known/privileged; activity there is treated as critical exposure.
            if (obs.Type == "network" && obs.Port is > 0 and < 1024)
            {
                events.Add(new TimelineEvent
                {
                    Timestamp = obs.Timestamp,
                    Type = TimelineEventType.NetworkExposure,
                    Description = $"Network exposure on port {obs.Port}",
                    Significance = EventSignificance.Critical,
                    EvidenceDigest = obs.Digest
                });
            }
        }

        // Capture session boundaries provide context for gaps in observations.
        foreach (var session in evidence.Sessions)
        {
            events.Add(new TimelineEvent
            {
                Timestamp = session.StartTime,
                Type = TimelineEventType.CaptureStarted,
                Description = $"Capture session started ({session.Platform})",
                Significance = EventSignificance.Low
            });
            if (session.EndTime.HasValue)
            {
                events.Add(new TimelineEvent
                {
                    Timestamp = session.EndTime.Value,
                    Type = TimelineEventType.CaptureStopped,
                    Description = "Capture session stopped",
                    Significance = EventSignificance.Low
                });
            }
        }

        return events;
    }

    /// <summary>
    /// Derives the overall posture: no data → Unknown; never loaded → Supports;
    /// vulnerable execution or network exposure → Contradicts; loaded in fewer than half
    /// the buckets → Inconclusive; otherwise Supports.
    /// </summary>
    private static (RuntimePosture posture, string explanation) DeterminePosture(
        List<TimelineBucket> buckets,
        List<TimelineEvent> events,
        string componentPurl)
    {
        if (buckets.Count == 0 || buckets.All(b => b.ObservationCount == 0))
        {
            return (RuntimePosture.Unknown, "No runtime observations collected");
        }
        var componentLoadedCount = buckets.Count(b => b.ComponentLoaded);
        var totalBuckets = buckets.Count;
        if (componentLoadedCount == 0)
        {
            return (RuntimePosture.Supports,
                $"Component {ExtractComponentName(componentPurl)} was not loaded during observation window");
        }
        var hasNetworkExposure = events.Any(e => e.Type == TimelineEventType.NetworkExposure);
        var hasVulnerableExecution = buckets.Any(b => b.VulnerableCodeExecuted == true);
        if (hasVulnerableExecution || hasNetworkExposure)
        {
            return (RuntimePosture.Contradicts,
                "Runtime evidence shows component is actively used and exposed");
        }
        if (componentLoadedCount < totalBuckets / 2)
        {
            return (RuntimePosture.Inconclusive,
                $"Component loaded in {componentLoadedCount}/{totalBuckets} time periods");
        }
        return (RuntimePosture.Supports,
            "Component loaded but no evidence of vulnerable code execution");
    }

    /// <summary>Maps raw observation type strings onto <see cref="ObservationType"/> categories.</summary>
    private static ObservationType ClassifyObservation(RuntimeObservation obs)
    {
        return obs.Type switch
        {
            "library_load" or "dlopen" => ObservationType.LibraryLoad,
            "syscall" => ObservationType.Syscall,
            "network" or "connect" => ObservationType.NetworkConnection,
            "file" or "open" => ObservationType.FileAccess,
            "fork" or "exec" => ObservationType.ProcessSpawn,
            "symbol" => ObservationType.SymbolResolution,
            // NOTE(review): unknown types fall back to LibraryLoad; a dedicated Unknown member
            // would be less misleading — confirm before extending the enum.
            _ => ObservationType.LibraryLoad
        };
    }

    /// <summary>
    /// Extracts the package name from a purl, e.g. "pkg:npm/lodash@4.17.21" → "lodash".
    /// NOTE(review): for namespaced purls ("pkg:maven/group/artifact@v") this yields only the
    /// last path segment — confirm that is the intended match key.
    /// </summary>
    private static string ExtractComponentName(string purl)
    {
        var parts = purl.Split('/');
        var namePart = parts.LastOrDefault() ?? purl;
        return namePart.Split('@').FirstOrDefault() ?? namePart;
    }

    /// <summary>
    /// True when any observation suggests a vulnerable code path ran: a symbol-resolution
    /// event or an explicit "vulnerable_function" attribute.
    /// </summary>
    private static bool? DetectVulnerableExecution(List<RuntimeObservation> observations)
    {
        return observations.Any(o =>
            o.Type == "symbol" ||
            o.Attributes?.ContainsKey("vulnerable_function") == true);
    }
}
/// <summary>Options controlling timeline construction.</summary>
public sealed record TimelineOptions
{
/// <summary>Window start; null means "first observation in the evidence".</summary>
public DateTimeOffset? WindowStart { get; init; }
/// <summary>Window end; null means "last observation in the evidence".</summary>
public DateTimeOffset? WindowEnd { get; init; }
/// <summary>Bucket width for aggregation; defaults to one hour.</summary>
public TimeSpan BucketSize { get; init; } = TimeSpan.FromHours(1);
}
// Simplified runtime evidence types for Timeline API
/// <summary>Aggregated runtime capture evidence consumed by the timeline builder.</summary>
public sealed record RuntimeEvidence
{
/// <summary>Timestamp of the earliest observation.</summary>
public required DateTimeOffset FirstObservation { get; init; }
/// <summary>Timestamp of the latest observation.</summary>
public required DateTimeOffset LastObservation { get; init; }
/// <summary>All captured observations; the builder does not assume any ordering.</summary>
public required IReadOnlyList<RuntimeObservation> Observations { get; init; }
/// <summary>Capture sessions the observations came from.</summary>
public required IReadOnlyList<RuntimeSession> Sessions { get; init; }
/// <summary>Content digests of the capture sessions, copied into the timeline.</summary>
public required IReadOnlyList<string> SessionDigests { get; init; }
}
/// <summary>A single runtime observation (e.g. type "library_load", "network", "symbol").</summary>
public sealed record RuntimeObservation
{
/// <summary>When the observation occurred.</summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>Raw observation type string; see the timeline builder's classification for known values.</summary>
public required string Type { get; init; }
/// <summary>File-system path involved, if any (used to match component loads).</summary>
public string? Path { get; init; }
/// <summary>Network port, for network observations.</summary>
public int? Port { get; init; }
/// <summary>Id of the observed process.</summary>
public int ProcessId { get; init; }
/// <summary>Evidence content digest, if recorded.</summary>
public string? Digest { get; init; }
/// <summary>Extra key/value details (e.g. "vulnerable_function").</summary>
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
}
/// <summary>A capture session during which observations were recorded.</summary>
public sealed record RuntimeSession
{
/// <summary>Session start.</summary>
public required DateTimeOffset StartTime { get; init; }
/// <summary>Session end; null while the session is still running.</summary>
public DateTimeOffset? EndTime { get; init; }
/// <summary>Capture platform label (included in the CaptureStarted event description).</summary>
public required string Platform { get; init; }
}

View File

@@ -0,0 +1,35 @@
# AGENTS - Scanner CallGraph Library
## Mission
Provide deterministic call graph extraction for supported languages and native binaries, producing stable node/edge outputs for reachability analysis.
## Roles
- Backend/analyzer engineer (.NET 10, C# preview).
- QA engineer (unit + deterministic fixtures).
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/reachability/DELIVERY_GUIDE.md`
- `docs/reachability/binary-reachability-schema.md`
## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.CallGraph.Tests/`
- Avoid cross-module edits unless the sprint explicitly calls them out.
## Determinism & Offline Rules
- Stable ordering for nodes/edges; avoid wall-clock timestamps in outputs.
- No network access or external binaries at runtime.
- Normalize paths and symbol names consistently.
## Testing Expectations
- Add/extend unit tests for new extractors and edge kinds.
- Use deterministic fixtures/golden outputs; document inputs in test comments when needed.
- Run `dotnet test src/Scanner/StellaOps.Scanner.sln` when feasible.
## Workflow
- Update sprint status on start/finish (`TODO -> DOING -> DONE/BLOCKED`).
- Record notable decisions in the sprint Execution Log.

View File

@@ -0,0 +1,128 @@
using System.Collections.Immutable;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.CallGraph.Binary;
namespace StellaOps.Scanner.CallGraph.Binary.Analysis;
/// <summary>
/// Detects potential dynamic library loading (dlopen/LoadLibrary families) and emits
/// speculative dynamic edges from the loading functions to every library name found
/// among the binary's string literals.
/// </summary>
internal sealed class BinaryDynamicLoadDetector
{
    // Loader entry points: POSIX dl* family and Windows LoadLibrary/GetProcAddress.
    private static readonly string[] LoaderSymbols =
    [
        "dlopen",
        "dlsym",
        "dlmopen",
        "LoadLibraryA",
        "LoadLibraryW",
        "LoadLibraryExA",
        "LoadLibraryExW",
        "GetProcAddress"
    ];

    // Built once: the loader-symbol set is constant, so avoid rebuilding a HashSet per call.
    private static readonly HashSet<string> LoaderNames = new(LoaderSymbols, StringComparer.OrdinalIgnoreCase);

    private readonly BinaryStringLiteralScanner _stringScanner;

    public BinaryDynamicLoadDetector(BinaryStringLiteralScanner? stringScanner = null)
    {
        _stringScanner = stringScanner ?? new BinaryStringLiteralScanner();
    }

    /// <summary>
    /// Returns deterministic (source × candidate) dynamic edges when the binary references
    /// loader symbols, or an empty array when no loader usage or no candidate strings exist.
    /// </summary>
    public async Task<ImmutableArray<CallGraphEdge>> ExtractAsync(
        string path,
        BinaryFormat format,
        string binaryName,
        IReadOnlyCollection<CallGraphEdge> directEdges,
        IReadOnlyCollection<BinaryRelocation> relocations,
        CancellationToken ct)
    {
        ct.ThrowIfCancellationRequested();
        var loaderSources = new HashSet<string>(StringComparer.Ordinal);
        var loaderTargets = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Direct call edges into a loader symbol identify the calling functions as load sites.
        foreach (var edge in directEdges)
        {
            if (TryGetSymbol(edge.TargetId, out var targetSymbol)
                && LoaderNames.Contains(targetSymbol))
            {
                loaderSources.Add(edge.SourceId);
                loaderTargets.Add(targetSymbol);
            }
        }

        // Relocations against loader symbols indicate loader usage even without a resolved call edge.
        foreach (var reloc in relocations)
        {
            if (string.IsNullOrWhiteSpace(reloc.TargetSymbol))
            {
                continue;
            }
            if (LoaderNames.Contains(reloc.TargetSymbol))
            {
                loaderTargets.Add(reloc.TargetSymbol);
            }
        }

        if (loaderSources.Count == 0 && loaderTargets.Count == 0)
        {
            return ImmutableArray<CallGraphEdge>.Empty;
        }

        if (loaderSources.Count == 0)
        {
            // Loader seen only via relocations: synthesize per-loader source nodes under this binary.
            foreach (var target in loaderTargets)
            {
                loaderSources.Add($"native:{binaryName}/{target}");
            }
        }

        var candidates = await _stringScanner.ExtractLibraryCandidatesAsync(path, format, ct).ConfigureAwait(false);
        if (candidates.IsDefaultOrEmpty)
        {
            return ImmutableArray<CallGraphEdge>.Empty;
        }

        // Stable ordinal ordering keeps the emitted edge list deterministic across runs.
        var orderedSources = loaderSources.OrderBy(value => value, StringComparer.Ordinal).ToArray();
        var orderedCandidates = candidates.OrderBy(value => value, StringComparer.Ordinal).ToArray();
        var edges = ImmutableArray.CreateBuilder<CallGraphEdge>(orderedSources.Length * orderedCandidates.Length);
        foreach (var source in orderedSources)
        {
            foreach (var candidate in orderedCandidates)
            {
                var targetId = $"native:external/{candidate}";
                edges.Add(new CallGraphEdge(
                    SourceId: source,
                    TargetId: targetId,
                    CallKind: CallKind.Dynamic,
                    CallSite: $"string:{candidate}"));
            }
        }
        return edges.ToImmutable();
    }

    /// <summary>
    /// Extracts the symbol portion from a "native:{binary}/{symbol}" node id; false for ids
    /// without that shape. NOTE(review): splits on the FIRST '/', so the symbol is everything
    /// after the first separator — confirm against the node-id scheme if ids can nest.
    /// </summary>
    private static bool TryGetSymbol(string nodeId, out string symbol)
    {
        symbol = string.Empty;
        if (string.IsNullOrWhiteSpace(nodeId))
        {
            return false;
        }
        const string prefix = "native:";
        if (!nodeId.StartsWith(prefix, StringComparison.Ordinal))
        {
            return false;
        }
        var remainder = nodeId.Substring(prefix.Length);
        var slashIndex = remainder.IndexOf('/');
        if (slashIndex < 0 || slashIndex == remainder.Length - 1)
        {
            return false;
        }
        symbol = remainder[(slashIndex + 1)..];
        return !string.IsNullOrWhiteSpace(symbol);
    }
}

View File

@@ -0,0 +1,464 @@
using System.Collections.Immutable;
using System.Text;
using StellaOps.Scanner.CallGraph.Binary;
namespace StellaOps.Scanner.CallGraph.Binary.Analysis;
internal sealed class BinaryStringLiteralScanner
{
private const int MinStringLength = 4;
/// <summary>
/// Scans the binary's string-bearing sections and returns the distinct, ordinal-ordered set
/// of strings that look like dynamic-library references.
/// </summary>
public async Task<ImmutableArray<string>> ExtractLibraryCandidatesAsync(
    string path,
    BinaryFormat format,
    CancellationToken ct)
{
    ct.ThrowIfCancellationRequested();
    var sections = await ReadStringSectionsAsync(path, format, ct);
    if (sections.Count == 0)
    {
        return ImmutableArray<string>.Empty;
    }

    // Pipeline: raw strings -> normalized -> filtered -> case-insensitively deduplicated
    // (first occurrence wins, matching a HashSet build) -> stable ordinal order.
    return sections
        .SelectMany(ExtractStrings)
        .Select(NormalizeCandidate)
        .Where(static value => !string.IsNullOrWhiteSpace(value) && IsLibraryCandidate(value))
        .Distinct(StringComparer.OrdinalIgnoreCase)
        .OrderBy(static value => value, StringComparer.Ordinal)
        .ToImmutableArray();
}
/// <summary>
/// Yields maximal runs of printable ASCII (0x20-0x7E) that are at least
/// <see cref="MinStringLength"/> characters long, scanning the raw section bytes.
/// </summary>
private static IEnumerable<string> ExtractStrings(byte[] bytes)
{
    var runStart = -1; // index where the current printable run began, or -1 outside a run
    for (var index = 0; index < bytes.Length; index++)
    {
        if (bytes[index] is >= 0x20 and <= 0x7E)
        {
            if (runStart < 0)
            {
                runStart = index;
            }
            continue;
        }

        // Non-printable byte terminates the run; emit it if long enough.
        if (runStart >= 0 && index - runStart >= MinStringLength)
        {
            yield return Encoding.ASCII.GetString(bytes, runStart, index - runStart);
        }
        runStart = -1;
    }

    // Flush a run that extends to the end of the buffer.
    if (runStart >= 0 && bytes.Length - runStart >= MinStringLength)
    {
        yield return Encoding.ASCII.GetString(bytes, runStart, bytes.Length - runStart);
    }
}
/// <summary>
/// Trims whitespace and surrounding quote characters, then canonicalizes path separators
/// to forward slashes. Returns an empty string when nothing remains.
/// </summary>
private static string NormalizeCandidate(string value)
{
    var stripped = value.Trim().Trim('"', '\'');
    return stripped.Length == 0
        ? string.Empty
        : stripped.Replace('\\', '/');
}
/// <summary>
/// Heuristic filter for strings that plausibly name a shared library: Windows (.dll),
/// macOS (.dylib), or ELF (.so, including versioned sonames such as libcrypto.so.3).
/// </summary>
private static bool IsLibraryCandidate(string value)
{
    if (value.EndsWith(".dll", StringComparison.OrdinalIgnoreCase)
        || value.EndsWith(".dylib", StringComparison.OrdinalIgnoreCase)
        || value.EndsWith(".so", StringComparison.OrdinalIgnoreCase))
    {
        return true;
    }
    // Versioned ELF sonames embed ".so." mid-string. The previous bare Contains(".so")
    // also matched unrelated strings such as "unix.sock" or "foo.solver"; requiring the
    // trailing dot (or the .so suffix above) removes those false positives.
    return value.Contains(".so.", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Dispatches to the format-specific section reader; unknown formats yield no sections.
/// </summary>
private static async Task<List<byte[]>> ReadStringSectionsAsync(
string path,
BinaryFormat format,
CancellationToken ct)
{
return format switch
{
BinaryFormat.Elf => await ReadElfStringSectionsAsync(path, ct),
BinaryFormat.Pe => await ReadPeStringSectionsAsync(path, ct),
BinaryFormat.MachO => await ReadMachOStringSectionsAsync(path, ct),
_ => []
};
}
/// <summary>
/// Reads the raw bytes of ELF sections likely to contain string data.
/// Only little-endian ELF files are supported; anything else yields no sections.
/// Header field offsets below follow the ELF specification for ELF32/ELF64.
/// </summary>
private static async Task<List<byte[]>> ReadElfStringSectionsAsync(string path, CancellationToken ct)
{
using var stream = File.OpenRead(path);
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
// e_ident: first 16 bytes of the ELF header.
var ident = reader.ReadBytes(16);
if (ident.Length < 16)
{
return [];
}
// e_ident[EI_CLASS]: 2 = ELFCLASS64, 1 = ELFCLASS32.
var is64Bit = ident[4] == 2;
// e_ident[EI_DATA]: 1 = little-endian; BinaryReader is little-endian-only, so skip others.
var isLittleEndian = ident[5] == 1;
if (!isLittleEndian)
{
return [];
}
// e_shoff (section header table offset): at 0x28 for ELF64, 0x20 for ELF32.
stream.Seek(is64Bit ? 40 : 32, SeekOrigin.Begin);
var sectionHeaderOffset = is64Bit ? reader.ReadInt64() : reader.ReadInt32();
// e_shentsize / e_shnum / e_shstrndx: consecutive u16 fields at 0x3A (ELF64) / 0x2E (ELF32).
stream.Seek(is64Bit ? 58 : 46, SeekOrigin.Begin);
var sectionHeaderSize = reader.ReadUInt16();
var sectionHeaderCount = reader.ReadUInt16();
var strTabIndex = reader.ReadUInt16();
if (sectionHeaderOffset <= 0 || sectionHeaderCount == 0)
{
return [];
}
// Locate the section-name string table (.shstrtab) via e_shstrndx.
var nameTableOffset = ReadElfSectionOffset(reader, stream, sectionHeaderOffset, sectionHeaderSize, strTabIndex, is64Bit);
var nameTableSize = ReadElfSectionSize(reader, stream, sectionHeaderOffset, sectionHeaderSize, strTabIndex, is64Bit);
if (nameTableOffset <= 0 || nameTableSize <= 0)
{
return [];
}
stream.Seek(nameTableOffset, SeekOrigin.Begin);
var nameTable = reader.ReadBytes((int)nameTableSize);
var sections = new List<byte[]>();
// Walk every section header; each is sectionHeaderSize bytes starting at sectionHeaderOffset.
for (int i = 0; i < sectionHeaderCount; i++)
{
ct.ThrowIfCancellationRequested();
stream.Seek(sectionHeaderOffset + i * sectionHeaderSize, SeekOrigin.Begin);
var nameIndex = reader.ReadUInt32();
reader.ReadUInt32(); // sh_type
if (is64Bit)
{
// ELF64 layout: sh_flags/sh_addr are 8 bytes each, then sh_offset and sh_size.
reader.ReadUInt64(); // sh_flags
reader.ReadUInt64(); // sh_addr
var offset = reader.ReadInt64();
var size = reader.ReadInt64();
if (ShouldReadSection(nameTable, nameIndex) && offset > 0 && size > 0)
{
sections.Add(ReadSection(reader, stream, offset, size));
}
}
else
{
// ELF32 layout: the same fields are 4 bytes each.
reader.ReadUInt32(); // sh_flags
reader.ReadUInt32(); // sh_addr
var offset = reader.ReadInt32();
var size = reader.ReadInt32();
if (ShouldReadSection(nameTable, nameIndex) && offset > 0 && size > 0)
{
sections.Add(ReadSection(reader, stream, offset, size));
}
}
}
// NOTE(review): the method does no真 asynchronous I/O; this await only satisfies the async signature.
await Task.CompletedTask;
return sections;
}
/// <summary>
/// Selects ELF sections whose names suggest constant/string data (.rodata, .rdata,
/// .data variants, __cstring). Uses substring matching, so related names such as
/// ".data.rel.ro" are also included.
/// </summary>
private static bool ShouldReadSection(byte[] nameTable, uint nameIndex)
{
var name = ReadNullTerminatedString(nameTable, (int)nameIndex);
if (string.IsNullOrWhiteSpace(name))
{
return false;
}
// NOTE: the "data" check already subsumes "rodata" and "rdata" (both contain "data");
// the explicit earlier checks are redundant but kept for readability.
return name.Contains("rodata", StringComparison.Ordinal)
|| name.Contains("rdata", StringComparison.Ordinal)
|| name.Contains("data", StringComparison.Ordinal)
|| name.Contains("cstring", StringComparison.Ordinal);
}
/// <summary>
/// Reads the raw bytes of PE sections that commonly hold string data
/// (.rdata, .data, .rodata) from the file at <paramref name="path"/>.
/// Returns an empty list when the file lacks a valid PE signature or an
/// optional header.
/// </summary>
/// <remarks>
/// NOTE(review): <c>peOffset</c> is read from 0x3C (e_lfanew) without range
/// validation — a malformed file could seek out of bounds; presumably
/// upstream format detection guarantees a well-formed header. Confirm.
/// </remarks>
private static async Task<List<byte[]>> ReadPeStringSectionsAsync(string path, CancellationToken ct)
{
using var stream = File.OpenRead(path);
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
stream.Seek(0x3C, SeekOrigin.Begin);
var peOffset = reader.ReadInt32();
stream.Seek(peOffset, SeekOrigin.Begin);
var signature = reader.ReadUInt32();
// "PE\0\0" little-endian.
if (signature != 0x00004550)
{
return [];
}
reader.ReadUInt16(); // machine
var numberOfSections = reader.ReadUInt16();
reader.ReadUInt32(); // timestamp
reader.ReadUInt32(); // symbol table ptr
reader.ReadUInt32(); // number of symbols
var optionalHeaderSize = reader.ReadUInt16();
reader.ReadUInt16(); // characteristics
if (optionalHeaderSize == 0)
{
return [];
}
// Skip the optional header; section headers start immediately after it.
stream.Seek(stream.Position + optionalHeaderSize, SeekOrigin.Begin);
var sections = new List<byte[]>();
for (int i = 0; i < numberOfSections; i++)
{
ct.ThrowIfCancellationRequested();
// Each iteration reads one 40-byte IMAGE_SECTION_HEADER in field order.
var nameBytes = reader.ReadBytes(8);
var name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
reader.ReadUInt32(); // virtual size
reader.ReadUInt32(); // virtual address
var sizeOfRawData = reader.ReadUInt32();
var pointerToRawData = reader.ReadUInt32();
reader.ReadUInt32(); // pointer to relocations
reader.ReadUInt32(); // pointer to line numbers
reader.ReadUInt16(); // number of relocations
reader.ReadUInt16(); // number of line numbers
reader.ReadUInt32(); // characteristics
if (!IsPeStringSection(name) || pointerToRawData == 0 || sizeOfRawData == 0)
{
continue;
}
// ReadSection restores the stream position, so the next iteration
// continues at the following section header.
sections.Add(ReadSection(reader, stream, pointerToRawData, sizeOfRawData));
}
await Task.CompletedTask;
return sections;
}
// True for the fixed set of PE section names that typically contain string
// data; comparison is ordinal and case-sensitive, matching section naming.
private static bool IsPeStringSection(string name) => name switch
{
    ".rdata" or ".data" or ".rodata" => true,
    _ => false
};
/// <summary>
/// Reads the raw bytes of Mach-O sections that typically hold string data
/// (__cstring, __const, __data). Returns an empty list for byte-swapped
/// (big-endian) images and for files whose magic is not a Mach-O magic.
/// </summary>
private static async Task<List<byte[]>> ReadMachOStringSectionsAsync(string path, CancellationToken ct)
{
    using var stream = File.OpenRead(path);
    using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
    var magic = reader.ReadUInt32();
    var is64Bit = magic is 0xFEEDFACF or 0xCFFAEDFE;
    var isSwapped = magic is 0xCEFAEDFE or 0xCFFAEDFE;
    // Byte-swapped (big-endian) images would require swapping every
    // multi-byte field below; not supported.
    if (isSwapped)
    {
        return [];
    }
    // Robustness fix: previously any non-Mach-O file fell through and its
    // bytes were parsed as a Mach-O header. Only MH_MAGIC (0xFEEDFACE) and
    // MH_MAGIC_64 (0xFEEDFACF) are valid little-endian magics at this point.
    if (magic is not (0xFEEDFACE or 0xFEEDFACF))
    {
        return [];
    }
    reader.ReadInt32(); // cputype
    reader.ReadInt32(); // cpusubtype
    reader.ReadUInt32(); // filetype
    var ncmds = reader.ReadUInt32();
    reader.ReadUInt32(); // sizeofcmds
    reader.ReadUInt32(); // flags
    if (is64Bit)
    {
        reader.ReadUInt32(); // reserved (mach_header_64 only)
    }
    var sections = new List<byte[]>();
    for (int i = 0; i < ncmds; i++)
    {
        ct.ThrowIfCancellationRequested();
        var cmdStart = stream.Position;
        var cmd = reader.ReadUInt32();
        var cmdsize = reader.ReadUInt32();
        // LC_SEGMENT_64 = 0x19, LC_SEGMENT = 0x1.
        var isSegment = cmd == (is64Bit ? 0x19u : 0x1u);
        if (!isSegment)
        {
            stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
            continue;
        }
        reader.ReadBytes(16); // segname
        if (is64Bit)
        {
            reader.ReadUInt64(); // vmaddr
            reader.ReadUInt64(); // vmsize
            reader.ReadUInt64(); // fileoff
            reader.ReadUInt64(); // filesize
            reader.ReadInt32(); // maxprot
            reader.ReadInt32(); // initprot
            var nsects = reader.ReadUInt32();
            reader.ReadUInt32(); // flags
            for (int s = 0; s < nsects; s++)
            {
                // section_64 layout: sectname, segname, addr, size, offset, …
                var sectName = ReadFixedString(reader, 16);
                reader.ReadBytes(16); // segname
                reader.ReadUInt64(); // addr
                var size = reader.ReadUInt64();
                var offset = reader.ReadUInt32();
                reader.ReadUInt32(); // align
                reader.ReadUInt32(); // reloff
                reader.ReadUInt32(); // nreloc
                reader.ReadUInt32(); // flags
                reader.ReadUInt32(); // reserved1
                reader.ReadUInt32(); // reserved2
                reader.ReadUInt32(); // reserved3
                if (IsMachOStringSection(sectName) && offset > 0 && size > 0)
                {
                    sections.Add(ReadSection(reader, stream, (long)offset, (long)size));
                }
            }
        }
        else
        {
            reader.ReadUInt32(); // vmaddr
            reader.ReadUInt32(); // vmsize
            reader.ReadUInt32(); // fileoff
            reader.ReadUInt32(); // filesize
            reader.ReadInt32(); // maxprot
            reader.ReadInt32(); // initprot
            var nsects = reader.ReadUInt32();
            reader.ReadUInt32(); // flags
            for (int s = 0; s < nsects; s++)
            {
                // 32-bit section layout has no reserved3 field.
                var sectName = ReadFixedString(reader, 16);
                reader.ReadBytes(16); // segname
                reader.ReadUInt32(); // addr
                var size = reader.ReadUInt32();
                var offset = reader.ReadUInt32();
                reader.ReadUInt32(); // align
                reader.ReadUInt32(); // reloff
                reader.ReadUInt32(); // nreloc
                reader.ReadUInt32(); // flags
                reader.ReadUInt32(); // reserved1
                reader.ReadUInt32(); // reserved2
                if (IsMachOStringSection(sectName) && offset > 0 && size > 0)
                {
                    sections.Add(ReadSection(reader, stream, (long)offset, (long)size));
                }
            }
        }
        // ReadSection restores stream position, so seek from the recorded
        // command start to land on the next load command.
        stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
    }
    await Task.CompletedTask;
    return sections;
}
// True for the Mach-O section names that typically contain string data;
// comparison is ordinal and case-sensitive.
private static bool IsMachOStringSection(string sectName) => sectName switch
{
    "__cstring" or "__const" or "__data" => true,
    _ => false
};
/// <summary>
/// Reads <paramref name="size"/> bytes at <paramref name="offset"/> without
/// disturbing the caller's stream position. Returns an empty array when the
/// requested range is invalid or lies outside the stream.
/// </summary>
private static byte[] ReadSection(BinaryReader reader, Stream stream, long offset, long size)
{
    // Overflow-safe bounds check: the previous "offset + size > stream.Length"
    // could wrap for hostile header values, and sizes above int.MaxValue
    // cannot be passed to ReadBytes(int) anyway.
    if (offset < 0 || size <= 0 || size > int.MaxValue || offset > stream.Length - size)
    {
        return Array.Empty<byte>();
    }
    var current = stream.Position;
    stream.Seek(offset, SeekOrigin.Begin);
    var bytes = reader.ReadBytes((int)size);
    stream.Seek(current, SeekOrigin.Begin);
    return bytes;
}
// Convenience overload for headers whose offset/size fields are 32-bit
// unsigned (PE raw-data pointers, Mach-O section offsets).
private static byte[] ReadSection(BinaryReader reader, Stream stream, uint offset, uint size)
=> ReadSection(reader, stream, (long)offset, (long)size);
// Reads the sh_offset field of the section header at the given table index.
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
    => ReadElfSectionOffset(reader, stream, sectionHeaderOffset + index * entrySize, is64Bit);

// Reads the sh_offset field (at +24 for ELF64, +16 for ELF32) of the section
// header starting at the given stream position.
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
    var fieldOffset = is64Bit ? 24 : 16;
    stream.Seek(position + fieldOffset, SeekOrigin.Begin);
    if (is64Bit)
    {
        return reader.ReadInt64();
    }
    return reader.ReadInt32();
}
// Reads the sh_size field of the section header at the given table index.
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
    => ReadElfSectionSize(reader, stream, sectionHeaderOffset + index * entrySize, is64Bit);

// Reads the sh_size field (at +32 for ELF64, +20 for ELF32) of the section
// header starting at the given stream position.
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
    var fieldOffset = is64Bit ? 32 : 20;
    stream.Seek(position + fieldOffset, SeekOrigin.Begin);
    if (is64Bit)
    {
        return reader.ReadInt64();
    }
    return reader.ReadInt32();
}
// Reads a fixed-length ASCII field (e.g. a 16-byte Mach-O segment/section
// name) and trims everything from the first NUL byte onward.
private static string ReadFixedString(BinaryReader reader, int length)
{
    var raw = reader.ReadBytes(length);
    var terminator = Array.IndexOf(raw, (byte)0);
    if (terminator < 0)
    {
        terminator = raw.Length;
    }
    return Encoding.ASCII.GetString(raw, 0, terminator);
}
// Decodes the UTF-8, NUL-terminated string starting at offset in buffer.
// Returns the empty string for out-of-range offsets; an unterminated tail is
// decoded through the end of the buffer.
private static string ReadNullTerminatedString(byte[] buffer, int offset)
{
    if (offset < 0 || offset >= buffer.Length)
    {
        return string.Empty;
    }
    var terminator = Array.IndexOf(buffer, (byte)0, offset);
    var length = (terminator < 0 ? buffer.Length : terminator) - offset;
    return Encoding.UTF8.GetString(buffer, offset, length);
}
}

View File

@@ -6,6 +6,8 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.CallGraph.Binary.Analysis;
using StellaOps.Scanner.CallGraph.Binary.Disassembly;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph.Binary;
@@ -19,6 +21,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
private readonly ILogger<BinaryCallGraphExtractor> _logger;
private readonly TimeProvider _timeProvider;
private readonly BinaryEntrypointClassifier _entrypointClassifier;
private readonly DirectCallExtractor _directCallExtractor;
private readonly BinaryDynamicLoadDetector _dynamicLoadDetector;
public BinaryCallGraphExtractor(
ILogger<BinaryCallGraphExtractor> logger,
@@ -27,6 +31,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_entrypointClassifier = new BinaryEntrypointClassifier();
_directCallExtractor = new DirectCallExtractor();
_dynamicLoadDetector = new BinaryDynamicLoadDetector();
}
/// <inheritdoc />
@@ -70,7 +76,18 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
_ => []
};
return BuildSnapshot(request.ScanId, targetPath, symbols, relocations);
var directEdges = await ExtractDirectCallEdgesAsync(targetPath, format, symbols, cancellationToken);
var dynamicEdges = await _dynamicLoadDetector.ExtractAsync(
targetPath,
format,
Path.GetFileName(targetPath),
directEdges,
relocations,
cancellationToken);
var extraEdges = directEdges.Concat(dynamicEdges).ToArray();
return BuildSnapshot(request.ScanId, targetPath, symbols, relocations, extraEdges);
}
private async Task<BinaryFormat> DetectBinaryFormatAsync(string path, CancellationToken ct)
@@ -107,6 +124,31 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
throw new NotSupportedException($"Unknown binary format: {path}");
}
private async Task<IReadOnlyCollection<CallGraphEdge>> ExtractDirectCallEdgesAsync(
string path,
BinaryFormat format,
List<BinarySymbol> symbols,
CancellationToken ct)
{
var textSection = await BinaryTextSectionReader.TryReadAsync(path, format, ct);
if (textSection is null)
{
return Array.Empty<CallGraphEdge>();
}
if (textSection.Architecture == BinaryArchitecture.Unknown)
{
_logger.LogDebug("Skipping disassembly; unknown architecture for {Path}", path);
return Array.Empty<CallGraphEdge>();
}
var binaryName = Path.GetFileName(path);
var edges = _directCallExtractor.Extract(textSection, symbols, binaryName);
_logger.LogDebug("Extracted {Count} direct call edges from .text", edges.Length);
return edges;
}
private async Task<List<BinarySymbol>> ExtractElfSymbolsAsync(string path, CancellationToken ct)
{
var symbols = new List<BinarySymbol>();
@@ -255,6 +297,7 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
reader.ReadUInt16(); // characteristics
var is64Bit = machine == 0x8664; // AMD64
var sectionBases = new ulong[numberOfSections + 1];
// Read optional header to get export directory
if (optionalHeaderSize > 0)
@@ -271,6 +314,28 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
// For now, just log that exports exist
_logger.LogDebug("PE has export directory at RVA 0x{Rva:X}", exportRva);
}
var sectionHeadersStart = optionalHeaderStart + optionalHeaderSize;
var currentPos = stream.Position;
stream.Seek(sectionHeadersStart, SeekOrigin.Begin);
for (int i = 0; i < numberOfSections; i++)
{
reader.ReadBytes(8); // name
reader.ReadUInt32(); // virtual size
var virtualAddress = reader.ReadUInt32();
reader.ReadUInt32(); // size of raw data
reader.ReadUInt32(); // pointer to raw data
reader.ReadUInt32(); // pointer to relocations
reader.ReadUInt32(); // pointer to line numbers
reader.ReadUInt16(); // number of relocations
reader.ReadUInt16(); // number of line numbers
reader.ReadUInt32(); // characteristics
sectionBases[i + 1] = virtualAddress;
}
stream.Seek(currentPos, SeekOrigin.Begin);
}
// Read COFF symbol table if present
@@ -310,10 +375,15 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
name = System.Text.Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
}
var baseAddress = section > 0 && section < sectionBases.Length
? sectionBases[section]
: 0;
var resolvedAddress = baseAddress + value;
symbols.Add(new BinarySymbol
{
Name = name,
Address = value,
Address = resolvedAddress,
Size = 0, // PE doesn't store function size in symbol table
IsGlobal = storageClass == 2, // IMAGE_SYM_CLASS_EXTERNAL
IsExported = false // Would need to check export directory
@@ -476,6 +546,7 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
{
// Process relocation section
var isRela = shType == 4;
var isPltReloc = sectionName.Contains(".plt", StringComparison.Ordinal);
var entrySize = is64Bit
? (isRela ? 24 : 16)
: (isRela ? 12 : 8);
@@ -511,9 +582,10 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
{
Address = relocOffset,
SymbolIndex = (int)symIndex,
SourceSymbol = "", // Will be resolved later
SourceSymbol = isPltReloc ? "__plt__" : "",
TargetSymbol = "", // Will be resolved later
IsExternal = true
IsExternal = true,
CallKind = isPltReloc ? CallKind.Plt : CallKind.Direct
});
}
}
@@ -593,13 +665,20 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
var magic = reader.ReadUInt16();
var is64Bit = magic == 0x20b; // PE32+
// Skip to data directories
stream.Seek(optionalHeaderStart + (is64Bit ? 112 : 96), SeekOrigin.Begin);
// Read import table RVA and size (directory entry 1)
stream.Seek(8, SeekOrigin.Current); // Skip export table
// Read data directories
var dataDirectoryOffset = optionalHeaderStart + (is64Bit ? 112 : 96);
stream.Seek(dataDirectoryOffset, SeekOrigin.Begin);
var exportTableRva = reader.ReadUInt32();
var exportTableSize = reader.ReadUInt32();
var importTableRva = reader.ReadUInt32();
var importTableSize = reader.ReadUInt32();
stream.Seek(dataDirectoryOffset + 13 * 8, SeekOrigin.Begin); // delay import entry
var delayImportRva = reader.ReadUInt32();
var delayImportSize = reader.ReadUInt32();
_ = exportTableRva;
_ = exportTableSize;
_ = importTableSize;
_ = delayImportSize;
if (importTableRva == 0)
{
@@ -618,6 +697,25 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
// Parse import directory
stream.Seek(importTableOffset, SeekOrigin.Begin);
ReadPeImportTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, importTableOffset, relocations);
ReadPeDelayImportTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, delayImportRva, relocations);
await Task.CompletedTask;
_logger.LogDebug("Extracted {Count} imports from PE", relocations.Count);
return relocations;
}
private static void ReadPeImportTable(
Stream stream,
BinaryReader reader,
long sectionHeadersStart,
int numberOfSections,
bool is64Bit,
long importTableOffset,
List<BinaryRelocation> relocations)
{
stream.Seek(importTableOffset, SeekOrigin.Begin);
while (true)
{
var importLookupTableRva = reader.ReadUInt32();
@@ -631,66 +729,151 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
break; // End of import directory
}
// Read DLL name
var nameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
var currentPos = stream.Position;
stream.Seek(nameOffset, SeekOrigin.Begin);
var dllName = ReadCString(reader);
stream.Seek(currentPos, SeekOrigin.Begin);
var dllName = ReadPeDllName(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
if (string.IsNullOrWhiteSpace(dllName))
{
continue;
}
// Parse import lookup table
var lookupOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, importLookupTableRva);
if (lookupOffset > 0)
{
var lookupPos = stream.Position;
stream.Seek(lookupOffset, SeekOrigin.Begin);
ParseImportLookupTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, lookupOffset, dllName, relocations);
}
}
}
while (true)
private static void ReadPeDelayImportTable(
Stream stream,
BinaryReader reader,
long sectionHeadersStart,
int numberOfSections,
bool is64Bit,
uint delayImportRva,
List<BinaryRelocation> relocations)
{
if (delayImportRva == 0)
{
return;
}
var delayImportOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, delayImportRva);
if (delayImportOffset == 0)
{
return;
}
stream.Seek(delayImportOffset, SeekOrigin.Begin);
for (var i = 0; i < 256; i++)
{
var attributes = reader.ReadUInt32();
var nameRva = reader.ReadUInt32();
reader.ReadUInt32(); // module handle
reader.ReadUInt32(); // delay import address table
var delayImportNameTableRva = reader.ReadUInt32();
reader.ReadUInt32(); // bound delay import table
reader.ReadUInt32(); // unload delay import table
reader.ReadUInt32(); // timestamp
_ = attributes;
if (nameRva == 0)
{
break;
}
var dllName = ReadPeDllName(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
if (string.IsNullOrWhiteSpace(dllName) || delayImportNameTableRva == 0)
{
continue;
}
var nameTableOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, delayImportNameTableRva);
if (nameTableOffset == 0)
{
continue;
}
ParseImportLookupTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, nameTableOffset, dllName, relocations);
}
}
private static string? ReadPeDllName(
Stream stream,
BinaryReader reader,
long sectionHeadersStart,
int numberOfSections,
uint nameRva)
{
var nameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
if (nameOffset == 0)
{
return null;
}
var currentPos = stream.Position;
stream.Seek(nameOffset, SeekOrigin.Begin);
var dllName = ReadCString(reader);
stream.Seek(currentPos, SeekOrigin.Begin);
return dllName;
}
private static void ParseImportLookupTable(
Stream stream,
BinaryReader reader,
long sectionHeadersStart,
int numberOfSections,
bool is64Bit,
long lookupOffset,
string dllName,
List<BinaryRelocation> relocations)
{
var lookupPos = stream.Position;
stream.Seek(lookupOffset, SeekOrigin.Begin);
while (true)
{
var entry = is64Bit ? reader.ReadUInt64() : reader.ReadUInt32();
if (entry == 0)
{
break;
}
var isOrdinal = is64Bit
? (entry & 0x8000000000000000) != 0
: (entry & 0x80000000) != 0;
if (!isOrdinal)
{
var hintNameRva = (uint)(entry & 0x7FFFFFFF);
var hintNameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, hintNameRva);
if (hintNameOffset > 0)
{
var entry = is64Bit ? reader.ReadUInt64() : reader.ReadUInt32();
if (entry == 0)
var entryPos = stream.Position;
stream.Seek(hintNameOffset + 2, SeekOrigin.Begin); // Skip hint
var funcName = ReadCString(reader);
stream.Seek(entryPos, SeekOrigin.Begin);
if (!string.IsNullOrWhiteSpace(funcName))
{
break;
}
var isOrdinal = is64Bit
? (entry & 0x8000000000000000) != 0
: (entry & 0x80000000) != 0;
if (!isOrdinal)
{
var hintNameRva = (uint)(entry & 0x7FFFFFFF);
var hintNameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, hintNameRva);
if (hintNameOffset > 0)
relocations.Add(new BinaryRelocation
{
var entryPos = stream.Position;
stream.Seek(hintNameOffset + 2, SeekOrigin.Begin); // Skip hint
var funcName = ReadCString(reader);
stream.Seek(entryPos, SeekOrigin.Begin);
relocations.Add(new BinaryRelocation
{
Address = 0,
SymbolIndex = 0,
SourceSymbol = dllName,
TargetSymbol = funcName,
IsExternal = true
});
}
Address = 0,
SymbolIndex = 0,
SourceSymbol = dllName,
TargetSymbol = funcName,
IsExternal = true,
CallKind = CallKind.Iat
});
}
}
stream.Seek(lookupPos, SeekOrigin.Begin);
}
}
await Task.CompletedTask;
_logger.LogDebug("Extracted {Count} imports from PE", relocations.Count);
return relocations;
stream.Seek(lookupPos, SeekOrigin.Begin);
}
private long RvaToFileOffset(
private static long RvaToFileOffset(
Stream stream,
BinaryReader reader,
long sectionHeadersStart,
@@ -797,7 +980,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
string scanId,
string binaryPath,
List<BinarySymbol> symbols,
List<BinaryRelocation> relocations)
List<BinaryRelocation> relocations,
IReadOnlyCollection<CallGraphEdge> extraEdges)
{
var nodesById = new Dictionary<string, CallGraphNode>(StringComparer.Ordinal);
var edges = new HashSet<CallGraphEdge>(CallGraphEdgeComparer.Instance);
@@ -826,7 +1010,10 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
// Add edges from relocations
foreach (var reloc in relocations)
{
var sourceId = $"native:{binaryName}/{reloc.SourceSymbol}";
var sourceSymbol = string.IsNullOrWhiteSpace(reloc.SourceSymbol)
? (reloc.CallKind == CallKind.Plt ? "__plt__" : "__reloc__")
: reloc.SourceSymbol;
var sourceId = $"native:{binaryName}/{sourceSymbol}";
var targetId = reloc.IsExternal
? $"native:external/{reloc.TargetSymbol}"
: $"native:{binaryName}/{reloc.TargetSymbol}";
@@ -834,10 +1021,20 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
edges.Add(new CallGraphEdge(
SourceId: sourceId,
TargetId: targetId,
CallKind: CallKind.Direct,
CallKind: reloc.CallKind,
CallSite: $"0x{reloc.Address:X}"));
}
if (extraEdges.Count > 0)
{
foreach (var edge in extraEdges)
{
edges.Add(edge);
}
}
EnsureNodesForEdges(nodesById, edges, binaryPath, binaryName);
var nodes = nodesById.Values
.Select(n => n.Trimmed())
.OrderBy(n => n.NodeId, StringComparer.Ordinal)
@@ -876,6 +1073,70 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
return provisional with { GraphDigest = digest };
}
private static void EnsureNodesForEdges(
Dictionary<string, CallGraphNode> nodesById,
IEnumerable<CallGraphEdge> edges,
string binaryPath,
string binaryName)
{
foreach (var edge in edges)
{
EnsureNode(nodesById, edge.SourceId, binaryPath, binaryName);
EnsureNode(nodesById, edge.TargetId, binaryPath, binaryName);
}
}
private static void EnsureNode(
Dictionary<string, CallGraphNode> nodesById,
string nodeId,
string binaryPath,
string binaryName)
{
if (nodesById.ContainsKey(nodeId))
{
return;
}
var (package, symbol, isExternal) = ParseNodeId(nodeId, binaryName);
var filePath = isExternal ? string.Empty : binaryPath;
var visibility = isExternal ? Visibility.Public : Visibility.Private;
nodesById[nodeId] = new CallGraphNode(
NodeId: nodeId,
Symbol: symbol,
File: filePath,
Line: 0,
Package: package,
Visibility: visibility,
IsEntrypoint: false,
EntrypointType: null,
IsSink: false,
SinkCategory: null);
}
private static (string Package, string Symbol, bool IsExternal) ParseNodeId(string nodeId, string binaryName)
{
const string Prefix = "native:";
if (!nodeId.StartsWith(Prefix, StringComparison.Ordinal))
{
return (binaryName, nodeId, false);
}
var remainder = nodeId.Substring(Prefix.Length);
var slashIndex = remainder.IndexOf('/');
if (slashIndex < 0)
{
return (binaryName, remainder, false);
}
var package = remainder.Substring(0, slashIndex);
var symbol = remainder.Substring(slashIndex + 1);
var isExternal = string.Equals(package, "external", StringComparison.Ordinal);
return (package, symbol, isExternal);
}
private static string ReadNullTerminatedString(byte[] buffer, int offset)
{
if (offset < 0 || offset >= buffer.Length)
@@ -917,4 +1178,5 @@ internal sealed class BinaryRelocation
public ulong Address { get; init; }
public bool IsExternal { get; init; }
public int SymbolIndex { get; init; }
public CallKind CallKind { get; init; } = CallKind.Direct;
}

View File

@@ -0,0 +1,100 @@
using System.Collections.Immutable;
using Gee.External.Capstone;
using Gee.External.Capstone.Arm64;
using StellaOps.Scanner.CallGraph;
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
/// <summary>
/// Disassembles ARM64 machine code with Capstone and reports call-style
/// branch instructions (BL = direct call, BLR = indirect call via register).
/// Unavailable or broken native Capstone support degrades to an empty result
/// instead of throwing.
/// </summary>
internal sealed class Arm64Disassembler
{
/// <summary>
/// Scans <paramref name="code"/> (mapped at <paramref name="baseAddress"/>)
/// and returns one <see cref="BinaryCallInstruction"/> per BL/BLR. BL calls
/// with an immediate operand are reported as <c>CallKind.Direct</c> with the
/// resolved target; BLR (no immediate) as <c>CallKind.Dynamic</c> with a
/// target address of 0.
/// </summary>
public ImmutableArray<BinaryCallInstruction> ExtractDirectCalls(
ReadOnlySpan<byte> code,
ulong baseAddress)
{
if (code.IsEmpty)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
// Capstone builds may lack ARM64 support on some hosts.
if (!CapstoneDisassembler.IsArm64Supported)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
try
{
using var disassembler = CapstoneDisassembler.CreateArm64Disassembler(
Arm64DisassembleMode.Arm | Arm64DisassembleMode.LittleEndian);
// Operand details are required by TryResolveTarget below.
disassembler.EnableInstructionDetails = true;
var instructions = disassembler.Disassemble(code.ToArray(), (long)baseAddress);
if (instructions.Length == 0)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
var calls = ImmutableArray.CreateBuilder<BinaryCallInstruction>();
foreach (var instruction in instructions)
{
if (instruction.IsSkippedData)
{
continue;
}
// Only BL/BLR are call-style branches; plain B/BR are jumps.
var isCall = instruction.Id is Arm64InstructionId.ARM64_INS_BL or Arm64InstructionId.ARM64_INS_BLR;
if (!isCall)
{
continue;
}
if (!instruction.HasDetails || instruction.Details is null)
{
continue;
}
var target = TryResolveTarget(instruction);
if (target is null)
{
// BLR: the target lives in a register and is unknown statically.
calls.Add(new BinaryCallInstruction(
(ulong)instruction.Address,
0,
CallKind.Dynamic));
continue;
}
calls.Add(new BinaryCallInstruction(
(ulong)instruction.Address,
target.Value,
CallKind.Direct));
}
return calls.ToImmutable();
}
// The native capstone library may be missing or mismatched at runtime;
// each failure mode is treated as "disassembly unavailable".
catch (DllNotFoundException)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
catch (TypeInitializationException)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
catch (BadImageFormatException)
{
return ImmutableArray<BinaryCallInstruction>.Empty;
}
}
// Returns the first immediate operand (the BL branch target), or null when
// the instruction carries no immediate (e.g. BLR through a register).
private static ulong? TryResolveTarget(Arm64Instruction instruction)
{
foreach (var operand in instruction.Details!.Operands)
{
if (operand.Type == Arm64OperandType.Immediate)
{
return (ulong)operand.Immediate;
}
}
return null;
}
}

View File

@@ -0,0 +1,26 @@
using StellaOps.Scanner.CallGraph;
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
/// <summary>
/// CPU architecture detected from a binary's file header; selects which
/// disassembler (if any) is used for its text section.
/// </summary>
internal enum BinaryArchitecture
{
Unknown,
X86,
X64,
Arm64
}
/// <summary>
/// The executable text section of a binary: raw bytes, the virtual address
/// it is mapped at, pointer width (32 or 64), detected architecture, and the
/// section's name (".text" or "__text").
/// </summary>
internal sealed record BinaryTextSection(
byte[] Bytes,
ulong VirtualAddress,
int Bitness,
BinaryArchitecture Architecture,
string SectionName)
{
// First address past the end of the section.
// NOTE(review): assumes VirtualAddress + Bytes.Length does not wrap a
// ulong — confirm headers are validated upstream.
public ulong EndAddress => VirtualAddress + (ulong)Bytes.Length;
}
/// <summary>
/// One call-style instruction found by disassembly: its address, the branch
/// target (0 when statically unknown, e.g. register-indirect calls), and the
/// kind of call.
/// </summary>
internal sealed record BinaryCallInstruction(
ulong InstructionAddress,
ulong TargetAddress,
CallKind CallKind);

View File

@@ -0,0 +1,395 @@
using System.Text;
using StellaOps.Scanner.CallGraph.Binary;
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
internal static class BinaryTextSectionReader
{
/// <summary>
/// Loads the executable text section of the binary at <paramref name="path"/>,
/// dispatching on the detected container format. Returns null for unsupported
/// formats or when no usable text section is found.
/// </summary>
public static async Task<BinaryTextSection?> TryReadAsync(
    string path,
    BinaryFormat format,
    CancellationToken ct)
{
    ct.ThrowIfCancellationRequested();
    switch (format)
    {
        case BinaryFormat.Elf:
            return await TryReadElfTextSectionAsync(path, ct);
        case BinaryFormat.Pe:
            return await TryReadPeTextSectionAsync(path, ct);
        case BinaryFormat.MachO:
            return await TryReadMachOTextSectionAsync(path, ct);
        default:
            return null;
    }
}
/// <summary>
/// Reads the .text section of a little-endian ELF image: bytes, virtual
/// address, bitness, and detected architecture. Returns null for non-ELF
/// files, big-endian images, or images without a usable .text section.
/// </summary>
private static async Task<BinaryTextSection?> TryReadElfTextSectionAsync(string path, CancellationToken ct)
{
    using var stream = File.OpenRead(path);
    using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
    var ident = reader.ReadBytes(16);
    if (ident.Length < 16)
    {
        return null;
    }
    // Robustness fix: validate the ELF magic (0x7F 'E' 'L' 'F') before
    // trusting any header fields; previously arbitrary files were parsed
    // as if they were ELF images.
    if (ident[0] != 0x7F || ident[1] != (byte)'E' || ident[2] != (byte)'L' || ident[3] != (byte)'F')
    {
        return null;
    }
    var is64Bit = ident[4] == 2;        // EI_CLASS: 2 = ELFCLASS64
    var isLittleEndian = ident[5] == 1; // EI_DATA: 1 = ELFDATA2LSB
    if (!isLittleEndian)
    {
        // Big-endian fields would require byte swapping; not supported.
        return null;
    }
    var eType = reader.ReadUInt16();
    var eMachine = reader.ReadUInt16();
    _ = eType;
    var architecture = eMachine switch
    {
        3 => BinaryArchitecture.X86,      // EM_386
        62 => BinaryArchitecture.X64,     // EM_X86_64
        183 => BinaryArchitecture.Arm64,  // EM_AARCH64
        _ => BinaryArchitecture.Unknown
    };
    // e_shoff
    stream.Seek(is64Bit ? 40 : 32, SeekOrigin.Begin);
    var sectionHeaderOffset = is64Bit ? reader.ReadInt64() : reader.ReadInt32();
    // e_shentsize, e_shnum, e_shstrndx
    stream.Seek(is64Bit ? 58 : 46, SeekOrigin.Begin);
    var sectionHeaderSize = reader.ReadUInt16();
    var sectionHeaderCount = reader.ReadUInt16();
    var sectionNameIndex = reader.ReadUInt16();
    if (sectionHeaderOffset <= 0 || sectionHeaderCount == 0)
    {
        return null;
    }
    // Read the section-name string table (.shstrtab).
    var nameTableOffset = ReadElfSectionOffset(reader, stream, sectionHeaderOffset, sectionHeaderSize, sectionNameIndex, is64Bit);
    var nameTableSize = ReadElfSectionSize(reader, stream, sectionHeaderOffset, sectionHeaderSize, sectionNameIndex, is64Bit);
    if (nameTableOffset <= 0 || nameTableSize <= 0)
    {
        return null;
    }
    stream.Seek(nameTableOffset, SeekOrigin.Begin);
    var nameTable = reader.ReadBytes((int)nameTableSize);
    for (int i = 0; i < sectionHeaderCount; i++)
    {
        ct.ThrowIfCancellationRequested();
        stream.Seek(sectionHeaderOffset + i * sectionHeaderSize, SeekOrigin.Begin);
        var nameIndex = reader.ReadUInt32();
        reader.ReadUInt32(); // sh_type
        ulong sectionAddress;
        long sectionOffset;
        long sectionSize;
        if (is64Bit)
        {
            reader.ReadUInt64(); // sh_flags
            sectionAddress = reader.ReadUInt64();
            sectionOffset = reader.ReadInt64();
            sectionSize = reader.ReadInt64();
        }
        else
        {
            reader.ReadUInt32(); // sh_flags
            sectionAddress = reader.ReadUInt32();
            sectionOffset = reader.ReadInt32();
            sectionSize = reader.ReadInt32();
        }
        var name = ReadNullTerminatedString(nameTable, (int)nameIndex);
        if (string.Equals(name, ".text", StringComparison.Ordinal))
        {
            if (sectionOffset <= 0 || sectionSize <= 0)
            {
                return null;
            }
            stream.Seek(sectionOffset, SeekOrigin.Begin);
            var bytes = reader.ReadBytes((int)sectionSize);
            await Task.CompletedTask;
            return new BinaryTextSection(
                bytes,
                sectionAddress,
                is64Bit ? 64 : 32,
                architecture,
                name);
        }
    }
    return null;
}
// Reads the sh_offset field of the section header at the given table index.
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
    => ReadElfSectionOffset(reader, stream, sectionHeaderOffset + index * entrySize, is64Bit);

// Reads the sh_offset field (at +24 for ELF64, +16 for ELF32) of the section
// header starting at the given stream position.
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
    var fieldOffset = is64Bit ? 24 : 16;
    stream.Seek(position + fieldOffset, SeekOrigin.Begin);
    if (is64Bit)
    {
        return reader.ReadInt64();
    }
    return reader.ReadInt32();
}
// Reads the sh_size field of the section header at the given table index.
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
    => ReadElfSectionSize(reader, stream, sectionHeaderOffset + index * entrySize, is64Bit);

// Reads the sh_size field (at +32 for ELF64, +20 for ELF32) of the section
// header starting at the given stream position.
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
    var fieldOffset = is64Bit ? 32 : 20;
    stream.Seek(position + fieldOffset, SeekOrigin.Begin);
    if (is64Bit)
    {
        return reader.ReadInt64();
    }
    return reader.ReadInt32();
}
/// <summary>
/// Reads the .text section of a PE image: raw bytes, its RVA, bitness, and
/// machine architecture. Returns null when the file is not a valid PE image
/// or has no usable .text section.
/// </summary>
private static async Task<BinaryTextSection?> TryReadPeTextSectionAsync(string path, CancellationToken ct)
{
    using var stream = File.OpenRead(path);
    using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
    stream.Seek(0x3C, SeekOrigin.Begin); // e_lfanew in the DOS header
    var peOffset = reader.ReadInt32();
    stream.Seek(peOffset, SeekOrigin.Begin);
    var signature = reader.ReadUInt32();
    if (signature != 0x00004550) // "PE\0\0"
    {
        return null;
    }
    var machine = reader.ReadUInt16();
    var numberOfSections = reader.ReadUInt16();
    reader.ReadUInt32(); // timestamp
    reader.ReadUInt32(); // symbol table ptr
    reader.ReadUInt32(); // number of symbols
    var optionalHeaderSize = reader.ReadUInt16();
    reader.ReadUInt16(); // characteristics
    var architecture = machine switch
    {
        0x014c => BinaryArchitecture.X86,
        0x8664 => BinaryArchitecture.X64,
        0xaa64 => BinaryArchitecture.Arm64,
        _ => BinaryArchitecture.Unknown
    };
    if (optionalHeaderSize == 0)
    {
        return null;
    }
    var optionalHeaderStart = stream.Position;
    var magic = reader.ReadUInt16();
    // PE32+ (0x20b) means a 64-bit image. The dead "_ = is64Bit;" discard
    // from the original was removed: is64Bit is genuinely used below.
    var is64Bit = magic == 0x20b;
    // Section headers start immediately after the optional header.
    stream.Seek(optionalHeaderStart + optionalHeaderSize, SeekOrigin.Begin);
    for (int i = 0; i < numberOfSections; i++)
    {
        ct.ThrowIfCancellationRequested();
        var nameBytes = reader.ReadBytes(8);
        var name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
        reader.ReadUInt32(); // virtual size (unused)
        var virtualAddress = reader.ReadUInt32();
        var sizeOfRawData = reader.ReadUInt32();
        var pointerToRawData = reader.ReadUInt32();
        reader.ReadUInt32(); // pointer to relocations
        reader.ReadUInt32(); // pointer to line numbers
        reader.ReadUInt16(); // number of relocations
        reader.ReadUInt16(); // number of line numbers
        reader.ReadUInt32(); // characteristics
        if (!string.Equals(name, ".text", StringComparison.Ordinal))
        {
            continue;
        }
        if (pointerToRawData == 0 || sizeOfRawData == 0)
        {
            return null;
        }
        stream.Seek(pointerToRawData, SeekOrigin.Begin);
        var bytes = reader.ReadBytes((int)sizeOfRawData);
        await Task.CompletedTask;
        return new BinaryTextSection(
            bytes,
            virtualAddress,
            is64Bit ? 64 : 32,
            architecture,
            name);
    }
    return null;
}
/// <summary>
/// Reads the raw bytes of the __text section from a little-endian Mach-O image.
/// Returns null for byte-swapped (opposite-endian) images, images with no
/// segment load commands, or images lacking a usable __text section.
/// </summary>
/// <param name="path">Path to the Mach-O binary on disk.</param>
/// <param name="ct">Cancellation token, checked before I/O and per load command.</param>
private static async Task<BinaryTextSection?> TryReadMachOTextSectionAsync(string path, CancellationToken ct)
{
    // Fix: the token was previously accepted but never observed.
    ct.ThrowIfCancellationRequested();
    using var stream = File.OpenRead(path);
    using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
    var magic = reader.ReadUInt32();
    // MH_MAGIC_64 (0xFEEDFACF) in native or swapped byte order marks a 64-bit header.
    var is64Bit = magic is 0xFEEDFACF or 0xCFFAEDFE;
    // Swapped magics mean the file's endianness differs from the reader's; unsupported.
    var isSwapped = magic is 0xCEFAEDFE or 0xCFFAEDFE;
    if (isSwapped)
    {
        return null;
    }
    // NOTE(review): any other (non-Mach-O) magic falls through and is parsed as a
    // 32-bit little-endian header - assumes the caller pre-validated the magic; confirm.
    var cpuType = reader.ReadInt32();
    reader.ReadInt32(); // cpusubtype
    reader.ReadUInt32(); // filetype
    var ncmds = reader.ReadUInt32();
    reader.ReadUInt32(); // sizeofcmds
    reader.ReadUInt32(); // flags
    if (is64Bit)
    {
        reader.ReadUInt32(); // reserved (present only in mach_header_64)
    }
    var architecture = cpuType switch
    {
        7 => BinaryArchitecture.X86,            // CPU_TYPE_I386
        0x01000007 => BinaryArchitecture.X64,   // CPU_TYPE_X86_64
        0x0100000C => BinaryArchitecture.Arm64, // CPU_TYPE_ARM64
        _ => BinaryArchitecture.Unknown
    };
    for (int i = 0; i < ncmds; i++)
    {
        ct.ThrowIfCancellationRequested();
        var cmdStart = stream.Position;
        var cmd = reader.ReadUInt32();
        var cmdsize = reader.ReadUInt32();
        // LC_SEGMENT_64 = 0x19, LC_SEGMENT = 0x01.
        var isSegment = cmd == (is64Bit ? 0x19u : 0x1u);
        if (!isSegment)
        {
            stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
            continue;
        }
        var segName = ReadFixedString(reader, 16);
        if (is64Bit)
        {
            // segment_command_64 layout after segname.
            reader.ReadUInt64(); // vmaddr
            reader.ReadUInt64(); // vmsize
            reader.ReadUInt64(); // fileoff
            reader.ReadUInt64(); // filesize
            reader.ReadInt32(); // maxprot
            reader.ReadInt32(); // initprot
            var nsects = reader.ReadUInt32();
            reader.ReadUInt32(); // flags
            for (int s = 0; s < nsects; s++)
            {
                // section_64 layout.
                var sectName = ReadFixedString(reader, 16);
                var sectSegName = ReadFixedString(reader, 16);
                var addr = reader.ReadUInt64();
                var size = reader.ReadUInt64();
                var offset = reader.ReadUInt32();
                reader.ReadUInt32(); // align
                reader.ReadUInt32(); // reloff
                reader.ReadUInt32(); // nreloc
                reader.ReadUInt32(); // flags
                reader.ReadUInt32(); // reserved1
                reader.ReadUInt32(); // reserved2
                reader.ReadUInt32(); // reserved3
                if (!string.Equals(sectName, "__text", StringComparison.Ordinal))
                {
                    continue;
                }
                // NOTE(review): size is truncated to int here; sections >2 GiB would
                // throw/overflow in ReadBytes - assumed not to occur in practice.
                stream.Seek(offset, SeekOrigin.Begin);
                var bytes = reader.ReadBytes((int)size);
                await Task.CompletedTask;
                return new BinaryTextSection(
                    bytes,
                    addr,
                    64,
                    architecture,
                    sectName);
            }
        }
        else
        {
            // 32-bit segment_command layout after segname.
            reader.ReadUInt32(); // vmaddr
            reader.ReadUInt32(); // vmsize
            reader.ReadUInt32(); // fileoff
            reader.ReadUInt32(); // filesize
            reader.ReadInt32(); // maxprot
            reader.ReadInt32(); // initprot
            var nsects = reader.ReadUInt32();
            reader.ReadUInt32(); // flags
            for (int s = 0; s < nsects; s++)
            {
                // 32-bit section layout (no reserved3 field).
                var sectName = ReadFixedString(reader, 16);
                var sectSegName = ReadFixedString(reader, 16);
                var addr = reader.ReadUInt32();
                var size = reader.ReadUInt32();
                var offset = reader.ReadUInt32();
                reader.ReadUInt32(); // align
                reader.ReadUInt32(); // reloff
                reader.ReadUInt32(); // nreloc
                reader.ReadUInt32(); // flags
                reader.ReadUInt32(); // reserved1
                reader.ReadUInt32(); // reserved2
                if (!string.Equals(sectName, "__text", StringComparison.Ordinal))
                {
                    continue;
                }
                stream.Seek(offset, SeekOrigin.Begin);
                var bytes = reader.ReadBytes((int)size);
                await Task.CompletedTask;
                return new BinaryTextSection(
                    bytes,
                    addr,
                    32,
                    architecture,
                    sectName);
            }
        }
        // Resume at the next load command regardless of where section parsing left us.
        stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
    }
    return null;
}
/// <summary>
/// Reads a fixed-width ASCII field (e.g. a Mach-O segment/section name) and
/// returns the text up to, but excluding, the first NUL byte.
/// </summary>
private static string ReadFixedString(BinaryReader reader, int length)
{
    var raw = reader.ReadBytes(length);
    int end;
    for (end = 0; end < raw.Length; end++)
    {
        if (raw[end] == 0)
        {
            break;
        }
    }
    return Encoding.ASCII.GetString(raw, 0, end);
}
/// <summary>
/// Decodes a NUL-terminated UTF-8 string starting at <paramref name="offset"/>.
/// Out-of-range offsets yield an empty string rather than throwing; a missing
/// terminator reads through to the end of the buffer.
/// </summary>
private static string ReadNullTerminatedString(byte[] buffer, int offset)
{
    if (offset < 0 || offset >= buffer.Length)
    {
        return string.Empty;
    }
    var terminator = Array.IndexOf(buffer, (byte)0, offset);
    var end = terminator >= 0 ? terminator : buffer.Length;
    return Encoding.UTF8.GetString(buffer, offset, end - offset);
}
}

View File

@@ -0,0 +1,146 @@
using System.Collections.Immutable;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.CallGraph.Binary;
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
/// <summary>
/// Extracts deterministic call-graph edges from a binary's text section by
/// disassembling direct call instructions and resolving their addresses to
/// the nearest preceding symbol.
/// </summary>
internal sealed class DirectCallExtractor
{
    private readonly X86Disassembler _x86Disassembler;
    private readonly Arm64Disassembler _arm64Disassembler;

    public DirectCallExtractor(
        X86Disassembler? x86Disassembler = null,
        Arm64Disassembler? arm64Disassembler = null)
    {
        _x86Disassembler = x86Disassembler ?? new X86Disassembler();
        _arm64Disassembler = arm64Disassembler ?? new Arm64Disassembler();
    }

    /// <summary>
    /// Disassembles <paramref name="textSection"/> and returns one edge per
    /// direct call. Targets outside the text section's address range are
    /// attributed to the synthetic "external" binary. Output order is stable
    /// (ordinal sort) so repeated runs are byte-identical.
    /// </summary>
    public ImmutableArray<CallGraphEdge> Extract(
        BinaryTextSection textSection,
        IReadOnlyList<BinarySymbol> symbols,
        string binaryName)
    {
        ArgumentNullException.ThrowIfNull(textSection);
        ArgumentNullException.ThrowIfNull(symbols);

        if (textSection.Bytes.Length == 0)
        {
            return ImmutableArray<CallGraphEdge>.Empty;
        }

        // Sort by ascending address (ties broken by name) so ResolveSymbol can
        // scan forward and stop at the first symbol past the query address.
        var orderedSymbols = symbols
            .Where(symbol => symbol is not null)
            .OrderBy(symbol => symbol.Address)
            .ThenBy(symbol => symbol.Name, StringComparer.Ordinal)
            .ToArray();

        var calls = textSection.Architecture switch
        {
            BinaryArchitecture.X86 => _x86Disassembler.ExtractDirectCalls(
                textSection.Bytes,
                textSection.VirtualAddress,
                32),
            BinaryArchitecture.X64 => _x86Disassembler.ExtractDirectCalls(
                textSection.Bytes,
                textSection.VirtualAddress,
                64),
            BinaryArchitecture.Arm64 => _arm64Disassembler.ExtractDirectCalls(
                textSection.Bytes,
                textSection.VirtualAddress),
            _ => ImmutableArray<BinaryCallInstruction>.Empty
        };

        if (calls.IsDefaultOrEmpty)
        {
            return ImmutableArray<CallGraphEdge>.Empty;
        }

        var edges = ImmutableArray.CreateBuilder<CallGraphEdge>(calls.Length);
        foreach (var call in calls)
        {
            var sourceSymbol = ResolveSymbol(orderedSymbols, call.InstructionAddress);
            var targetSymbol = ResolveSymbol(orderedSymbols, call.TargetAddress);
            var targetIsInternal = call.TargetAddress >= textSection.VirtualAddress
                && call.TargetAddress < textSection.EndAddress;

            var sourceId = BuildNodeId(binaryName, sourceSymbol, call.InstructionAddress, isExternal: false);
            var targetId = BuildNodeId(
                targetIsInternal ? binaryName : "external",
                targetSymbol,
                call.TargetAddress,
                isExternal: !targetIsInternal);

            edges.Add(new CallGraphEdge(
                SourceId: sourceId,
                TargetId: targetId,
                CallKind: call.CallKind,
                CallSite: $"0x{call.InstructionAddress:X}"));
        }

        // Deterministic ordering keeps emitted graphs reproducible.
        return edges
            .OrderBy(edge => edge.SourceId, StringComparer.Ordinal)
            .ThenBy(edge => edge.TargetId, StringComparer.Ordinal)
            .ThenBy(edge => edge.CallKind.ToString(), StringComparer.Ordinal)
            .ThenBy(edge => edge.CallSite ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>
    /// Resolves <paramref name="address"/> to the name of the containing symbol.
    /// <paramref name="symbols"/> must be sorted by ascending address. Returns
    /// null when no symbol precedes the address, or when the nearest preceding
    /// symbol has a known size that the address falls beyond.
    /// </summary>
    private static string? ResolveSymbol(IReadOnlyList<BinarySymbol> symbols, ulong address)
    {
        BinarySymbol? best = null;
        foreach (var symbol in symbols)
        {
            if (symbol.Address > address)
            {
                break; // sorted input: no later symbol can contain the address
            }
            if (symbol.Address == address)
            {
                return symbol.Name;
            }
            best = symbol;
        }

        if (best is null)
        {
            return null;
        }

        // Fix: check the size bound on the same symbol whose name we return.
        // Previously a second O(n) scan (FirstOrDefault by address) could pick a
        // *different* symbol sharing the same address and use its size instead.
        if (best.Size > 0 && address >= best.Address + best.Size)
        {
            return null;
        }

        return best.Name;
    }

    /// <summary>Builds a stable node id: "native:{binary}/{symbol-or-addr_HEX}".</summary>
    private static string BuildNodeId(
        string binaryName,
        string? symbol,
        ulong address,
        bool isExternal)
    {
        var safeSymbol = string.IsNullOrWhiteSpace(symbol) ? $"addr_{address:X}" : symbol!;
        if (isExternal)
        {
            return $"native:external/{safeSymbol}";
        }
        return $"native:{binaryName}/{safeSymbol}";
    }
}

View File

@@ -0,0 +1,53 @@
using System.Collections.Immutable;
using Iced.Intel;
using StellaOps.Scanner.CallGraph;
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
/// <summary>
/// Extracts direct near call/jump targets from x86/x64 machine code using the
/// Iced decoder. Decoding stops at the first invalid instruction.
/// </summary>
internal sealed class X86Disassembler
{
    /// <summary>
    /// Decodes <paramref name="code"/> starting at <paramref name="baseAddress"/>
    /// and returns every near call or near jump whose operand is an immediate
    /// branch target (recorded as <see cref="CallKind.Direct"/>).
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">bitness is not 16, 32, or 64.</exception>
    public ImmutableArray<BinaryCallInstruction> ExtractDirectCalls(
        ReadOnlySpan<byte> code,
        ulong baseAddress,
        int bitness)
    {
        if (bitness != 16 && bitness != 32 && bitness != 64)
        {
            throw new ArgumentOutOfRangeException(nameof(bitness), "Bitness must be 16, 32, or 64.");
        }

        if (code.IsEmpty)
        {
            return ImmutableArray<BinaryCallInstruction>.Empty;
        }

        // Iced's reader needs a materialized array; the span cannot be captured.
        var byteReader = new ByteArrayCodeReader(code.ToArray());
        var decoder = Decoder.Create(bitness, byteReader);
        decoder.IP = baseAddress;

        var result = ImmutableArray.CreateBuilder<BinaryCallInstruction>();
        while (byteReader.CanReadByte)
        {
            decoder.Decode(out var insn);
            if (insn.IsInvalid)
            {
                break;
            }

            var hasImmediateTarget =
                insn.Op0Kind is OpKind.NearBranch16 or OpKind.NearBranch32 or OpKind.NearBranch64;
            if ((insn.IsCallNear || insn.IsJmpNear) && hasImmediateTarget)
            {
                result.Add(new BinaryCallInstruction(insn.IP, insn.NearBranchTarget, CallKind.Direct));
            }
        }

        return result.ToImmutable();
    }
}

View File

@@ -123,7 +123,9 @@ public enum CallKind
Virtual,
Delegate,
Reflection,
Dynamic
Dynamic,
Plt,
Iat
}
[JsonConverter(typeof(JsonStringEnumConverter<EntrypointType>))]

View File

@@ -12,6 +12,8 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Gee.External.Capstone" Version="2.3.0" />
<PackageReference Include="Iced" Version="1.21.0" />
<PackageReference Include="Microsoft.Build.Locator" Version="1.10.0" />
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="4.14.0" />
<PackageReference Include="Microsoft.CodeAnalysis.Workspaces.MSBuild" Version="4.14.0" />

View File

@@ -6,14 +6,11 @@ using CycloneDX.Models;
namespace StellaOps.Scanner.Emit.Composition;
/// <summary>
/// Extension methods for CycloneDX 1.7 support.
/// Workaround for CycloneDX.Core not yet exposing SpecificationVersion.v1_7.
/// Helpers and media type constants for CycloneDX 1.7.
/// </summary>
/// <remarks>
/// Sprint: SPRINT_5000_0001_0001 - Advisory Alignment (CycloneDX 1.7 Upgrade)
///
/// Once CycloneDX.Core adds v1_7 support, this extension can be removed
/// and the code can use SpecificationVersion.v1_7 directly.
/// Keep upgrade helpers for backward-compatibility with 1.6 inputs.
/// </remarks>
public static class CycloneDx17Extensions
{

View File

@@ -47,12 +47,38 @@ public sealed record CycloneDxArtifact
public required string ProtobufMediaType { get; init; }
}
/// <summary>
/// An SPDX SBOM artifact: the JSON-LD payload plus an optional tag-value
/// rendering, each with its sha256 hex digest and media type.
/// </summary>
public sealed record SpdxArtifact
{
/// <summary>SBOM view this artifact represents (e.g. inventory).</summary>
public required SbomView View { get; init; }
/// <summary>Normalized generation timestamp of the artifact.</summary>
public required DateTimeOffset GeneratedAt { get; init; }
/// <summary>Serialized SPDX JSON-LD bytes.</summary>
public required byte[] JsonBytes { get; init; }
/// <summary>sha256 (hex) of <see cref="JsonBytes"/>.</summary>
public required string JsonSha256 { get; init; }
/// <summary>
/// Canonical content hash (sha256, hex) of the SPDX JSON-LD payload.
/// </summary>
public required string ContentHash { get; init; }
/// <summary>Media type of the JSON payload.</summary>
public required string JsonMediaType { get; init; }
/// <summary>Optional SPDX tag-value rendering; null when not requested.</summary>
public byte[]? TagValueBytes { get; init; }
/// <summary>sha256 (hex) of <see cref="TagValueBytes"/>; null when absent.</summary>
public string? TagValueSha256 { get; init; }
/// <summary>Media type of the tag-value payload; null when absent.</summary>
public string? TagValueMediaType { get; init; }
}
public sealed record SbomCompositionResult
{
public required CycloneDxArtifact Inventory { get; init; }
public CycloneDxArtifact? Usage { get; init; }
public SpdxArtifact? SpdxInventory { get; init; }
public required ComponentGraph Graph { get; init; }
/// <summary>

View File

@@ -0,0 +1,413 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Canonical.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Spdx;
using StellaOps.Scanner.Emit.Spdx.Models;
using StellaOps.Scanner.Emit.Spdx.Serialization;
namespace StellaOps.Scanner.Emit.Composition;
/// <summary>
/// Composes SPDX SBOM artifacts from an SBOM composition request.
/// </summary>
public interface ISpdxComposer
{
/// <summary>Builds the SPDX artifact synchronously.</summary>
SpdxArtifact Compose(
SbomCompositionRequest request,
SpdxCompositionOptions options,
CancellationToken cancellationToken = default);
/// <summary>Async counterpart of <see cref="Compose"/>.</summary>
ValueTask<SpdxArtifact> ComposeAsync(
SbomCompositionRequest request,
SpdxCompositionOptions options,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Options controlling SPDX composition: creator identity, document namespace,
/// optional outputs, and license-list validation version.
/// </summary>
public sealed record SpdxCompositionOptions
{
/// <summary>Tool name used in creation info when the request carries none.</summary>
public string CreatorTool { get; init; } = "StellaOps-Scanner";
/// <summary>Optional organization creator; omitted when null/blank.</summary>
public string? CreatorOrganization { get; init; }
/// <summary>Base URI for the SPDX document namespace.</summary>
public string NamespaceBase { get; init; } = "https://stellaops.io/spdx";
// NOTE(review): IncludeFiles/IncludeSnippets are not read anywhere in this
// chunk - confirm they are consumed elsewhere before relying on them.
public bool IncludeFiles { get; init; }
public bool IncludeSnippets { get; init; }
/// <summary>When true, also emit an SPDX 2.3 tag-value rendering.</summary>
public bool IncludeTagValue { get; init; }
/// <summary>License list version used to validate license identifiers.</summary>
public SpdxLicenseListVersion LicenseListVersion { get; init; } = SpdxLicenseListVersion.V3_21;
/// <summary>SPDX profile conformance identifiers emitted on the document.</summary>
public ImmutableArray<string> ProfileConformance { get; init; } = ImmutableArray.Create("core", "software");
}
/// <summary>
/// Composes SPDX 3.0.1 JSON-LD (and optional SPDX 2.3 tag-value) SBOM artifacts
/// from a composition request. Output is deterministic: element and
/// relationship collections are ordinally sorted before serialization.
/// </summary>
public sealed class SpdxComposer : ISpdxComposer
{
    private const string JsonMediaType = "application/spdx+json; version=3.0.1";
    private const string TagValueMediaType = "text/spdx; version=2.3";

    /// <inheritdoc />
    public SpdxArtifact Compose(
        SbomCompositionRequest request,
        SpdxCompositionOptions options,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(options);
        // Fix: honor the caller's token; it was previously accepted but ignored.
        cancellationToken.ThrowIfCancellationRequested();

        var graph = ComponentGraphBuilder.Build(request.LayerFragments);
        var generatedAt = ScannerTimestamps.Normalize(request.GeneratedAt);
        var idBuilder = new SpdxIdBuilder(options.NamespaceBase, request.Image.ImageDigest);
        var licenseList = SpdxLicenseListProvider.Get(options.LicenseListVersion);
        var creationInfo = BuildCreationInfo(request, options, generatedAt);
        var document = BuildDocument(request, options, graph, idBuilder, creationInfo, licenseList);

        var jsonBytes = SpdxJsonLdSerializer.Serialize(document);
        var jsonHash = CanonJson.Sha256Hex(jsonBytes);

        byte[]? tagBytes = null;
        string? tagHash = null;
        if (options.IncludeTagValue)
        {
            cancellationToken.ThrowIfCancellationRequested();
            tagBytes = SpdxTagValueSerializer.Serialize(document);
            tagHash = CanonJson.Sha256Hex(tagBytes);
        }

        return new SpdxArtifact
        {
            View = SbomView.Inventory,
            GeneratedAt = generatedAt,
            JsonBytes = jsonBytes,
            JsonSha256 = jsonHash,
            // ContentHash mirrors JsonSha256: the JSON-LD payload is the canonical form.
            ContentHash = jsonHash,
            JsonMediaType = JsonMediaType,
            TagValueBytes = tagBytes,
            TagValueSha256 = tagHash,
            TagValueMediaType = tagBytes is null ? null : TagValueMediaType
        };
    }

    /// <summary>Async facade over the synchronous <see cref="Compose"/>.</summary>
    public ValueTask<SpdxArtifact> ComposeAsync(
        SbomCompositionRequest request,
        SpdxCompositionOptions options,
        CancellationToken cancellationToken = default)
        => ValueTask.FromResult(Compose(request, options, cancellationToken));

    /// <summary>
    /// Builds SPDX creation info. The request's generator name (optionally
    /// suffixed with its version) takes precedence over the configured tool.
    /// </summary>
    private static SpdxCreationInfo BuildCreationInfo(
        SbomCompositionRequest request,
        SpdxCompositionOptions options,
        DateTimeOffset generatedAt)
    {
        var creators = ImmutableArray.CreateBuilder<string>();
        var toolName = !string.IsNullOrWhiteSpace(request.GeneratorName)
            ? request.GeneratorName!.Trim()
            : options.CreatorTool;
        if (!string.IsNullOrWhiteSpace(toolName))
        {
            var toolLabel = !string.IsNullOrWhiteSpace(request.GeneratorVersion)
                ? $"{toolName}-{request.GeneratorVersion!.Trim()}"
                : toolName;
            creators.Add($"Tool: {toolLabel}");
        }

        if (!string.IsNullOrWhiteSpace(options.CreatorOrganization))
        {
            creators.Add($"Organization: {options.CreatorOrganization!.Trim()}");
        }

        return new SpdxCreationInfo
        {
            Created = generatedAt,
            Creators = creators.ToImmutable(),
            SpecVersion = SpdxDefaults.SpecVersion
        };
    }

    /// <summary>
    /// Assembles the SPDX document: a root package for the image, one package
    /// per aggregated component, the sbom envelope, and all relationships.
    /// </summary>
    private static SpdxDocument BuildDocument(
        SbomCompositionRequest request,
        SpdxCompositionOptions options,
        ComponentGraph graph,
        SpdxIdBuilder idBuilder,
        SpdxCreationInfo creationInfo,
        SpdxLicenseList licenseList)
    {
        var packages = new List<SpdxPackage>();
        var packageIdMap = new Dictionary<string, string>(StringComparer.Ordinal);

        var rootPackage = BuildRootPackage(request.Image, idBuilder);
        packages.Add(rootPackage);

        foreach (var component in graph.Components)
        {
            var package = BuildComponentPackage(component, idBuilder, licenseList);
            packages.Add(package);
            packageIdMap[component.Identity.Key] = package.SpdxId;
        }

        // Renamed from rootElementIds: this holds ALL element ids (root included),
        // ordinally sorted for deterministic output.
        var allElementIds = packages
            .Select(static pkg => pkg.SpdxId)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();

        var sbom = new SpdxSbom
        {
            SpdxId = idBuilder.SbomId,
            Name = "software-sbom",
            RootElements = new[] { rootPackage.SpdxId }.ToImmutableArray(),
            Elements = allElementIds,
            SbomTypes = new[] { "build" }.ToImmutableArray()
        };

        var relationships = BuildRelationships(idBuilder, graph, rootPackage, packageIdMap);
        var name = request.Image.ImageReference ?? request.Image.Repository ?? request.Image.ImageDigest;

        return new SpdxDocument
        {
            DocumentNamespace = idBuilder.DocumentNamespace,
            Name = $"SBOM for {name}",
            CreationInfo = creationInfo,
            Sbom = sbom,
            Elements = packages.Cast<SpdxElement>().ToImmutableArray(),
            Relationships = relationships,
            ProfileConformance = options.ProfileConformance
        };
    }

    /// <summary>
    /// Builds relationships: document describes root; root dependsOn every
    /// component that nothing else depends on; each component dependsOn its
    /// resolved dependencies. Result is ordinally sorted for determinism.
    /// </summary>
    private static ImmutableArray<SpdxRelationship> BuildRelationships(
        SpdxIdBuilder idBuilder,
        ComponentGraph graph,
        SpdxPackage rootPackage,
        IReadOnlyDictionary<string, string> packageIdMap)
    {
        var relationships = new List<SpdxRelationship>();
        var documentId = idBuilder.DocumentNamespace;
        relationships.Add(new SpdxRelationship
        {
            SpdxId = idBuilder.CreateRelationshipId(documentId, "describes", rootPackage.SpdxId),
            FromElement = documentId,
            Type = SpdxRelationshipType.Describes,
            ToElements = ImmutableArray.Create(rootPackage.SpdxId)
        });

        // Components that appear as someone's dependency are not root dependencies.
        var dependencyTargets = new HashSet<string>(StringComparer.Ordinal);
        foreach (var component in graph.Components)
        {
            foreach (var dependencyKey in component.Dependencies)
            {
                if (packageIdMap.ContainsKey(dependencyKey))
                {
                    dependencyTargets.Add(dependencyKey);
                }
            }
        }

        var rootDependencies = graph.Components
            .Where(component => !dependencyTargets.Contains(component.Identity.Key))
            .OrderBy(component => component.Identity.Key, StringComparer.Ordinal)
            .ToArray();
        foreach (var component in rootDependencies)
        {
            if (!packageIdMap.TryGetValue(component.Identity.Key, out var targetId))
            {
                continue;
            }
            relationships.Add(new SpdxRelationship
            {
                SpdxId = idBuilder.CreateRelationshipId(rootPackage.SpdxId, "dependsOn", targetId),
                FromElement = rootPackage.SpdxId,
                Type = SpdxRelationshipType.DependsOn,
                ToElements = ImmutableArray.Create(targetId)
            });
        }

        foreach (var component in graph.Components.OrderBy(component => component.Identity.Key, StringComparer.Ordinal))
        {
            if (!packageIdMap.TryGetValue(component.Identity.Key, out var fromId))
            {
                continue;
            }
            // Dependencies without a known package id are silently skipped.
            var deps = component.Dependencies
                .Where(packageIdMap.ContainsKey)
                .OrderBy(key => key, StringComparer.Ordinal)
                .ToArray();
            foreach (var depKey in deps)
            {
                var toId = packageIdMap[depKey];
                relationships.Add(new SpdxRelationship
                {
                    SpdxId = idBuilder.CreateRelationshipId(fromId, "dependsOn", toId),
                    FromElement = fromId,
                    Type = SpdxRelationshipType.DependsOn,
                    ToElements = ImmutableArray.Create(toId)
                });
            }
        }

        return relationships
            .OrderBy(rel => rel.FromElement, StringComparer.Ordinal)
            .ThenBy(rel => rel.Type)
            .ThenBy(rel => rel.ToElements.FirstOrDefault() ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>Builds the root package representing the scanned image, with its digest as checksum.</summary>
    private static SpdxPackage BuildRootPackage(ImageArtifactDescriptor image, SpdxIdBuilder idBuilder)
    {
        var digest = image.ImageDigest;
        // Digests are "<algorithm>:<hex>"; fall back to the raw value otherwise.
        var digestParts = digest.Split(':', 2, StringSplitOptions.TrimEntries);
        var digestValue = digestParts.Length == 2 ? digestParts[1] : digest;
        var checksums = ImmutableArray.Create(new SpdxChecksum
        {
            Algorithm = digestParts.Length == 2 ? digestParts[0].ToUpperInvariant() : "SHA256",
            Value = digestValue
        });
        return new SpdxPackage
        {
            SpdxId = idBuilder.CreatePackageId($"image:{image.ImageDigest}"),
            Name = image.ImageReference ?? image.Repository ?? image.ImageDigest,
            Version = digestValue,
            PackageUrl = BuildImagePurl(image),
            DownloadLocation = "NOASSERTION",
            PrimaryPurpose = "container",
            Checksums = checksums
        };
    }

    /// <summary>Maps one aggregated component to an SPDX package.</summary>
    private static SpdxPackage BuildComponentPackage(
        AggregatedComponent component,
        SpdxIdBuilder idBuilder,
        SpdxLicenseList licenseList)
    {
        // Prefer an explicit purl; fall back to the key if it already is one.
        var packageUrl = !string.IsNullOrWhiteSpace(component.Identity.Purl)
            ? component.Identity.Purl
            : (component.Identity.Key.StartsWith("pkg:", StringComparison.Ordinal) ? component.Identity.Key : null);
        var declared = BuildLicenseExpression(component.Metadata?.Licenses, licenseList);
        return new SpdxPackage
        {
            SpdxId = idBuilder.CreatePackageId(component.Identity.Key),
            Name = component.Identity.Name,
            Version = component.Identity.Version,
            PackageUrl = packageUrl,
            DownloadLocation = "NOASSERTION",
            PrimaryPurpose = MapPrimaryPurpose(component.Identity.ComponentType),
            DeclaredLicense = declared
        };
    }

    /// <summary>
    /// Parses each declared license; unparseable entries become LicenseRef-*
    /// placeholders. Multiple licenses are joined with OR (disjunction).
    /// </summary>
    private static SpdxLicenseExpression? BuildLicenseExpression(
        IReadOnlyList<string>? licenses,
        SpdxLicenseList licenseList)
    {
        if (licenses is null || licenses.Count == 0)
        {
            return null;
        }

        var expressions = new List<SpdxLicenseExpression>();
        foreach (var license in licenses)
        {
            if (string.IsNullOrWhiteSpace(license))
            {
                continue;
            }
            if (SpdxLicenseExpressionParser.TryParse(license, out var parsed, licenseList))
            {
                expressions.Add(parsed!);
                continue;
            }
            expressions.Add(new SpdxSimpleLicense(ToLicenseRef(license)));
        }

        if (expressions.Count == 0)
        {
            return null;
        }

        // Left-fold into a disjunction chain.
        var current = expressions[0];
        for (var i = 1; i < expressions.Count; i++)
        {
            current = new SpdxDisjunctiveLicense(current, expressions[i]);
        }
        return current;
    }

    /// <summary>Sanitizes an arbitrary license string into a LicenseRef- identifier.</summary>
    private static string ToLicenseRef(string license)
    {
        // Replace everything outside [A-Za-z0-9.-] with '-', per LicenseRef grammar.
        var normalized = new string(license
            .Trim()
            .Select(ch => char.IsLetterOrDigit(ch) || ch == '.' || ch == '-' ? ch : '-')
            .ToArray());
        if (normalized.StartsWith("LicenseRef-", StringComparison.Ordinal))
        {
            return normalized;
        }
        return $"LicenseRef-{normalized}";
    }

    /// <summary>Maps a component type string to an SPDX primary purpose; unknown types default to "library".</summary>
    private static string? MapPrimaryPurpose(string? type)
    {
        if (string.IsNullOrWhiteSpace(type))
        {
            return "library";
        }
        return type.Trim().ToLowerInvariant() switch
        {
            "application" => "application",
            "framework" => "framework",
            "container" => "container",
            "operating-system" or "os" => "operatingSystem",
            "device" => "device",
            "firmware" => "firmware",
            "file" => "file",
            _ => "library"
        };
    }

    /// <summary>Builds a pkg:oci purl for the image; null when the repository is unknown.</summary>
    private static string? BuildImagePurl(ImageArtifactDescriptor image)
    {
        if (string.IsNullOrWhiteSpace(image.Repository))
        {
            return null;
        }

        var repo = image.Repository.Trim();
        var tag = string.IsNullOrWhiteSpace(image.Tag) ? null : image.Tag.Trim();
        var digest = image.ImageDigest.Trim();
        // NOTE(review): the pkg:oci spec places the digest after '@' and the tag
        // in a 'tag=' qualifier; this emits '@tag' + '?digest=' instead - confirm
        // downstream consumers before changing the shape.
        var builder = new System.Text.StringBuilder("pkg:oci/");
        builder.Append(repo.Replace("/", "%2F", StringComparison.Ordinal));
        if (!string.IsNullOrWhiteSpace(tag))
        {
            builder.Append('@').Append(tag);
        }
        builder.Append("?digest=").Append(Uri.EscapeDataString(digest));
        if (!string.IsNullOrWhiteSpace(image.Architecture))
        {
            builder.Append("&arch=").Append(Uri.EscapeDataString(image.Architecture.Trim()));
        }
        return builder.ToString();
    }
}

View File

@@ -88,6 +88,17 @@ public sealed class ScannerArtifactPackageBuilder
descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxProtobuf, composition.Usage.ProtobufMediaType, composition.Usage.ProtobufBytes, composition.Usage.ProtobufSha256, SbomView.Usage));
}
if (composition.SpdxInventory is not null)
{
descriptors.Add(CreateDescriptor(
ArtifactDocumentType.ImageBom,
ArtifactDocumentFormat.SpdxJson,
composition.SpdxInventory.JsonMediaType,
composition.SpdxInventory.JsonBytes,
composition.SpdxInventory.JsonSha256,
SbomView.Inventory));
}
descriptors.Add(CreateDescriptor(ArtifactDocumentType.Index, ArtifactDocumentFormat.BomIndex, "application/vnd.stellaops.bom-index.v1+binary", bomIndex.Bytes, bomIndex.Sha256, null));
descriptors.Add(CreateDescriptor(

View File

@@ -0,0 +1,196 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using CycloneDX.Models;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Spdx.Models;
namespace StellaOps.Scanner.Emit.Spdx.Conversion;
/// <summary>
/// Options for CycloneDX &lt;-&gt; SPDX conversion.
/// </summary>
public sealed record SpdxConversionOptions
{
/// <summary>Base URI for the generated SPDX document namespace.</summary>
public string NamespaceBase { get; init; } = "https://stellaops.io/spdx";
}
/// <summary>
/// Bidirectional (lossy) converter between CycloneDX BOMs and the internal
/// SPDX document model. Only packages and dependsOn relationships survive the
/// round trip; other CycloneDX/SPDX content is dropped.
/// </summary>
public static class SpdxCycloneDxConverter
{
    /// <summary>Converts a CycloneDX BOM into an SPDX document.</summary>
    public static SpdxDocument FromCycloneDx(Bom bom, SpdxConversionOptions? options = null)
    {
        ArgumentNullException.ThrowIfNull(bom);
        options ??= new SpdxConversionOptions();

        // The document namespace is derived deterministically from BOM identity.
        var basis = bom.SerialNumber ?? bom.Metadata?.Component?.BomRef ?? "cyclonedx";
        var namespaceHash = ScannerIdentifiers.CreateDeterministicHash(basis);

        var creationInfo = new SpdxCreationInfo
        {
            // NOTE(review): DateTimeOffset(ts, TimeSpan.Zero) throws when
            // ts.Kind == Local; assumes CycloneDX timestamps are UTC/Unspecified - confirm.
            Created = bom.Metadata?.Timestamp is { } timestamp
                ? new DateTimeOffset(timestamp, TimeSpan.Zero)
                : ScannerTimestamps.UtcNow(),
            Creators = ImmutableArray.Create("Tool: CycloneDX")
        };

        var idBuilder = new SpdxIdBuilder(options.NamespaceBase, namespaceHash);
        var documentNamespace = idBuilder.DocumentNamespace;

        // Fall back to a synthetic root when the BOM has no metadata component.
        var rootComponent = bom.Metadata?.Component;
        var rootPackage = rootComponent is null
            ? new SpdxPackage
            {
                SpdxId = idBuilder.CreatePackageId("root"),
                Name = "root",
                DownloadLocation = "NOASSERTION",
                PrimaryPurpose = "application"
            }
            : MapComponent(rootComponent, idBuilder);

        var packages = new List<SpdxPackage> { rootPackage };
        if (bom.Components is not null)
        {
            packages.AddRange(bom.Components.Select(component => MapComponent(component, idBuilder)));
        }

        var sbom = new SpdxSbom
        {
            SpdxId = idBuilder.SbomId,
            Name = "software-sbom",
            RootElements = ImmutableArray.Create(rootPackage.SpdxId),
            Elements = packages.Select(package => package.SpdxId).OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray(),
            SbomTypes = ImmutableArray.Create("build")
        };

        var relationships = BuildRelationshipsFromCycloneDx(bom, idBuilder);
        return new SpdxDocument
        {
            DocumentNamespace = documentNamespace,
            Name = "SPDX converted from CycloneDX",
            CreationInfo = creationInfo,
            Sbom = sbom,
            Elements = packages.Cast<SpdxElement>().ToImmutableArray(),
            Relationships = relationships
        };
    }

    /// <summary>Converts an SPDX document back into a CycloneDX 1.7 BOM.</summary>
    public static Bom ToCycloneDx(SpdxDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);
        // The first root element (if resolvable) becomes the metadata component.
        var rootId = document.Sbom.RootElements.FirstOrDefault();
        var packages = document.Elements.OfType<SpdxPackage>().ToList();
        var rootPackage = packages.FirstOrDefault(pkg => string.Equals(pkg.SpdxId, rootId, StringComparison.Ordinal))
            ?? packages.FirstOrDefault();

        var bom = new Bom
        {
            SpecVersion = SpecificationVersion.v1_7,
            Version = 1,
            Metadata = new Metadata
            {
                Timestamp = document.CreationInfo.Created.UtcDateTime,
                Component = rootPackage is null ? null : MapPackage(rootPackage)
            }
        };
        // All non-root packages become BOM components.
        bom.Components = packages
            .Where(pkg => rootPackage is null || !string.Equals(pkg.SpdxId, rootPackage.SpdxId, StringComparison.Ordinal))
            .Select(MapPackage)
            .ToList();
        bom.Dependencies = BuildDependenciesFromSpdx(document, packages);
        return bom;
    }

    /// <summary>Maps a CycloneDX component to an SPDX package.</summary>
    private static SpdxPackage MapComponent(Component component, SpdxIdBuilder idBuilder)
    {
        return new SpdxPackage
        {
            SpdxId = idBuilder.CreatePackageId(component.BomRef ?? component.Name ?? "component"),
            Name = component.Name ?? component.BomRef ?? "component",
            Version = component.Version,
            PackageUrl = component.Purl,
            DownloadLocation = "NOASSERTION",
            PrimaryPurpose = component.Type.ToString().Replace("_", "-", StringComparison.Ordinal).ToLowerInvariant()
        };
    }

    /// <summary>
    /// Maps an SPDX package to a CycloneDX component. Lossy: the original
    /// component classification is not preserved; everything becomes Library.
    /// </summary>
    private static Component MapPackage(SpdxPackage package)
    {
        return new Component
        {
            BomRef = package.SpdxId,
            Name = package.Name ?? package.SpdxId,
            Version = package.Version,
            Purl = package.PackageUrl,
            Type = Component.Classification.Library
        };
    }

    /// <summary>
    /// Translates the CycloneDX dependency graph into SPDX dependsOn
    /// relationships, keyed by the original bom-refs.
    /// </summary>
    private static ImmutableArray<SpdxRelationship> BuildRelationshipsFromCycloneDx(
        Bom bom,
        SpdxIdBuilder idBuilder)
    {
        // Fix: removed an unused package-id lookup dictionary that was built
        // here on every call but never read.
        if (bom.Dependencies is null)
        {
            return ImmutableArray<SpdxRelationship>.Empty;
        }

        var relationships = new List<SpdxRelationship>();
        foreach (var dependency in bom.Dependencies)
        {
            if (dependency.Dependencies is null || dependency.Ref is null)
            {
                continue;
            }
            foreach (var target in dependency.Dependencies.Where(dep => dep.Ref is not null))
            {
                relationships.Add(new SpdxRelationship
                {
                    SpdxId = idBuilder.CreateRelationshipId(dependency.Ref, "dependsOn", target.Ref!),
                    FromElement = dependency.Ref,
                    Type = SpdxRelationshipType.DependsOn,
                    ToElements = ImmutableArray.Create(target.Ref!)
                });
            }
        }
        return relationships.ToImmutableArray();
    }

    /// <summary>
    /// Rebuilds the CycloneDX dependency list from SPDX dependsOn relationships,
    /// keeping only endpoints that map to known packages. Returns null when empty.
    /// </summary>
    private static List<Dependency>? BuildDependenciesFromSpdx(
        SpdxDocument document,
        IReadOnlyList<SpdxPackage> packages)
    {
        var dependencies = new List<Dependency>();
        var packageIds = packages.Select(pkg => pkg.SpdxId).ToHashSet(StringComparer.Ordinal);
        foreach (var relationship in document.Relationships
            .Where(rel => rel.Type == SpdxRelationshipType.DependsOn))
        {
            if (!packageIds.Contains(relationship.FromElement))
            {
                continue;
            }
            var targets = relationship.ToElements.Where(packageIds.Contains).ToList();
            if (targets.Count == 0)
            {
                continue;
            }
            dependencies.Add(new Dependency
            {
                Ref = relationship.FromElement,
                Dependencies = targets.Select(target => new Dependency { Ref = target }).ToList()
            });
        }
        return dependencies.Count == 0 ? null : dependencies;
    }
}

View File

@@ -0,0 +1,35 @@
namespace StellaOps.Scanner.Emit.Spdx.Models;
/// <summary>Base type for SPDX license expression AST nodes.</summary>
public abstract record SpdxLicenseExpression;
/// <summary>A single license identifier (e.g. "MIT" or "LicenseRef-...").</summary>
public sealed record SpdxSimpleLicense(string LicenseId) : SpdxLicenseExpression;
/// <summary>An "AND" combination of two expressions.</summary>
public sealed record SpdxConjunctiveLicense(
SpdxLicenseExpression Left,
SpdxLicenseExpression Right) : SpdxLicenseExpression;
/// <summary>An "OR" combination of two expressions.</summary>
public sealed record SpdxDisjunctiveLicense(
SpdxLicenseExpression Left,
SpdxLicenseExpression Right) : SpdxLicenseExpression;
/// <summary>A license qualified by a "WITH &lt;exception&gt;" clause.</summary>
public sealed record SpdxWithException(
SpdxLicenseExpression License,
string Exception) : SpdxLicenseExpression;
/// <summary>The SPDX "NONE" value; singleton via <see cref="Instance"/>.</summary>
public sealed record SpdxNoneLicense : SpdxLicenseExpression
{
public static SpdxNoneLicense Instance { get; } = new();
private SpdxNoneLicense()
{
}
}
/// <summary>The SPDX "NOASSERTION" value; singleton via <see cref="Instance"/>.</summary>
public sealed record SpdxNoAssertionLicense : SpdxLicenseExpression
{
public static SpdxNoAssertionLicense Instance { get; } = new();
private SpdxNoAssertionLicense()
{
}
}

View File

@@ -0,0 +1,406 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.Emit.Spdx.Models;
/// <summary>Supported SPDX license list versions.</summary>
public enum SpdxLicenseListVersion
{
/// <summary>SPDX license list 3.21 (bundled as an embedded resource).</summary>
V3_21
}
/// <summary>
/// An SPDX license list: the valid license and exception identifiers for a
/// given list version, used to validate parsed license expressions.
/// </summary>
public sealed record SpdxLicenseList
{
/// <summary>License list version string (e.g. "3.21").</summary>
public required string Version { get; init; }
/// <summary>Known license identifiers (ordinal comparison).</summary>
public required ImmutableHashSet<string> LicenseIds { get; init; }
/// <summary>Known license exception identifiers (ordinal comparison).</summary>
public required ImmutableHashSet<string> ExceptionIds { get; init; }
}
/// <summary>
/// Loads SPDX license lists from embedded resources (offline by design) and
/// caches them lazily for the lifetime of the process.
/// </summary>
public static class SpdxLicenseListProvider
{
private const string LicenseResource = "StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-list-3.21.json";
private const string ExceptionResource = "StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-exceptions-3.21.json";
private static readonly Lazy<SpdxLicenseList> LicenseListV321 = new(LoadV321);
/// <summary>
/// Returns the license list for <paramref name="version"/>. Unknown enum
/// values deliberately fall back to 3.21 (the only bundled list).
/// </summary>
public static SpdxLicenseList Get(SpdxLicenseListVersion version)
=> version switch
{
SpdxLicenseListVersion.V3_21 => LicenseListV321.Value,
_ => LicenseListV321.Value,
};
// Loads both embedded JSON resources; throws if either is missing from the assembly.
private static SpdxLicenseList LoadV321()
{
var assembly = Assembly.GetExecutingAssembly();
var licenseIds = LoadLicenseIds(assembly, LicenseResource, "licenses", "licenseId");
var exceptionIds = LoadLicenseIds(assembly, ExceptionResource, "exceptions", "licenseExceptionId");
return new SpdxLicenseList
{
Version = "3.21",
LicenseIds = licenseIds,
ExceptionIds = exceptionIds,
};
}
// Extracts the string-valued idProperty from each element of the named JSON
// array; a missing/non-array property yields an empty set rather than throwing.
private static ImmutableHashSet<string> LoadLicenseIds(
Assembly assembly,
string resourceName,
string arrayProperty,
string idProperty)
{
using var stream = assembly.GetManifestResourceStream(resourceName)
?? throw new InvalidOperationException($"Missing embedded resource: {resourceName}");
using var document = JsonDocument.Parse(stream);
if (!document.RootElement.TryGetProperty(arrayProperty, out var array) ||
array.ValueKind != JsonValueKind.Array)
{
return ImmutableHashSet<string>.Empty;
}
var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.Ordinal);
foreach (var entry in array.EnumerateArray())
{
if (entry.TryGetProperty(idProperty, out var idElement) &&
idElement.ValueKind == JsonValueKind.String &&
idElement.GetString() is { Length: > 0 } id)
{
builder.Add(id);
}
}
return builder.ToImmutable();
}
}
public static class SpdxLicenseExpressionParser
{
/// <summary>
/// Non-throwing counterpart of <see cref="Parse"/>. Returns false (with a null
/// <paramref name="result"/>) for blank or malformed expressions.
/// </summary>
public static bool TryParse(string expression, out SpdxLicenseExpression? result, SpdxLicenseList? licenseList = null)
{
    if (!string.IsNullOrWhiteSpace(expression))
    {
        try
        {
            result = Parse(expression, licenseList);
            return true;
        }
        catch (FormatException)
        {
            // Malformed expression: fall through to the failure path.
        }
    }

    result = null;
    return false;
}
/// <summary>
/// Parses an SPDX license expression into its AST and, when a license list is
/// supplied, validates every identifier against it.
/// </summary>
/// <exception cref="FormatException">Empty input, syntax error, trailing tokens, or unknown identifier.</exception>
public static SpdxLicenseExpression Parse(string expression, SpdxLicenseList? licenseList = null)
{
    if (string.IsNullOrWhiteSpace(expression))
    {
        throw new FormatException("License expression is empty.");
    }

    var parser = new Parser(Tokenize(expression));
    var result = parser.ParseExpression();
    if (parser.HasMoreTokens)
    {
        throw new FormatException("Unexpected trailing tokens in license expression.");
    }

    if (licenseList is { } list)
    {
        Validate(result, list);
    }

    return result;
}
/// <summary>
/// Recursively checks every license and exception identifier in the expression
/// against the supplied SPDX license list.
/// </summary>
/// <exception cref="FormatException">An identifier is not present in the list.</exception>
private static void Validate(SpdxLicenseExpression expression, SpdxLicenseList list)
{
switch (expression)
{
case SpdxSimpleLicense simple:
// NONE/NOASSERTION and LicenseRef-/DocumentRef- ids are caller-defined; skip lookup.
if (IsSpecial(simple.LicenseId) || IsLicenseRef(simple.LicenseId))
{
return;
}
if (!list.LicenseIds.Contains(simple.LicenseId))
{
throw new FormatException($"Unknown SPDX license identifier: {simple.LicenseId}");
}
break;
case SpdxWithException withException:
Validate(withException.License, list);
if (!list.ExceptionIds.Contains(withException.Exception))
{
throw new FormatException($"Unknown SPDX license exception: {withException.Exception}");
}
break;
case SpdxConjunctiveLicense conjunctive:
Validate(conjunctive.Left, list);
Validate(conjunctive.Right, list);
break;
case SpdxDisjunctiveLicense disjunctive:
Validate(disjunctive.Left, list);
Validate(disjunctive.Right, list);
break;
case SpdxNoneLicense:
case SpdxNoAssertionLicense:
// Special values carry no identifiers to validate.
break;
default:
throw new FormatException("Unsupported SPDX license expression node.");
}
}
/// <summary>True for the special SPDX values NONE and NOASSERTION (case-sensitive).</summary>
private static bool IsSpecial(string licenseId)
{
    // Constant patterns compare ordinally, matching the original Equals(Ordinal) calls.
    return licenseId is "NONE" or "NOASSERTION";
}
private static bool IsLicenseRef(string licenseId)
=> licenseId.StartsWith("LicenseRef-", StringComparison.Ordinal)
|| licenseId.StartsWith("DocumentRef-", StringComparison.Ordinal);
private static List<Token> Tokenize(string expression)
{
var tokens = new List<Token>();
var buffer = new StringBuilder();
void Flush()
{
if (buffer.Length == 0)
{
return;
}
var value = buffer.ToString();
buffer.Clear();
tokens.Add(Token.From(value));
}
foreach (var ch in expression)
{
switch (ch)
{
case '(':
Flush();
tokens.Add(new Token(TokenType.OpenParen, "("));
break;
case ')':
Flush();
tokens.Add(new Token(TokenType.CloseParen, ")"));
break;
default:
if (char.IsWhiteSpace(ch))
{
Flush();
}
else
{
buffer.Append(ch);
}
break;
}
}
Flush();
return tokens;
}
private sealed class Parser
{
private readonly IReadOnlyList<Token> _tokens;
private int _index;
public Parser(IReadOnlyList<Token> tokens)
{
_tokens = tokens;
}
public bool HasMoreTokens => _index < _tokens.Count;
public SpdxLicenseExpression ParseExpression()
{
var left = ParseWith();
while (TryMatch(TokenType.And, out _) || TryMatch(TokenType.Or, out var op))
{
var right = ParseWith();
left = op!.Type == TokenType.And
? new SpdxConjunctiveLicense(left, right)
: new SpdxDisjunctiveLicense(left, right);
}
return left;
}
private SpdxLicenseExpression ParseWith()
{
var left = ParsePrimary();
if (TryMatch(TokenType.With, out var withToken))
{
var exception = Expect(TokenType.Identifier);
left = new SpdxWithException(left, exception.Value);
}
return left;
}
private SpdxLicenseExpression ParsePrimary()
{
if (TryMatch(TokenType.OpenParen, out _))
{
var inner = ParseExpression();
Expect(TokenType.CloseParen);
return inner;
}
var token = Expect(TokenType.Identifier);
if (string.Equals(token.Value, "NONE", StringComparison.OrdinalIgnoreCase))
{
return SpdxNoneLicense.Instance;
}
if (string.Equals(token.Value, "NOASSERTION", StringComparison.OrdinalIgnoreCase))
{
return SpdxNoAssertionLicense.Instance;
}
return new SpdxSimpleLicense(token.Value);
}
private bool TryMatch(TokenType type, out Token? token)
{
token = null;
if (_index >= _tokens.Count)
{
return false;
}
var candidate = _tokens[_index];
if (candidate.Type != type)
{
return false;
}
_index++;
token = candidate;
return true;
}
private Token Expect(TokenType type)
{
if (_index >= _tokens.Count)
{
throw new FormatException($"Expected {type} but reached end of expression.");
}
var token = _tokens[_index++];
if (token.Type != type)
{
throw new FormatException($"Expected {type} but found {token.Type}.");
}
return token;
}
}
private sealed record Token(TokenType Type, string Value)
{
public static Token From(string value)
{
var normalized = value.Trim();
if (string.Equals(normalized, "AND", StringComparison.OrdinalIgnoreCase))
{
return new Token(TokenType.And, "AND");
}
if (string.Equals(normalized, "OR", StringComparison.OrdinalIgnoreCase))
{
return new Token(TokenType.Or, "OR");
}
if (string.Equals(normalized, "WITH", StringComparison.OrdinalIgnoreCase))
{
return new Token(TokenType.With, "WITH");
}
return new Token(TokenType.Identifier, normalized);
}
}
private enum TokenType
{
Identifier,
And,
Or,
With,
OpenParen,
CloseParen
}
}
/// <summary>
/// Renders an <see cref="SpdxLicenseExpression"/> tree back to SPDX expression text.
/// Parentheses are emitted when a binary node sits under a binary parent with a
/// different operator, and around compound operands of WITH, so the rendered text
/// re-parses to an equivalent tree.
/// </summary>
public static class SpdxLicenseExpressionRenderer
{
    public static string Render(SpdxLicenseExpression expression)
    {
        return RenderInternal(expression, parentOperator: null);
    }

    private static string RenderInternal(SpdxLicenseExpression expression, SpdxBinaryOperator? parentOperator)
    {
        switch (expression)
        {
            case SpdxSimpleLicense simple:
                return simple.LicenseId;
            case SpdxNoneLicense:
                return "NONE";
            case SpdxNoAssertionLicense:
                return "NOASSERTION";
            case SpdxWithException withException:
                var licenseText = RenderInternal(withException.License, parentOperator: null);
                // FIX: WITH binds to a single license operand; a compound operand must be
                // parenthesized, otherwise "A AND B WITH e" re-parses as A AND (B WITH e).
                if (withException.License is SpdxConjunctiveLicense or SpdxDisjunctiveLicense)
                {
                    licenseText = $"({licenseText})";
                }

                return $"{licenseText} WITH {withException.Exception}";
            case SpdxConjunctiveLicense conjunctive:
                return RenderBinary(conjunctive.Left, conjunctive.Right, "AND", SpdxBinaryOperator.And, parentOperator);
            case SpdxDisjunctiveLicense disjunctive:
                return RenderBinary(disjunctive.Left, disjunctive.Right, "OR", SpdxBinaryOperator.Or, parentOperator);
            default:
                throw new InvalidOperationException("Unsupported SPDX license expression node.");
        }
    }

    // Renders "left OP right", wrapping in parentheses when the parent uses a different
    // operator; same-operator chains stay flat since both AND and OR are associative.
    private static string RenderBinary(
        SpdxLicenseExpression left,
        SpdxLicenseExpression right,
        string op,
        SpdxBinaryOperator current,
        SpdxBinaryOperator? parent)
    {
        var leftText = RenderInternal(left, current);
        var rightText = RenderInternal(right, current);
        var text = $"{leftText} {op} {rightText}";
        if (parent.HasValue && parent.Value != current)
        {
            return $"({text})";
        }

        return text;
    }

    private enum SpdxBinaryOperator
    {
        And,
        Or
    }
}

View File

@@ -0,0 +1,204 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Scanner.Emit.Spdx.Models;
/// <summary>
/// Constants for SPDX 3.0.1 JSON-LD emission: the spec version, the @context URL,
/// and the node type names used in the serialized @graph.
/// </summary>
public static class SpdxDefaults
{
public const string SpecVersion = "3.0.1";
public const string JsonLdContext = "https://spdx.org/rdf/3.0.1/spdx-context.jsonld";
public const string DocumentType = "SpdxDocument";
public const string SbomType = "software_Sbom";
public const string PackageType = "software_Package";
public const string FileType = "software_File";
public const string SnippetType = "software_Snippet";
public const string RelationshipType = "Relationship";
}
/// <summary>
/// Root model for an SPDX document: document namespace and name, shared creation info,
/// the SBOM element, and flat collections of elements, relationships, and annotations.
/// </summary>
public sealed record SpdxDocument
{
public required string DocumentNamespace { get; init; }
public required string Name { get; init; }
public required SpdxCreationInfo CreationInfo { get; init; }
public required SpdxSbom Sbom { get; init; }
public ImmutableArray<SpdxElement> Elements { get; init; } = ImmutableArray<SpdxElement>.Empty;
public ImmutableArray<SpdxRelationship> Relationships { get; init; } = ImmutableArray<SpdxRelationship>.Empty;
public ImmutableArray<SpdxAnnotation> Annotations { get; init; } = ImmutableArray<SpdxAnnotation>.Empty;
// Profile identifiers (e.g. "core", "software"); serializer behavior when empty is decided downstream.
public ImmutableArray<string> ProfileConformance { get; init; } = ImmutableArray<string>.Empty;
public string SpecVersion { get; init; } = SpdxDefaults.SpecVersion;
}
/// <summary>
/// Creation metadata shared by the document's elements: creation timestamp plus
/// creator and tool identifier strings.
/// </summary>
public sealed record SpdxCreationInfo
{
public required DateTimeOffset Created { get; init; }
public ImmutableArray<string> Creators { get; init; } = ImmutableArray<string>.Empty;
public ImmutableArray<string> CreatedUsing { get; init; } = ImmutableArray<string>.Empty;
public string SpecVersion { get; init; } = SpdxDefaults.SpecVersion;
}
/// <summary>
/// Base type for all SPDX elements: a required SpdxId plus optional descriptive text.
/// </summary>
public abstract record SpdxElement
{
public required string SpdxId { get; init; }
public string? Name { get; init; }
public string? Summary { get; init; }
public string? Description { get; init; }
public string? Comment { get; init; }
}
/// <summary>
/// The SBOM element. Referenced elements are listed by SpdxId rather than nested.
/// </summary>
public sealed record SpdxSbom : SpdxElement
{
public ImmutableArray<string> RootElements { get; init; } = ImmutableArray<string>.Empty;
public ImmutableArray<string> Elements { get; init; } = ImmutableArray<string>.Empty;
// e.g. "build"; exact vocabulary comes from the SPDX software profile.
public ImmutableArray<string> SbomTypes { get; init; } = ImmutableArray<string>.Empty;
}
/// <summary>
/// A software package element: version, package URL, download location, declared and
/// concluded licenses, checksums, external references, and optional verification code.
/// </summary>
public sealed record SpdxPackage : SpdxElement
{
public string? Version { get; init; }
public string? PackageUrl { get; init; }
public string? DownloadLocation { get; init; }
public string? PrimaryPurpose { get; init; }
public SpdxLicenseExpression? DeclaredLicense { get; init; }
public SpdxLicenseExpression? ConcludedLicense { get; init; }
public string? CopyrightText { get; init; }
public ImmutableArray<SpdxChecksum> Checksums { get; init; } = ImmutableArray<SpdxChecksum>.Empty;
public ImmutableArray<SpdxExternalRef> ExternalRefs { get; init; } = ImmutableArray<SpdxExternalRef>.Empty;
public SpdxPackageVerificationCode? VerificationCode { get; init; }
}
/// <summary>
/// A file element: file name, concluded license, copyright text, and checksums.
/// </summary>
public sealed record SpdxFile : SpdxElement
{
public string? FileName { get; init; }
public SpdxLicenseExpression? ConcludedLicense { get; init; }
public string? CopyrightText { get; init; }
public ImmutableArray<SpdxChecksum> Checksums { get; init; } = ImmutableArray<SpdxChecksum>.Empty;
}
/// <summary>
/// A snippet element: a byte and/or line range inside the file identified by
/// <see cref="FromFileSpdxId"/>.
/// </summary>
public sealed record SpdxSnippet : SpdxElement
{
public required string FromFileSpdxId { get; init; }
public long? ByteRangeStart { get; init; }
public long? ByteRangeEnd { get; init; }
public long? LineRangeStart { get; init; }
public long? LineRangeEnd { get; init; }
}
/// <summary>
/// A typed edge from one element to one or more target elements, all referenced by SpdxId.
/// </summary>
public sealed record SpdxRelationship
{
public required string SpdxId { get; init; }
public required string FromElement { get; init; }
public required SpdxRelationshipType Type { get; init; }
public required ImmutableArray<string> ToElements { get; init; }
}
/// <summary>
/// Supported relationship kinds; <see cref="Other"/> is the catch-all for anything
/// outside the enumerated set.
/// </summary>
public enum SpdxRelationshipType
{
Describes,
DependsOn,
Contains,
ContainedBy,
Other
}
/// <summary>
/// A free-form annotation attached to an element: who annotated, when, the annotation
/// type string, and the comment text.
/// </summary>
public sealed record SpdxAnnotation
{
public required string SpdxId { get; init; }
public required string Annotator { get; init; }
public required DateTimeOffset AnnotatedAt { get; init; }
public required string AnnotationType { get; init; }
public required string Comment { get; init; }
}
/// <summary>
/// An algorithm/value checksum pair (both kept as strings).
/// </summary>
public sealed record SpdxChecksum
{
public required string Algorithm { get; init; }
public required string Value { get; init; }
}
/// <summary>
/// An external reference: category, type, and the locator string.
/// </summary>
public sealed record SpdxExternalRef
{
public required string Category { get; init; }
public required string Type { get; init; }
public required string Locator { get; init; }
}
/// <summary>
/// Package verification code value with the file names excluded from its computation.
/// </summary>
public sealed record SpdxPackageVerificationCode
{
public required string Value { get; init; }
public ImmutableArray<string> ExcludedFiles { get; init; } = ImmutableArray<string>.Empty;
}
/// <summary>
/// A non-listed (extracted) license: its id, optional name and full text, and
/// reference URLs.
/// </summary>
public sealed record SpdxExtractedLicense
{
public required string LicenseId { get; init; }
public string? Name { get; init; }
public string? Text { get; init; }
public ImmutableArray<string> References { get; init; } = ImmutableArray<string>.Empty;
}
/// <summary>
/// A vulnerability element: locator (presumably a CVE or similar id — confirm against
/// the emitter), status notes, and published/modified timestamps.
/// </summary>
public sealed record SpdxVulnerability : SpdxElement
{
public string? Locator { get; init; }
public string? StatusNotes { get; init; }
public DateTimeOffset? PublishedTime { get; init; }
public DateTimeOffset? ModifiedTime { get; init; }
}
/// <summary>
/// A vulnerability assessment element: severity, vector string, and score, all kept
/// as free-form strings.
/// </summary>
public sealed record SpdxVulnAssessment : SpdxElement
{
public string? Severity { get; init; }
public string? VectorString { get; init; }
public string? Score { get; init; }
}

View File

@@ -0,0 +1,643 @@
{
"licenseListVersion": "3.21",
"exceptions": [
{
"reference": "./389-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./389-exception.html",
"referenceNumber": 48,
"name": "389 Directory Server Exception",
"licenseExceptionId": "389-exception",
"seeAlso": [
"http://directory.fedoraproject.org/wiki/GPL_Exception_License_Text",
"https://web.archive.org/web/20080828121337/http://directory.fedoraproject.org/wiki/GPL_Exception_License_Text"
]
},
{
"reference": "./Asterisk-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Asterisk-exception.html",
"referenceNumber": 33,
"name": "Asterisk exception",
"licenseExceptionId": "Asterisk-exception",
"seeAlso": [
"https://github.com/asterisk/libpri/blob/7f91151e6bd10957c746c031c1f4a030e8146e9a/pri.c#L22",
"https://github.com/asterisk/libss7/blob/03e81bcd0d28ff25d4c77c78351ddadc82ff5c3f/ss7.c#L24"
]
},
{
"reference": "./Autoconf-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Autoconf-exception-2.0.html",
"referenceNumber": 42,
"name": "Autoconf exception 2.0",
"licenseExceptionId": "Autoconf-exception-2.0",
"seeAlso": [
"http://ac-archive.sourceforge.net/doc/copyright.html",
"http://ftp.gnu.org/gnu/autoconf/autoconf-2.59.tar.gz"
]
},
{
"reference": "./Autoconf-exception-3.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Autoconf-exception-3.0.html",
"referenceNumber": 41,
"name": "Autoconf exception 3.0",
"licenseExceptionId": "Autoconf-exception-3.0",
"seeAlso": [
"http://www.gnu.org/licenses/autoconf-exception-3.0.html"
]
},
{
"reference": "./Autoconf-exception-generic.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Autoconf-exception-generic.html",
"referenceNumber": 4,
"name": "Autoconf generic exception",
"licenseExceptionId": "Autoconf-exception-generic",
"seeAlso": [
"https://launchpad.net/ubuntu/precise/+source/xmltooling/+copyright",
"https://tracker.debian.org/media/packages/s/sipwitch/copyright-1.9.15-3",
"https://opensource.apple.com/source/launchd/launchd-258.1/launchd/compile.auto.html"
]
},
{
"reference": "./Autoconf-exception-macro.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Autoconf-exception-macro.html",
"referenceNumber": 19,
"name": "Autoconf macro exception",
"licenseExceptionId": "Autoconf-exception-macro",
"seeAlso": [
"https://github.com/freedesktop/xorg-macros/blob/39f07f7db58ebbf3dcb64a2bf9098ed5cf3d1223/xorg-macros.m4.in",
"https://www.gnu.org/software/autoconf-archive/ax_pthread.html",
"https://launchpad.net/ubuntu/precise/+source/xmltooling/+copyright"
]
},
{
"reference": "./Bison-exception-2.2.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Bison-exception-2.2.html",
"referenceNumber": 11,
"name": "Bison exception 2.2",
"licenseExceptionId": "Bison-exception-2.2",
"seeAlso": [
"http://git.savannah.gnu.org/cgit/bison.git/tree/data/yacc.c?id\u003d193d7c7054ba7197b0789e14965b739162319b5e#n141"
]
},
{
"reference": "./Bootloader-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Bootloader-exception.html",
"referenceNumber": 50,
"name": "Bootloader Distribution Exception",
"licenseExceptionId": "Bootloader-exception",
"seeAlso": [
"https://github.com/pyinstaller/pyinstaller/blob/develop/COPYING.txt"
]
},
{
"reference": "./Classpath-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Classpath-exception-2.0.html",
"referenceNumber": 36,
"name": "Classpath exception 2.0",
"licenseExceptionId": "Classpath-exception-2.0",
"seeAlso": [
"http://www.gnu.org/software/classpath/license.html",
"https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception"
]
},
{
"reference": "./CLISP-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./CLISP-exception-2.0.html",
"referenceNumber": 9,
"name": "CLISP exception 2.0",
"licenseExceptionId": "CLISP-exception-2.0",
"seeAlso": [
"http://sourceforge.net/p/clisp/clisp/ci/default/tree/COPYRIGHT"
]
},
{
"reference": "./cryptsetup-OpenSSL-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./cryptsetup-OpenSSL-exception.html",
"referenceNumber": 39,
"name": "cryptsetup OpenSSL exception",
"licenseExceptionId": "cryptsetup-OpenSSL-exception",
"seeAlso": [
"https://gitlab.com/cryptsetup/cryptsetup/-/blob/main/COPYING",
"https://gitlab.nic.cz/datovka/datovka/-/blob/develop/COPYING",
"https://github.com/nbs-system/naxsi/blob/951123ad456bdf5ac94e8d8819342fe3d49bc002/naxsi_src/naxsi_raw.c",
"http://web.mit.edu/jgross/arch/amd64_deb60/bin/mosh"
]
},
{
"reference": "./DigiRule-FOSS-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./DigiRule-FOSS-exception.html",
"referenceNumber": 20,
"name": "DigiRule FOSS License Exception",
"licenseExceptionId": "DigiRule-FOSS-exception",
"seeAlso": [
"http://www.digirulesolutions.com/drupal/foss"
]
},
{
"reference": "./eCos-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./eCos-exception-2.0.html",
"referenceNumber": 38,
"name": "eCos exception 2.0",
"licenseExceptionId": "eCos-exception-2.0",
"seeAlso": [
"http://ecos.sourceware.org/license-overview.html"
]
},
{
"reference": "./Fawkes-Runtime-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Fawkes-Runtime-exception.html",
"referenceNumber": 8,
"name": "Fawkes Runtime Exception",
"licenseExceptionId": "Fawkes-Runtime-exception",
"seeAlso": [
"http://www.fawkesrobotics.org/about/license/"
]
},
{
"reference": "./FLTK-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./FLTK-exception.html",
"referenceNumber": 18,
"name": "FLTK exception",
"licenseExceptionId": "FLTK-exception",
"seeAlso": [
"http://www.fltk.org/COPYING.php"
]
},
{
"reference": "./Font-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Font-exception-2.0.html",
"referenceNumber": 7,
"name": "Font exception 2.0",
"licenseExceptionId": "Font-exception-2.0",
"seeAlso": [
"http://www.gnu.org/licenses/gpl-faq.html#FontException"
]
},
{
"reference": "./freertos-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./freertos-exception-2.0.html",
"referenceNumber": 47,
"name": "FreeRTOS Exception 2.0",
"licenseExceptionId": "freertos-exception-2.0",
"seeAlso": [
"https://web.archive.org/web/20060809182744/http://www.freertos.org/a00114.html"
]
},
{
"reference": "./GCC-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GCC-exception-2.0.html",
"referenceNumber": 54,
"name": "GCC Runtime Library exception 2.0",
"licenseExceptionId": "GCC-exception-2.0",
"seeAlso": [
"https://gcc.gnu.org/git/?p\u003dgcc.git;a\u003dblob;f\u003dgcc/libgcc1.c;h\u003d762f5143fc6eed57b6797c82710f3538aa52b40b;hb\u003dcb143a3ce4fb417c68f5fa2691a1b1b1053dfba9#l10"
]
},
{
"reference": "./GCC-exception-3.1.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GCC-exception-3.1.html",
"referenceNumber": 27,
"name": "GCC Runtime Library exception 3.1",
"licenseExceptionId": "GCC-exception-3.1",
"seeAlso": [
"http://www.gnu.org/licenses/gcc-exception-3.1.html"
]
},
{
"reference": "./GNAT-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GNAT-exception.html",
"referenceNumber": 13,
"name": "GNAT exception",
"licenseExceptionId": "GNAT-exception",
"seeAlso": [
"https://github.com/AdaCore/florist/blob/master/libsrc/posix-configurable_file_limits.adb"
]
},
{
"reference": "./gnu-javamail-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./gnu-javamail-exception.html",
"referenceNumber": 34,
"name": "GNU JavaMail exception",
"licenseExceptionId": "gnu-javamail-exception",
"seeAlso": [
"http://www.gnu.org/software/classpathx/javamail/javamail.html"
]
},
{
"reference": "./GPL-3.0-interface-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GPL-3.0-interface-exception.html",
"referenceNumber": 21,
"name": "GPL-3.0 Interface Exception",
"licenseExceptionId": "GPL-3.0-interface-exception",
"seeAlso": [
"https://www.gnu.org/licenses/gpl-faq.en.html#LinkingOverControlledInterface"
]
},
{
"reference": "./GPL-3.0-linking-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GPL-3.0-linking-exception.html",
"referenceNumber": 1,
"name": "GPL-3.0 Linking Exception",
"licenseExceptionId": "GPL-3.0-linking-exception",
"seeAlso": [
"https://www.gnu.org/licenses/gpl-faq.en.html#GPLIncompatibleLibs"
]
},
{
"reference": "./GPL-3.0-linking-source-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GPL-3.0-linking-source-exception.html",
"referenceNumber": 37,
"name": "GPL-3.0 Linking Exception (with Corresponding Source)",
"licenseExceptionId": "GPL-3.0-linking-source-exception",
"seeAlso": [
"https://www.gnu.org/licenses/gpl-faq.en.html#GPLIncompatibleLibs",
"https://github.com/mirror/wget/blob/master/src/http.c#L20"
]
},
{
"reference": "./GPL-CC-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GPL-CC-1.0.html",
"referenceNumber": 52,
"name": "GPL Cooperation Commitment 1.0",
"licenseExceptionId": "GPL-CC-1.0",
"seeAlso": [
"https://github.com/gplcc/gplcc/blob/master/Project/COMMITMENT",
"https://gplcc.github.io/gplcc/Project/README-PROJECT.html"
]
},
{
"reference": "./GStreamer-exception-2005.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GStreamer-exception-2005.html",
"referenceNumber": 35,
"name": "GStreamer Exception (2005)",
"licenseExceptionId": "GStreamer-exception-2005",
"seeAlso": [
"https://gstreamer.freedesktop.org/documentation/frequently-asked-questions/licensing.html?gi-language\u003dc#licensing-of-applications-using-gstreamer"
]
},
{
"reference": "./GStreamer-exception-2008.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./GStreamer-exception-2008.html",
"referenceNumber": 30,
"name": "GStreamer Exception (2008)",
"licenseExceptionId": "GStreamer-exception-2008",
"seeAlso": [
"https://gstreamer.freedesktop.org/documentation/frequently-asked-questions/licensing.html?gi-language\u003dc#licensing-of-applications-using-gstreamer"
]
},
{
"reference": "./i2p-gpl-java-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./i2p-gpl-java-exception.html",
"referenceNumber": 40,
"name": "i2p GPL+Java Exception",
"licenseExceptionId": "i2p-gpl-java-exception",
"seeAlso": [
"http://geti2p.net/en/get-involved/develop/licenses#java_exception"
]
},
{
"reference": "./KiCad-libraries-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./KiCad-libraries-exception.html",
"referenceNumber": 28,
"name": "KiCad Libraries Exception",
"licenseExceptionId": "KiCad-libraries-exception",
"seeAlso": [
"https://www.kicad.org/libraries/license/"
]
},
{
"reference": "./LGPL-3.0-linking-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./LGPL-3.0-linking-exception.html",
"referenceNumber": 2,
"name": "LGPL-3.0 Linking Exception",
"licenseExceptionId": "LGPL-3.0-linking-exception",
"seeAlso": [
"https://raw.githubusercontent.com/go-xmlpath/xmlpath/v2/LICENSE",
"https://github.com/goamz/goamz/blob/master/LICENSE",
"https://github.com/juju/errors/blob/master/LICENSE"
]
},
{
"reference": "./libpri-OpenH323-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./libpri-OpenH323-exception.html",
"referenceNumber": 32,
"name": "libpri OpenH323 exception",
"licenseExceptionId": "libpri-OpenH323-exception",
"seeAlso": [
"https://github.com/asterisk/libpri/blob/1.6.0/README#L19-L22"
]
},
{
"reference": "./Libtool-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Libtool-exception.html",
"referenceNumber": 17,
"name": "Libtool Exception",
"licenseExceptionId": "Libtool-exception",
"seeAlso": [
"http://git.savannah.gnu.org/cgit/libtool.git/tree/m4/libtool.m4"
]
},
{
"reference": "./Linux-syscall-note.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Linux-syscall-note.html",
"referenceNumber": 49,
"name": "Linux Syscall Note",
"licenseExceptionId": "Linux-syscall-note",
"seeAlso": [
"https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/COPYING"
]
},
{
"reference": "./LLGPL.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./LLGPL.html",
"referenceNumber": 3,
"name": "LLGPL Preamble",
"licenseExceptionId": "LLGPL",
"seeAlso": [
"http://opensource.franz.com/preamble.html"
]
},
{
"reference": "./LLVM-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./LLVM-exception.html",
"referenceNumber": 14,
"name": "LLVM Exception",
"licenseExceptionId": "LLVM-exception",
"seeAlso": [
"http://llvm.org/foundation/relicensing/LICENSE.txt"
]
},
{
"reference": "./LZMA-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./LZMA-exception.html",
"referenceNumber": 55,
"name": "LZMA exception",
"licenseExceptionId": "LZMA-exception",
"seeAlso": [
"http://nsis.sourceforge.net/Docs/AppendixI.html#I.6"
]
},
{
"reference": "./mif-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./mif-exception.html",
"referenceNumber": 53,
"name": "Macros and Inline Functions Exception",
"licenseExceptionId": "mif-exception",
"seeAlso": [
"http://www.scs.stanford.edu/histar/src/lib/cppsup/exception",
"http://dev.bertos.org/doxygen/",
"https://www.threadingbuildingblocks.org/licensing"
]
},
{
"reference": "./Nokia-Qt-exception-1.1.json",
"isDeprecatedLicenseId": true,
"detailsUrl": "./Nokia-Qt-exception-1.1.html",
"referenceNumber": 31,
"name": "Nokia Qt LGPL exception 1.1",
"licenseExceptionId": "Nokia-Qt-exception-1.1",
"seeAlso": [
"https://www.keepassx.org/dev/projects/keepassx/repository/revisions/b8dfb9cc4d5133e0f09cd7533d15a4f1c19a40f2/entry/LICENSE.NOKIA-LGPL-EXCEPTION"
]
},
{
"reference": "./OCaml-LGPL-linking-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./OCaml-LGPL-linking-exception.html",
"referenceNumber": 29,
"name": "OCaml LGPL Linking Exception",
"licenseExceptionId": "OCaml-LGPL-linking-exception",
"seeAlso": [
"https://caml.inria.fr/ocaml/license.en.html"
]
},
{
"reference": "./OCCT-exception-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./OCCT-exception-1.0.html",
"referenceNumber": 15,
"name": "Open CASCADE Exception 1.0",
"licenseExceptionId": "OCCT-exception-1.0",
"seeAlso": [
"http://www.opencascade.com/content/licensing"
]
},
{
"reference": "./OpenJDK-assembly-exception-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./OpenJDK-assembly-exception-1.0.html",
"referenceNumber": 24,
"name": "OpenJDK Assembly exception 1.0",
"licenseExceptionId": "OpenJDK-assembly-exception-1.0",
"seeAlso": [
"http://openjdk.java.net/legal/assembly-exception.html"
]
},
{
"reference": "./openvpn-openssl-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./openvpn-openssl-exception.html",
"referenceNumber": 43,
"name": "OpenVPN OpenSSL Exception",
"licenseExceptionId": "openvpn-openssl-exception",
"seeAlso": [
"http://openvpn.net/index.php/license.html"
]
},
{
"reference": "./PS-or-PDF-font-exception-20170817.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./PS-or-PDF-font-exception-20170817.html",
"referenceNumber": 45,
"name": "PS/PDF font exception (2017-08-17)",
"licenseExceptionId": "PS-or-PDF-font-exception-20170817",
"seeAlso": [
"https://github.com/ArtifexSoftware/urw-base35-fonts/blob/65962e27febc3883a17e651cdb23e783668c996f/LICENSE"
]
},
{
"reference": "./QPL-1.0-INRIA-2004-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./QPL-1.0-INRIA-2004-exception.html",
"referenceNumber": 44,
"name": "INRIA QPL 1.0 2004 variant exception",
"licenseExceptionId": "QPL-1.0-INRIA-2004-exception",
"seeAlso": [
"https://git.frama-c.com/pub/frama-c/-/blob/master/licenses/Q_MODIFIED_LICENSE",
"https://github.com/maranget/hevea/blob/master/LICENSE"
]
},
{
"reference": "./Qt-GPL-exception-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Qt-GPL-exception-1.0.html",
"referenceNumber": 10,
"name": "Qt GPL exception 1.0",
"licenseExceptionId": "Qt-GPL-exception-1.0",
"seeAlso": [
"http://code.qt.io/cgit/qt/qtbase.git/tree/LICENSE.GPL3-EXCEPT"
]
},
{
"reference": "./Qt-LGPL-exception-1.1.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Qt-LGPL-exception-1.1.html",
"referenceNumber": 16,
"name": "Qt LGPL exception 1.1",
"licenseExceptionId": "Qt-LGPL-exception-1.1",
"seeAlso": [
"http://code.qt.io/cgit/qt/qtbase.git/tree/LGPL_EXCEPTION.txt"
]
},
{
"reference": "./Qwt-exception-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Qwt-exception-1.0.html",
"referenceNumber": 51,
"name": "Qwt exception 1.0",
"licenseExceptionId": "Qwt-exception-1.0",
"seeAlso": [
"http://qwt.sourceforge.net/qwtlicense.html"
]
},
{
"reference": "./SHL-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./SHL-2.0.html",
"referenceNumber": 26,
"name": "Solderpad Hardware License v2.0",
"licenseExceptionId": "SHL-2.0",
"seeAlso": [
"https://solderpad.org/licenses/SHL-2.0/"
]
},
{
"reference": "./SHL-2.1.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./SHL-2.1.html",
"referenceNumber": 23,
"name": "Solderpad Hardware License v2.1",
"licenseExceptionId": "SHL-2.1",
"seeAlso": [
"https://solderpad.org/licenses/SHL-2.1/"
]
},
{
"reference": "./SWI-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./SWI-exception.html",
"referenceNumber": 22,
"name": "SWI exception",
"licenseExceptionId": "SWI-exception",
"seeAlso": [
"https://github.com/SWI-Prolog/packages-clpqr/blob/bfa80b9270274f0800120d5b8e6fef42ac2dc6a5/clpqr/class.pl"
]
},
{
"reference": "./Swift-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Swift-exception.html",
"referenceNumber": 46,
"name": "Swift Exception",
"licenseExceptionId": "Swift-exception",
"seeAlso": [
"https://swift.org/LICENSE.txt",
"https://github.com/apple/swift-package-manager/blob/7ab2275f447a5eb37497ed63a9340f8a6d1e488b/LICENSE.txt#L205"
]
},
{
"reference": "./u-boot-exception-2.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./u-boot-exception-2.0.html",
"referenceNumber": 5,
"name": "U-Boot exception 2.0",
"licenseExceptionId": "u-boot-exception-2.0",
"seeAlso": [
"http://git.denx.de/?p\u003du-boot.git;a\u003dblob;f\u003dLicenses/Exceptions"
]
},
{
"reference": "./Universal-FOSS-exception-1.0.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./Universal-FOSS-exception-1.0.html",
"referenceNumber": 12,
"name": "Universal FOSS Exception, Version 1.0",
"licenseExceptionId": "Universal-FOSS-exception-1.0",
"seeAlso": [
"https://oss.oracle.com/licenses/universal-foss-exception/"
]
},
{
"reference": "./vsftpd-openssl-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./vsftpd-openssl-exception.html",
"referenceNumber": 56,
"name": "vsftpd OpenSSL exception",
"licenseExceptionId": "vsftpd-openssl-exception",
"seeAlso": [
"https://git.stg.centos.org/source-git/vsftpd/blob/f727873674d9c9cd7afcae6677aa782eb54c8362/f/LICENSE",
"https://launchpad.net/debian/squeeze/+source/vsftpd/+copyright",
"https://github.com/richardcochran/vsftpd/blob/master/COPYING"
]
},
{
"reference": "./WxWindows-exception-3.1.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./WxWindows-exception-3.1.html",
"referenceNumber": 25,
"name": "WxWindows Library Exception 3.1",
"licenseExceptionId": "WxWindows-exception-3.1",
"seeAlso": [
"http://www.opensource.org/licenses/WXwindows"
]
},
{
"reference": "./x11vnc-openssl-exception.json",
"isDeprecatedLicenseId": false,
"detailsUrl": "./x11vnc-openssl-exception.html",
"referenceNumber": 6,
"name": "x11vnc OpenSSL Exception",
"licenseExceptionId": "x11vnc-openssl-exception",
"seeAlso": [
"https://github.com/LibVNC/x11vnc/blob/master/src/8to24.c#L22"
]
}
],
"releaseDate": "2023-06-18"
}

View File

@@ -0,0 +1,413 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Spdx.Models;
namespace StellaOps.Scanner.Emit.Spdx.Serialization;
public static class SpdxJsonLdSerializer
{
// Compact output; null-valued properties are omitted from the serialized nodes.
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
/// <summary>
/// Serializes <paramref name="document"/> to canonical SPDX 3.0.1 JSON-LD bytes.
/// Output is deterministic: id lists are de-duplicated and ordinally sorted, elements
/// and relationships are emitted in SpdxId order, and the final bytes are produced by
/// <c>CanonJson.Canonicalize</c>.
/// </summary>
public static byte[] Serialize(SpdxDocument document)
{
ArgumentNullException.ThrowIfNull(document);
// Single CreationInfo blank node shared by every element in the graph.
var creationInfoId = "_:creationinfo";
var creatorNodes = BuildCreatorNodes(document, creationInfoId, document.CreationInfo.Creators);
var createdUsingNodes = BuildCreatorNodes(document, creationInfoId, document.CreationInfo.CreatedUsing);
// Sorted, de-duplicated references keep the serialized graph stable across runs.
var createdByRefs = creatorNodes
.Select(node => node.Reference)
.Distinct(StringComparer.Ordinal)
.OrderBy(reference => reference, StringComparer.Ordinal)
.ToArray();
var createdUsingRefs = createdUsingNodes
.Select(node => node.Reference)
.Distinct(StringComparer.Ordinal)
.OrderBy(reference => reference, StringComparer.Ordinal)
.ToArray();
var creationInfo = new Dictionary<string, object?>
{
["type"] = "CreationInfo",
["@id"] = creationInfoId,
["created"] = ScannerTimestamps.ToIso8601(document.CreationInfo.Created),
["specVersion"] = document.CreationInfo.SpecVersion
};
// createdBy/createdUsing are omitted entirely when empty rather than emitted as [].
if (createdByRefs.Length > 0)
{
creationInfo["createdBy"] = createdByRefs;
}
if (createdUsingRefs.Length > 0)
{
creationInfo["createdUsing"] = createdUsingRefs;
}
var graph = new List<object>
{
creationInfo
};
// Nodes produced by BuildCreatorNodes (shape defined by that helper) enter the
// graph before the document node.
foreach (var node in creatorNodes.Concat(createdUsingNodes).Select(entry => entry.Node))
{
graph.Add(node);
}
// The document's spdxId is its namespace IRI.
var documentId = document.DocumentNamespace;
var elementIds = BuildElementIds(document, creatorNodes, createdUsingNodes);
// Default conformance covers the core and software profiles.
var profileConformance = document.ProfileConformance.IsDefaultOrEmpty
? new[] { "core", "software" }
: document.ProfileConformance.OrderBy(value => value, StringComparer.Ordinal).ToArray();
var documentNode = new Dictionary<string, object?>
{
["type"] = SpdxDefaults.DocumentType,
["spdxId"] = documentId,
["creationInfo"] = creationInfoId,
["rootElement"] = new[] { document.Sbom.SpdxId },
["element"] = elementIds,
["profileConformance"] = profileConformance
};
graph.Add(documentNode);
// NOTE(review): Elements is already ImmutableArray<SpdxElement>, so OfType<SpdxElement>()
// filters nothing; the sbom node lists only document.Elements ids (relationships and
// creator nodes appear in the document node's "element" list instead).
var sbomElementIds = document.Elements
.OfType<SpdxElement>()
.Select(element => element.SpdxId)
.OrderBy(id => id, StringComparer.Ordinal)
.ToArray();
var sbomNode = new Dictionary<string, object?>
{
["type"] = SpdxDefaults.SbomType,
["spdxId"] = document.Sbom.SpdxId,
["creationInfo"] = creationInfoId,
["rootElement"] = document.Sbom.RootElements.OrderBy(id => id, StringComparer.Ordinal).ToArray(),
["element"] = sbomElementIds,
["software_sbomType"] = document.Sbom.SbomTypes.IsDefaultOrEmpty
? new[] { "build" }
: document.Sbom.SbomTypes.OrderBy(value => value, StringComparer.Ordinal).ToArray()
};
graph.Add(sbomNode);
// Typed elements are emitted in SpdxId order; element subtypes not listed here
// (e.g. plain SpdxSbom instances inside Elements) are silently skipped.
foreach (var element in document.Elements.OrderBy(element => element.SpdxId, StringComparer.Ordinal))
{
switch (element)
{
case SpdxPackage package:
graph.Add(BuildPackageNode(package, creationInfoId));
break;
case SpdxFile file:
graph.Add(BuildFileNode(file, creationInfoId));
break;
case SpdxSnippet snippet:
graph.Add(BuildSnippetNode(snippet, creationInfoId));
break;
case SpdxVulnerability vulnerability:
graph.Add(BuildVulnerabilityNode(vulnerability, creationInfoId));
break;
case SpdxVulnAssessment assessment:
graph.Add(BuildVulnAssessmentNode(assessment, creationInfoId));
break;
}
}
foreach (var relationship in document.Relationships.OrderBy(relationship => relationship.SpdxId, StringComparer.Ordinal))
{
graph.Add(BuildRelationshipNode(relationship, creationInfoId));
}
var root = new Dictionary<string, object?>
{
["@context"] = SpdxDefaults.JsonLdContext,
["@graph"] = graph
};
return CanonJson.Canonicalize(root, JsonOptions);
}
/// <summary>
/// Collects the distinct identifier of every element referenced by the document
/// (sbom, elements, relationships, creators), ordinally sorted for determinism.
/// </summary>
private static string[] BuildElementIds(
    SpdxDocument document,
    IEnumerable<CreatorNode> creatorNodes,
    IEnumerable<CreatorNode> createdUsingNodes)
{
    var elementIds = document.Elements.Select(element => element.SpdxId);
    var relationshipIds = document.Relationships.Select(relationship => relationship.SpdxId);
    var creatorIds = creatorNodes
        .Concat(createdUsingNodes)
        .Select(creator => creator.Reference)
        .Where(reference => !string.IsNullOrWhiteSpace(reference));

    return new[] { document.Sbom.SpdxId }
        .Concat(elementIds)
        .Concat(relationshipIds)
        .Concat(creatorIds)
        .Distinct(StringComparer.Ordinal)
        .OrderBy(id => id, StringComparer.Ordinal)
        .ToArray();
}
/// <summary>
/// Converts raw creator entries into JSON-LD agent/tool nodes.
/// Entries that do not match the "kind: name" shape fall back to a Tool named
/// after the raw entry. Results are ordinally sorted by reference.
/// </summary>
private static IReadOnlyList<CreatorNode> BuildCreatorNodes(
    SpdxDocument document,
    string creationInfoId,
    ImmutableArray<string> creators)
{
    if (creators.IsDefaultOrEmpty)
    {
        return Array.Empty<CreatorNode>();
    }

    var nodes = new List<CreatorNode>();
    foreach (var entry in creators)
    {
        if (string.IsNullOrWhiteSpace(entry))
        {
            continue;
        }

        string type;
        string name;
        if (ParseCreator(entry) is { } parsed)
        {
            (type, name) = parsed;
        }
        else
        {
            // Unparseable entries are treated as tools identified by the raw text.
            // CreateCreatorId lower-cases the type, so "Tool" hashes identically to "tool".
            type = "Tool";
            name = entry.Trim();
        }

        var reference = CreateCreatorId(document.DocumentNamespace, type, name);
        nodes.Add(new CreatorNode(reference, name, new Dictionary<string, object?>
        {
            ["type"] = type,
            ["spdxId"] = reference,
            ["name"] = name,
            ["creationInfo"] = creationInfoId
        }));
    }

    return nodes
        .OrderBy(node => node.Reference, StringComparer.Ordinal)
        .ToArray();
}
/// <summary>
/// Parses an SPDX creator string of the form "Tool: name", "Organization: name",
/// or "Person: name" (prefix is case-insensitive). Returns null for any other shape.
/// </summary>
private static (string Type, string Name)? ParseCreator(string creator)
{
    var trimmed = creator.Trim();
    var separator = trimmed.IndexOf(':');
    // Require a non-empty prefix before the colon.
    if (separator <= 0)
    {
        return null;
    }

    var prefix = trimmed[..separator].Trim().ToLowerInvariant();
    var name = trimmed[(separator + 1)..].Trim();
    if (name.Length == 0)
    {
        return null;
    }

    return prefix switch
    {
        "tool" => ("Tool", name),
        "organization" => ("Organization", name),
        "person" => ("Person", name),
        _ => null
    };
}
/// <summary>
/// Builds a deterministic creator identifier under the document namespace,
/// using the lower-cased creator kind plus a stable hash of (namespace, kind, name).
/// </summary>
private static string CreateCreatorId(string documentNamespace, string type, string name)
{
    var kind = type.Trim().ToLowerInvariant();
    var displayName = name.Trim();
    var hash = ScannerIdentifiers.CreateDeterministicHash(documentNamespace, kind, displayName);
    return $"{documentNamespace}#{kind}-{hash}";
}
/// <summary>
/// Builds the JSON-LD node for a package element. Optional properties are
/// omitted when empty; "NOASSERTION" download locations are dropped entirely.
/// </summary>
private static Dictionary<string, object?> BuildPackageNode(SpdxPackage package, string creationInfoId)
{
    var properties = new Dictionary<string, object?>
    {
        ["type"] = SpdxDefaults.PackageType,
        ["spdxId"] = package.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["name"] = package.Name ?? package.SpdxId
    };

    AddIfValue(properties, "software_packageVersion", package.Version);
    AddIfValue(properties, "software_packageUrl", package.PackageUrl);

    // "NOASSERTION" is the SPDX sentinel for an unknown location; omit it.
    if (!string.Equals(package.DownloadLocation, "NOASSERTION", StringComparison.OrdinalIgnoreCase))
    {
        AddIfValue(properties, "software_downloadLocation", package.DownloadLocation);
    }

    AddIfValue(properties, "software_primaryPurpose", package.PrimaryPurpose);
    AddIfValue(properties, "software_copyrightText", package.CopyrightText);

    // Declared license takes precedence over the concluded license.
    if (package.DeclaredLicense is { } declared)
    {
        properties["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(declared);
    }
    else if (package.ConcludedLicense is { } concluded)
    {
        properties["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(concluded);
    }

    return properties;
}
/// <summary>
/// Builds the JSON-LD node for a file element. Name preference order is
/// file name, then display name, then SPDX id.
/// </summary>
private static Dictionary<string, object?> BuildFileNode(SpdxFile file, string creationInfoId)
{
    var properties = new Dictionary<string, object?>
    {
        ["type"] = SpdxDefaults.FileType,
        ["spdxId"] = file.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["name"] = file.FileName ?? file.Name ?? file.SpdxId
    };

    AddIfValue(properties, "software_copyrightText", file.CopyrightText);

    if (file.ConcludedLicense is { } concluded)
    {
        properties["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(concluded);
    }

    return properties;
}
/// <summary>
/// Builds the JSON-LD node for a snippet element, linking it to its source file.
/// </summary>
private static Dictionary<string, object?> BuildSnippetNode(SpdxSnippet snippet, string creationInfoId)
    => new()
    {
        ["type"] = SpdxDefaults.SnippetType,
        ["spdxId"] = snippet.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["name"] = snippet.Name ?? snippet.SpdxId,
        ["software_snippetFromFile"] = snippet.FromFileSpdxId
    };
/// <summary>
/// Builds the JSON-LD node for a vulnerability element; optional security
/// profile properties are omitted when unset.
/// </summary>
private static Dictionary<string, object?> BuildVulnerabilityNode(SpdxVulnerability vulnerability, string creationInfoId)
{
    var properties = new Dictionary<string, object?>
    {
        ["type"] = "security_Vulnerability",
        ["spdxId"] = vulnerability.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["name"] = vulnerability.Name ?? vulnerability.SpdxId
    };

    AddIfValue(properties, "security_locator", vulnerability.Locator);
    AddIfValue(properties, "security_statusNotes", vulnerability.StatusNotes);
    AddIfValue(properties, "security_publishedTime", vulnerability.PublishedTime);
    AddIfValue(properties, "security_modifiedTime", vulnerability.ModifiedTime);

    return properties;
}
/// <summary>
/// Builds the JSON-LD node for a vulnerability assessment relationship;
/// severity/vector/score are omitted when unset.
/// </summary>
private static Dictionary<string, object?> BuildVulnAssessmentNode(SpdxVulnAssessment assessment, string creationInfoId)
{
    var properties = new Dictionary<string, object?>
    {
        ["type"] = "security_VulnAssessmentRelationship",
        ["spdxId"] = assessment.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["name"] = assessment.Name ?? assessment.SpdxId
    };

    AddIfValue(properties, "security_severity", assessment.Severity);
    AddIfValue(properties, "security_vectorString", assessment.VectorString);
    AddIfValue(properties, "security_score", assessment.Score);

    return properties;
}
/// <summary>
/// Builds the JSON-LD node for a relationship element, materializing the
/// target list as an array.
/// </summary>
private static Dictionary<string, object?> BuildRelationshipNode(SpdxRelationship relationship, string creationInfoId)
    => new()
    {
        ["type"] = SpdxDefaults.RelationshipType,
        ["spdxId"] = relationship.SpdxId,
        ["creationInfo"] = creationInfoId,
        ["from"] = relationship.FromElement,
        ["relationshipType"] = RelationshipTypeToString(relationship.Type),
        ["to"] = relationship.ToElements.ToArray()
    };
/// <summary>Sets <paramref name="key"/> only when the value is non-empty, non-whitespace text.</summary>
private static void AddIfValue(Dictionary<string, object?> node, string key, string? value)
{
    if (!string.IsNullOrWhiteSpace(value))
    {
        node[key] = value;
    }
}
/// <summary>Sets <paramref name="key"/> only when the numeric value is present.</summary>
private static void AddIfValue(Dictionary<string, object?> node, string key, long? value)
{
    if (value is { } number)
    {
        node[key] = number;
    }
}
/// <summary>Sets <paramref name="key"/> to the ISO-8601 rendering of the timestamp when present.</summary>
private static void AddIfValue(Dictionary<string, object?> node, string key, DateTimeOffset? value)
{
    if (value is { } timestamp)
    {
        node[key] = ScannerTimestamps.ToIso8601(timestamp);
    }
}
/// <summary>Maps a relationship type to its SPDX 3.x camelCase vocabulary term ("other" for unmapped values).</summary>
private static string RelationshipTypeToString(SpdxRelationshipType type)
{
    switch (type)
    {
        case SpdxRelationshipType.Describes:
            return "describes";
        case SpdxRelationshipType.DependsOn:
            return "dependsOn";
        case SpdxRelationshipType.Contains:
            return "contains";
        case SpdxRelationshipType.ContainedBy:
            return "containedBy";
        default:
            return "other";
    }
}
/// <summary>A creator prepared for emission: its SPDX reference, display name, and JSON-LD node.</summary>
private sealed record CreatorNode(string Reference, string Name, Dictionary<string, object?> Node);
}

View File

@@ -0,0 +1,115 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Spdx.Models;
namespace StellaOps.Scanner.Emit.Spdx.Serialization;
/// <summary>
/// Options controlling which optional element kinds the tag-value serializer emits.
/// Both default to false (packages and relationships only).
/// </summary>
public sealed record SpdxTagValueOptions
{
    /// <summary>Whether to include file elements in the output.</summary>
    public bool IncludeFiles { get; init; }

    /// <summary>Whether to include snippet elements in the output.</summary>
    public bool IncludeSnippets { get; init; }
}
/// <summary>
/// Serializes an SPDX document to the SPDX 2.3 tag-value format.
/// Output is deterministic: packages, files, snippets, and relationships are
/// emitted in ordinal order.
/// </summary>
public static class SpdxTagValueSerializer
{
    /// <summary>
    /// Serializes <paramref name="document"/> as UTF-8 tag-value text.
    /// </summary>
    /// <param name="document">Document to serialize. Must not be null.</param>
    /// <param name="options">
    /// Optional switches for file/snippet sections. Previously these options were
    /// accepted but ignored; they are now honored (defaults keep the old output).
    /// </param>
    public static byte[] Serialize(SpdxDocument document, SpdxTagValueOptions? options = null)
    {
        ArgumentNullException.ThrowIfNull(document);
        options ??= new SpdxTagValueOptions();

        var builder = new StringBuilder();
        AppendDocumentHeader(builder, document);
        AppendPackages(builder, document);

        if (options.IncludeFiles)
        {
            AppendFiles(builder, document);
        }
        if (options.IncludeSnippets)
        {
            AppendSnippets(builder, document);
        }

        AppendRelationships(builder, document);
        return Encoding.UTF8.GetBytes(builder.ToString());
    }

    // Emits the fixed document preamble plus sorted creator entries.
    private static void AppendDocumentHeader(StringBuilder builder, SpdxDocument document)
    {
        builder.AppendLine("SPDXVersion: SPDX-2.3");
        builder.AppendLine("DataLicense: CC0-1.0");
        builder.AppendLine("SPDXID: SPDXRef-DOCUMENT");
        builder.AppendLine($"DocumentName: {Escape(document.Name)}");
        builder.AppendLine($"DocumentNamespace: {Escape(document.DocumentNamespace)}");
        foreach (var creator in document.CreationInfo.Creators
            .Where(static entry => !string.IsNullOrWhiteSpace(entry))
            .OrderBy(entry => entry, StringComparer.Ordinal))
        {
            builder.AppendLine($"Creator: {Escape(creator)}");
        }
        builder.AppendLine($"Created: {ScannerTimestamps.ToIso8601(document.CreationInfo.Created)}");
        builder.AppendLine();
    }

    // Emits one package section per SpdxPackage element, sorted by SPDX id.
    private static void AppendPackages(StringBuilder builder, SpdxDocument document)
    {
        var packages = document.Elements
            .OfType<SpdxPackage>()
            .OrderBy(pkg => pkg.SpdxId, StringComparer.Ordinal)
            .ToImmutableArray();
        foreach (var package in packages)
        {
            builder.AppendLine($"PackageName: {Escape(package.Name ?? package.SpdxId)}");
            builder.AppendLine($"SPDXID: {Escape(package.SpdxId)}");
            if (!string.IsNullOrWhiteSpace(package.Version))
            {
                builder.AppendLine($"PackageVersion: {Escape(package.Version)}");
            }
            builder.AppendLine($"PackageDownloadLocation: {Escape(package.DownloadLocation ?? "NOASSERTION")}");
            // Declared license takes precedence; concluded is the fallback.
            if (package.DeclaredLicense is not null)
            {
                builder.AppendLine($"PackageLicenseDeclared: {SpdxLicenseExpressionRenderer.Render(package.DeclaredLicense)}");
            }
            else if (package.ConcludedLicense is not null)
            {
                builder.AppendLine($"PackageLicenseConcluded: {SpdxLicenseExpressionRenderer.Render(package.ConcludedLicense)}");
            }
            if (!string.IsNullOrWhiteSpace(package.PackageUrl))
            {
                builder.AppendLine($"ExternalRef: PACKAGE-MANAGER purl {Escape(package.PackageUrl)}");
            }
            if (!string.IsNullOrWhiteSpace(package.PrimaryPurpose))
            {
                builder.AppendLine($"PrimaryPackagePurpose: {Escape(package.PrimaryPurpose)}");
            }
            builder.AppendLine();
        }
    }

    // Emits file sections (opt-in via SpdxTagValueOptions.IncludeFiles).
    private static void AppendFiles(StringBuilder builder, SpdxDocument document)
    {
        foreach (var file in document.Elements
            .OfType<SpdxFile>()
            .OrderBy(f => f.SpdxId, StringComparer.Ordinal))
        {
            builder.AppendLine($"FileName: {Escape(file.FileName ?? file.Name ?? file.SpdxId)}");
            builder.AppendLine($"SPDXID: {Escape(file.SpdxId)}");
            if (file.ConcludedLicense is not null)
            {
                builder.AppendLine($"LicenseConcluded: {SpdxLicenseExpressionRenderer.Render(file.ConcludedLicense)}");
            }
            if (!string.IsNullOrWhiteSpace(file.CopyrightText))
            {
                builder.AppendLine($"FileCopyrightText: {Escape(file.CopyrightText)}");
            }
            builder.AppendLine();
        }
    }

    // Emits snippet sections (opt-in via SpdxTagValueOptions.IncludeSnippets).
    private static void AppendSnippets(StringBuilder builder, SpdxDocument document)
    {
        foreach (var snippet in document.Elements
            .OfType<SpdxSnippet>()
            .OrderBy(s => s.SpdxId, StringComparer.Ordinal))
        {
            builder.AppendLine($"SnippetSPDXID: {Escape(snippet.SpdxId)}");
            builder.AppendLine($"SnippetFromFileSPDXID: {Escape(snippet.FromFileSpdxId)}");
            if (!string.IsNullOrWhiteSpace(snippet.Name))
            {
                builder.AppendLine($"SnippetName: {Escape(snippet.Name)}");
            }
            builder.AppendLine();
        }
    }

    // Emits one Relationship line per (from, type, to) edge, fully sorted.
    private static void AppendRelationships(StringBuilder builder, SpdxDocument document)
    {
        foreach (var relationship in document.Relationships
            .OrderBy(rel => rel.FromElement, StringComparer.Ordinal)
            .ThenBy(rel => rel.Type)
            .ThenBy(rel => rel.ToElements.FirstOrDefault() ?? string.Empty, StringComparer.Ordinal))
        {
            foreach (var target in relationship.ToElements.OrderBy(id => id, StringComparer.Ordinal))
            {
                builder.AppendLine($"Relationship: {Escape(relationship.FromElement)} {RelationshipTypeToTagValue(relationship.Type)} {Escape(target)}");
            }
        }
    }

    // Maps the relationship enum to SPDX 2.3 UPPER_SNAKE tag-value vocabulary.
    private static string RelationshipTypeToTagValue(SpdxRelationshipType type)
        => type switch
        {
            SpdxRelationshipType.Describes => "DESCRIBES",
            SpdxRelationshipType.DependsOn => "DEPENDS_ON",
            SpdxRelationshipType.Contains => "CONTAINS",
            SpdxRelationshipType.ContainedBy => "CONTAINED_BY",
            _ => "OTHER"
        };

    /// <summary>
    /// Escapes a tag value: single-line values are trimmed; multi-line values are
    /// normalized to LF and wrapped in SPDX &lt;text&gt; markers.
    /// </summary>
    private static string Escape(string value)
    {
        if (!value.Contains('\n', StringComparison.Ordinal) && !value.Contains('\r', StringComparison.Ordinal))
        {
            return value.Trim();
        }
        var normalized = value.Replace("\r\n", "\n", StringComparison.Ordinal).Replace('\r', '\n');
        return $"<text>{normalized}</text>";
    }
}

View File

@@ -0,0 +1,45 @@
using System;
using StellaOps.Scanner.Core.Utility;
namespace StellaOps.Scanner.Emit.Spdx;
/// <summary>
/// Builds deterministic SPDX identifiers rooted at a document namespace derived
/// from a namespace base URI and the scanned image digest.
/// </summary>
internal sealed class SpdxIdBuilder
{
    /// <summary>
    /// Creates a builder for the given namespace base and image digest.
    /// </summary>
    /// <exception cref="ArgumentException">When the namespace base is null/blank.</exception>
    public SpdxIdBuilder(string namespaceBase, string imageDigest)
    {
        if (string.IsNullOrWhiteSpace(namespaceBase))
        {
            throw new ArgumentException("Namespace base is required.", nameof(namespaceBase));
        }

        var baseUri = namespaceBase.Trim().TrimEnd('/');
        var digest = ScannerIdentifiers.NormalizeDigest(imageDigest) ?? "unknown";
        // Keep only the hex portion after any "algo:" prefix.
        var digestValue = digest.Split(':', 2, StringSplitOptions.TrimEntries)[^1];
        DocumentNamespace = $"{baseUri}/image/{digestValue}";
    }

    /// <summary>Base namespace for every identifier produced by this builder.</summary>
    public string DocumentNamespace { get; }

    /// <summary>Identifier of the SPDX document element.</summary>
    public string DocumentId => $"{DocumentNamespace}#document";

    /// <summary>Identifier of the SBOM element.</summary>
    public string SbomId => $"{DocumentNamespace}#sbom";

    /// <summary>Blank-node identifier of the shared CreationInfo node.</summary>
    public string CreationInfoId => "_:creationinfo";

    /// <summary>Deterministic package identifier for the given package key.</summary>
    public string CreatePackageId(string key)
        => $"{DocumentNamespace}#pkg-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "pkg", key)}";

    /// <summary>Deterministic relationship identifier for a (from, type, to) triple.</summary>
    public string CreateRelationshipId(string from, string type, string to)
        => $"{DocumentNamespace}#rel-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "rel", from, type, to)}";

    /// <summary>Deterministic tool identifier for the given tool name.</summary>
    public string CreateToolId(string name)
        => $"{DocumentNamespace}#tool-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "tool", name)}";

    /// <summary>Deterministic organization identifier for the given organization name.</summary>
    public string CreateOrganizationId(string name)
        => $"{DocumentNamespace}#org-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "org", name)}";
}

View File

@@ -14,7 +14,12 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="CycloneDX.Core" Version="10.0.2" />
<PackageReference Include="CycloneDX.Core" Version="11.0.0" />
<PackageReference Include="RoaringBitmap" Version="0.0.9" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Spdx/Resources/spdx-license-list-3.21.json" LogicalName="StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-list-3.21.json" />
<EmbeddedResource Include="Spdx/Resources/spdx-license-exceptions-3.21.json" LogicalName="StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-exceptions-3.21.json" />
</ItemGroup>
</Project>

View File

@@ -3,3 +3,4 @@
| Task ID | Sprint | Status | Notes |
| --- | --- | --- | --- |
| `BSE-009` | `docs/implplan/SPRINT_3500_0012_0001_binary_sbom_emission.md` | DONE | Added end-to-end integration test coverage for native binary SBOM emission (emit → fragments → CycloneDX). |
| `SPRINT-3600-0002-T1` | `docs/implplan/SPRINT_3600_0002_0001_cyclonedx_1_7_upgrade.md` | DOING | Update CycloneDX packages and defaults to 1.7. |

View File

@@ -0,0 +1,239 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_4300_0002_0001
// Task: Evidence Privacy Controls - Evidence model definitions
namespace StellaOps.Scanner.Evidence.Models;
/// <summary>
/// Bundle of evidence for a finding. Every section is optional: a null section
/// means the evidence is unavailable or was removed entirely by redaction.
/// </summary>
public sealed record EvidenceBundle
{
    /// <summary>Reachability analysis evidence.</summary>
    public ReachabilityEvidence? Reachability { get; init; }

    /// <summary>Call stack evidence (runtime or static analysis).</summary>
    public CallStackEvidence? CallStack { get; init; }

    /// <summary>Provenance/build evidence.</summary>
    public ProvenanceEvidence? Provenance { get; init; }

    /// <summary>VEX statements.</summary>
    public VexEvidence? Vex { get; init; }

    /// <summary>EPSS evidence.</summary>
    public EpssEvidence? Epss { get; init; }
}
/// <summary>
/// Reachability analysis evidence.
/// </summary>
public sealed record ReachabilityEvidence
{
    /// <summary>Reachability result (value set not visible here — confirm against the producer).</summary>
    public required string Result { get; init; }

    /// <summary>Confidence score [0,1].</summary>
    public required double Confidence { get; init; }

    /// <summary>Paths from entrypoints to vulnerable code; may be emptied by redaction.</summary>
    public required IReadOnlyList<ReachabilityPath> Paths { get; init; }

    /// <summary>
    /// Number of paths, derived from <see cref="Paths"/>.
    /// NOTE(review): the original comment said this is "preserved in minimal redaction",
    /// but since it derives from Paths it becomes 0 once paths are stripped — confirm intent.
    /// </summary>
    public int PathCount => Paths.Count;

    /// <summary>Digest of the call graph used.</summary>
    public required string GraphDigest { get; init; }
}
/// <summary>
/// A path from an entrypoint to vulnerable code.
/// </summary>
public sealed record ReachabilityPath
{
    /// <summary>Unique path identifier.</summary>
    public required string PathId { get; init; }

    /// <summary>Steps along the path, from entrypoint toward the vulnerable code.</summary>
    public required IReadOnlyList<ReachabilityStep> Steps { get; init; }
}
/// <summary>
/// A step in a reachability path.
/// </summary>
public sealed record ReachabilityStep
{
    /// <summary>Node identifier (function/method name).</summary>
    public required string Node { get; init; }

    /// <summary>Hash of the file containing this code.</summary>
    public required string FileHash { get; init; }

    /// <summary>Line range as [start, end] — assumed inclusive; TODO confirm with the producer.</summary>
    public required int[] Lines { get; init; }

    /// <summary>Raw source code; null when unavailable or redacted.</summary>
    public string? SourceCode { get; init; }
}
/// <summary>
/// Call stack evidence (runtime or static analysis).
/// </summary>
public sealed record CallStackEvidence
{
    /// <summary>Stack frames. Ordering convention not visible here — confirm with the producer.</summary>
    public required IReadOnlyList<CallFrame> Frames { get; init; }

    /// <summary>Digest of the stack trace, when available.</summary>
    public string? StackDigest { get; init; }
}
/// <summary>
/// A frame in a call stack.
/// </summary>
public sealed record CallFrame
{
    /// <summary>Function/method name.</summary>
    public required string Function { get; init; }

    /// <summary>Hash of the file containing the frame.</summary>
    public required string FileHash { get; init; }

    /// <summary>Line number within the file.</summary>
    public required int Line { get; init; }

    /// <summary>Function arguments by name; null when unavailable or redacted.</summary>
    public IReadOnlyDictionary<string, string>? Arguments { get; init; }

    /// <summary>Local variables by name; null when unavailable or redacted.</summary>
    public IReadOnlyDictionary<string, string>? Locals { get; init; }
}
/// <summary>
/// Provenance/build evidence.
/// </summary>
public sealed record ProvenanceEvidence
{
    /// <summary>Build identifier.</summary>
    public required string BuildId { get; init; }

    /// <summary>Build digest.</summary>
    public required string BuildDigest { get; init; }

    /// <summary>Whether provenance was verified.</summary>
    public required bool Verified { get; init; }

    /// <summary>Additional metadata; null when unavailable or redacted.</summary>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) evidence.
/// </summary>
public sealed record VexEvidence
{
    /// <summary>VEX status (value set not visible here — confirm against the producer).</summary>
    public required string Status { get; init; }

    /// <summary>Justification for a not_affected status.</summary>
    public string? Justification { get; init; }

    /// <summary>Impact statement.</summary>
    public string? ImpactStatement { get; init; }

    /// <summary>Action statement.</summary>
    public string? ActionStatement { get; init; }

    /// <summary>Timestamp of the VEX statement.</summary>
    public DateTimeOffset? Timestamp { get; init; }
}
/// <summary>
/// EPSS (Exploit Prediction Scoring System) evidence.
/// </summary>
public sealed record EpssEvidence
{
    /// <summary>EPSS probability score in [0,1].</summary>
    public required double Score { get; init; }

    /// <summary>EPSS percentile rank in [0,1].</summary>
    public required double Percentile { get; init; }

    /// <summary>Date of the EPSS model snapshot that produced the score.</summary>
    public required DateOnly ModelDate { get; init; }

    /// <summary>When this evidence was captured.</summary>
    public required DateTimeOffset CapturedAt { get; init; }
}

View File

@@ -0,0 +1,46 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_4300_0002_0001
// Task: T1 - Define Redaction Levels
namespace StellaOps.Scanner.Evidence.Privacy;
/// <summary>
/// Redaction levels for evidence data. Higher numeric values redact more:
/// <see cref="Full"/> exposes everything, <see cref="Minimal"/> the least.
/// </summary>
public enum EvidenceRedactionLevel
{
    /// <summary>
    /// Full evidence including raw source code. Requires elevated permissions.
    /// </summary>
    Full = 0,

    /// <summary>
    /// Standard redaction: file hashes, symbol names, line ranges.
    /// No raw source code.
    /// </summary>
    Standard = 1,

    /// <summary>
    /// Minimal: only digests and counts. For external sharing.
    /// </summary>
    Minimal = 2
}
/// <summary>
/// Individual evidence fields that can be redacted, combinable as a bit mask.
/// </summary>
[Flags]
public enum RedactableFields
{
    /// <summary>No fields redacted.</summary>
    None = 0,
    /// <summary>Raw source code excerpts.</summary>
    SourceCode = 1 << 0,
    /// <summary>File paths.</summary>
    FilePaths = 1 << 1,
    /// <summary>Line numbers.</summary>
    LineNumbers = 1 << 2,
    /// <summary>Symbol (function/method) names.</summary>
    SymbolNames = 1 << 3,
    /// <summary>Call arguments and related call state.</summary>
    CallArguments = 1 << 4,
    /// <summary>Environment variables.</summary>
    EnvironmentVars = 1 << 5,
    /// <summary>Internal URLs.</summary>
    InternalUrls = 1 << 6,
    /// <summary>Every redactable field.</summary>
    All = SourceCode | FilePaths | LineNumbers | SymbolNames | CallArguments | EnvironmentVars | InternalUrls
}

View File

@@ -0,0 +1,227 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_4300_0002_0001
// Task: T2 - Implement EvidenceRedactionService
using System.Security.Claims;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Evidence.Models;
namespace StellaOps.Scanner.Evidence.Privacy;
/// <summary>
/// Service interface for redacting evidence based on privacy rules.
/// </summary>
public interface IEvidenceRedactionService
{
    /// <summary>Redacts evidence based on the specified level; returns a new bundle.</summary>
    EvidenceBundle Redact(EvidenceBundle bundle, EvidenceRedactionLevel level);

    /// <summary>Redacts specific fields from evidence; returns a new bundle.</summary>
    EvidenceBundle RedactFields(EvidenceBundle bundle, RedactableFields fields);

    /// <summary>Determines the appropriate redaction level for a user from their claims.</summary>
    EvidenceRedactionLevel DetermineLevel(ClaimsPrincipal user);
}
/// <summary>
/// Service for redacting evidence based on privacy rules. All redaction is
/// non-destructive: inputs are never mutated and a new bundle is returned.
/// </summary>
public sealed class EvidenceRedactionService : IEvidenceRedactionService
{
    // Flags the field-level API actually knows how to strip today. Requests for
    // any other flag are surfaced via a warning instead of being silently ignored.
    private const RedactableFields SupportedFields =
        RedactableFields.SourceCode | RedactableFields.CallArguments;

    private readonly ILogger<EvidenceRedactionService> _logger;

    public EvidenceRedactionService(ILogger<EvidenceRedactionService> logger)
    {
        _logger = logger;
    }

    /// <summary>
    /// Redacts evidence based on the specified level. Unknown levels fall back
    /// to <see cref="EvidenceRedactionLevel.Standard"/> rather than leaking full evidence.
    /// </summary>
    public EvidenceBundle Redact(EvidenceBundle bundle, EvidenceRedactionLevel level)
    {
        _logger.LogDebug("Redacting evidence to level {Level}", level);
        return level switch
        {
            EvidenceRedactionLevel.Full => bundle,
            EvidenceRedactionLevel.Standard => RedactStandard(bundle),
            EvidenceRedactionLevel.Minimal => RedactMinimal(bundle),
            _ => RedactStandard(bundle)
        };
    }

    /// <summary>
    /// Redacts specific fields from evidence. Only <see cref="RedactableFields.SourceCode"/>
    /// and <see cref="RedactableFields.CallArguments"/> are implemented; any other
    /// requested flag is logged as unsupported and left unredacted.
    /// </summary>
    public EvidenceBundle RedactFields(EvidenceBundle bundle, RedactableFields fields)
    {
        if (fields == RedactableFields.None)
        {
            return bundle;
        }

        // Previously unsupported flags were accepted and silently ignored.
        var unsupported = fields & ~SupportedFields;
        if (unsupported != RedactableFields.None)
        {
            _logger.LogWarning(
                "Field redaction requested for unsupported fields {Fields}; they were not redacted",
                unsupported);
        }

        var result = bundle;
        if (fields.HasFlag(RedactableFields.SourceCode))
        {
            result = result with
            {
                Reachability = result.Reachability is not null
                    ? RedactSourceCodeFromReachability(result.Reachability)
                    : null
            };
        }
        if (fields.HasFlag(RedactableFields.CallArguments))
        {
            result = result with
            {
                CallStack = result.CallStack is not null
                    ? RedactCallStackArguments(result.CallStack)
                    : null
            };
        }
        return result;
    }

    /// <summary>
    /// Determines the appropriate redaction level for a user from their scope/role
    /// claims. Callers without a recognized claim get <see cref="EvidenceRedactionLevel.Minimal"/>.
    /// </summary>
    public EvidenceRedactionLevel DetermineLevel(ClaimsPrincipal user)
    {
        if (user.HasClaim("scope", "evidence:full") ||
            user.HasClaim("role", "security_admin"))
        {
            _logger.LogDebug("User has full evidence access");
            return EvidenceRedactionLevel.Full;
        }
        if (user.HasClaim("scope", "evidence:standard") ||
            user.HasClaim("role", "security_analyst"))
        {
            _logger.LogDebug("User has standard evidence access");
            return EvidenceRedactionLevel.Standard;
        }
        _logger.LogDebug("User has minimal evidence access (default)");
        return EvidenceRedactionLevel.Minimal;
    }

    // Standard: keep hashes, line ranges, and trimmed symbols; drop raw source and call state.
    private static EvidenceBundle RedactStandard(EvidenceBundle bundle)
    {
        return bundle with
        {
            Reachability = bundle.Reachability is not null
                ? RedactReachability(bundle.Reachability)
                : null,
            CallStack = bundle.CallStack is not null
                ? RedactCallStack(bundle.CallStack)
                : null,
            Provenance = bundle.Provenance // Keep as-is (already safe at standard level)
        };
    }

    private static ReachabilityEvidence RedactReachability(ReachabilityEvidence evidence)
    {
        return evidence with
        {
            Paths = evidence.Paths.Select(p => new ReachabilityPath
            {
                PathId = p.PathId,
                Steps = p.Steps.Select(s => new ReachabilityStep
                {
                    Node = RedactSymbol(s.Node),
                    FileHash = s.FileHash, // Keep hash
                    Lines = s.Lines,       // Keep line range
                    SourceCode = null      // Redact source
                }).ToList()
            }).ToList()
        };
    }

    private static CallStackEvidence RedactCallStack(CallStackEvidence evidence)
    {
        return evidence with
        {
            Frames = evidence.Frames.Select(f => new CallFrame
            {
                Function = RedactSymbol(f.Function),
                FileHash = f.FileHash,
                Line = f.Line,
                Arguments = null, // Redact arguments
                Locals = null     // Redact locals
            }).ToList()
        };
    }

    // Keeps class/method names, drops the argument list:
    // "MyClass.MyMethod(string arg1, int arg2)" -> "MyClass.MyMethod(...)"
    private static string RedactSymbol(string symbol)
    {
        var parenIndex = symbol.IndexOf('(');
        return parenIndex > 0 ? symbol[..parenIndex] + "(...)" : symbol;
    }

    // Minimal: only digests, scores, and public data survive.
    private static EvidenceBundle RedactMinimal(EvidenceBundle bundle)
    {
        return bundle with
        {
            Reachability = bundle.Reachability is not null
                ? new ReachabilityEvidence
                {
                    Result = bundle.Reachability.Result,
                    Confidence = bundle.Reachability.Confidence,
                    // NOTE(review): emptying Paths also zeroes the derived PathCount — confirm intent.
                    Paths = [],
                    GraphDigest = bundle.Reachability.GraphDigest
                }
                : null,
            CallStack = null, // Remove entirely
            Provenance = bundle.Provenance is not null
                ? new ProvenanceEvidence
                {
                    BuildId = bundle.Provenance.BuildId,
                    BuildDigest = bundle.Provenance.BuildDigest,
                    Verified = bundle.Provenance.Verified
                    // Metadata intentionally dropped.
                }
                : null,
            Vex = bundle.Vex,  // Keep VEX (public data)
            Epss = bundle.Epss // Keep EPSS (public data)
        };
    }

    private static ReachabilityEvidence RedactSourceCodeFromReachability(ReachabilityEvidence evidence)
    {
        return evidence with
        {
            Paths = evidence.Paths.Select(p => new ReachabilityPath
            {
                PathId = p.PathId,
                Steps = p.Steps.Select(s => s with { SourceCode = null }).ToList()
            }).ToList()
        };
    }

    private static CallStackEvidence RedactCallStackArguments(CallStackEvidence evidence)
    {
        return evidence with
        {
            Frames = evidence.Frames.Select(f => f with
            {
                Arguments = null,
                Locals = null
            }).ToList()
        };
    }
}

View File

@@ -0,0 +1,18 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,433 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Orchestration.Fidelity;
/// <summary>
/// Runs vulnerability analysis at a selectable fidelity level and supports
/// upgrading a previously stored analysis to a higher level.
/// </summary>
public interface IFidelityAwareAnalyzer
{
    /// <summary>Analyzes the request at the given fidelity level.</summary>
    Task<FidelityAnalysisResult> AnalyzeAsync(
        AnalysisRequest request,
        FidelityLevel level,
        CancellationToken ct);

    /// <summary>Re-analyzes an existing finding at a higher fidelity level and merges the results.</summary>
    Task<FidelityUpgradeResult> UpgradeFidelityAsync(
        Guid findingId,
        FidelityLevel targetLevel,
        CancellationToken ct);
}
public sealed class FidelityAwareAnalyzer : IFidelityAwareAnalyzer
{
private readonly ICallGraphExtractor _callGraphExtractor;
private readonly IRuntimeCorrelator _runtimeCorrelator;
private readonly IBinaryMapper _binaryMapper;
private readonly IPackageMatcher _packageMatcher;
private readonly IAnalysisRepository _repository;
private readonly ILogger<FidelityAwareAnalyzer> _logger;

/// <summary>
/// Creates the analyzer. All dependencies are required; null arguments fail fast
/// here instead of surfacing later as a NullReferenceException mid-analysis.
/// </summary>
public FidelityAwareAnalyzer(
    ICallGraphExtractor callGraphExtractor,
    IRuntimeCorrelator runtimeCorrelator,
    IBinaryMapper binaryMapper,
    IPackageMatcher packageMatcher,
    IAnalysisRepository repository,
    ILogger<FidelityAwareAnalyzer> logger)
{
    _callGraphExtractor = callGraphExtractor ?? throw new ArgumentNullException(nameof(callGraphExtractor));
    _runtimeCorrelator = runtimeCorrelator ?? throw new ArgumentNullException(nameof(runtimeCorrelator));
    _binaryMapper = binaryMapper ?? throw new ArgumentNullException(nameof(binaryMapper));
    _packageMatcher = packageMatcher ?? throw new ArgumentNullException(nameof(packageMatcher));
    _repository = repository ?? throw new ArgumentNullException(nameof(repository));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Runs analysis at the requested fidelity level, escalating through package
/// matching (Quick), call-graph extraction (Standard), and binary mapping plus
/// runtime correlation (Deep). The whole run is bounded by the level's timeout.
/// </summary>
public async Task<FidelityAnalysisResult> AnalyzeAsync(
    AnalysisRequest request,
    FidelityLevel level,
    CancellationToken ct)
{
    var config = FidelityConfiguration.FromLevel(level);
    var stopwatch = Stopwatch.StartNew();

    // Linked token: either the caller's cancellation or the per-level timeout stops work.
    using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
    cts.CancelAfter(config.Timeout);

    try
    {
        // Level 1: Package matching (always done)
        var packageResult = await _packageMatcher.MatchAsync(request, cts.Token);
        if (level == FidelityLevel.Quick)
        {
            return BuildResult(packageResult, config, stopwatch.Elapsed);
        }

        // Level 2: Call graph analysis (Standard and Deep)
        CallGraphResult? callGraphResult = null;
        if (config.EnableCallGraph)
        {
            // Configured language filter wins; otherwise analyze detected languages.
            var languages = config.TargetLanguages ?? request.DetectedLanguages;
            callGraphResult = await _callGraphExtractor.ExtractAsync(
                request,
                languages,
                config.MaxCallGraphDepth,
                cts.Token);
        }
        if (level == FidelityLevel.Standard)
        {
            return BuildResult(packageResult, callGraphResult, config, stopwatch.Elapsed);
        }

        // Level 3: Binary mapping and runtime (Deep only)
        BinaryMappingResult? binaryResult = null;
        RuntimeCorrelationResult? runtimeResult = null;
        if (config.EnableBinaryMapping)
        {
            binaryResult = await _binaryMapper.MapAsync(request, cts.Token);
        }
        if (config.EnableRuntimeCorrelation)
        {
            runtimeResult = await _runtimeCorrelator.CorrelateAsync(request, cts.Token);
        }
        return BuildResult(
            packageResult,
            callGraphResult,
            binaryResult,
            runtimeResult,
            config,
            stopwatch.Elapsed);
    }
    // Only swallow the timeout cancellation; caller-initiated cancellation propagates.
    catch (OperationCanceledException) when (cts.IsCancellationRequested && !ct.IsCancellationRequested)
    {
        _logger.LogWarning(
            "Analysis timeout at fidelity {Level} after {Elapsed}",
            level, stopwatch.Elapsed);
        return BuildTimeoutResult(level, config, stopwatch.Elapsed);
    }
}
/// <summary>
/// Re-runs analysis for an existing finding at a higher fidelity level, merges
/// the new evidence into the stored result, and persists the merged result.
/// Returns NotFound when the finding has no stored analysis and AlreadyAtLevel
/// when the stored analysis already meets or exceeds the target level.
/// </summary>
public async Task<FidelityUpgradeResult> UpgradeFidelityAsync(
    Guid findingId,
    FidelityLevel targetLevel,
    CancellationToken ct)
{
    var existing = await _repository.GetAnalysisAsync(findingId, ct);
    if (existing is null)
    {
        return FidelityUpgradeResult.NotFound(findingId);
    }
    if (existing.FidelityLevel >= targetLevel)
    {
        return FidelityUpgradeResult.AlreadyAtLevel(existing);
    }

    // Re-run at the higher level, fold the new evidence into the stored result, persist.
    var upgraded = await AnalyzeAsync(existing.ToAnalysisRequest(), targetLevel, ct);
    var merged = MergeResults(existing, upgraded);
    await _repository.SaveAnalysisAsync(merged, ct);

    return new FidelityUpgradeResult
    {
        Success = true,
        FindingId = findingId,
        PreviousLevel = existing.FidelityLevel,
        NewLevel = targetLevel,
        ConfidenceImprovement = merged.Confidence - existing.Confidence,
        NewResult = merged
    };
}
/// <summary>Builds a Quick-level result from package matching alone.</summary>
private FidelityAnalysisResult BuildResult(
    PackageMatchResult packageResult,
    FidelityConfiguration config,
    TimeSpan elapsed)
{
    // An exact package match earns a small confidence bonus, capped at 1.0.
    var confidence = packageResult.HasExactMatch
        ? config.BaseConfidence + 0.1m
        : config.BaseConfidence;

    return new FidelityAnalysisResult
    {
        FidelityLevel = config.Level,
        Confidence = Math.Min(confidence, 1.0m),
        IsReachable = null, // Reachability is unknown without a call graph.
        PackageMatches = packageResult.Matches,
        CallGraph = null,
        BinaryMapping = null,
        RuntimeCorrelation = null,
        AnalysisTime = elapsed,
        TimedOut = false,
        CanUpgrade = true,
        UpgradeRecommendation = "Upgrade to Standard for call graph analysis"
    };
}
/// <summary>Builds a Standard-level result from package matching plus the call graph.</summary>
private FidelityAnalysisResult BuildResult(
    PackageMatchResult packageResult,
    CallGraphResult? callGraphResult,
    FidelityConfiguration config,
    TimeSpan elapsed)
{
    // A complete call graph earns a confidence bonus, capped at 1.0.
    var confidence = callGraphResult?.IsComplete == true
        ? config.BaseConfidence + 0.15m
        : config.BaseConfidence;

    var isReachable = callGraphResult?.HasPathToVulnerable;

    return new FidelityAnalysisResult
    {
        FidelityLevel = config.Level,
        Confidence = Math.Min(confidence, 1.0m),
        IsReachable = isReachable,
        PackageMatches = packageResult.Matches,
        CallGraph = callGraphResult,
        BinaryMapping = null,
        RuntimeCorrelation = null,
        AnalysisTime = elapsed,
        TimedOut = false,
        CanUpgrade = true,
        UpgradeRecommendation = isReachable == true
            ? "Upgrade to Deep for runtime verification"
            : "Upgrade to Deep for binary mapping confirmation"
    };
}
/// <summary>
/// Builds a Deep-level result from all evidence sources. Deep results are
/// terminal: no further upgrade is offered.
/// </summary>
private FidelityAnalysisResult BuildResult(
    PackageMatchResult packageResult,
    CallGraphResult? callGraphResult,
    BinaryMappingResult? binaryResult,
    RuntimeCorrelationResult? runtimeResult,
    FidelityConfiguration config,
    TimeSpan elapsed)
{
    // Runtime corroboration pins confidence at 0.95; otherwise a binary
    // mapping adds a small bonus on top of the configured base.
    decimal score;
    if (runtimeResult is { HasCorroboration: true })
    {
        score = 0.95m;
    }
    else if (binaryResult is { HasMapping: true })
    {
        score = config.BaseConfidence + 0.05m;
    }
    else
    {
        score = config.BaseConfidence;
    }

    return new FidelityAnalysisResult
    {
        FidelityLevel = config.Level,
        Confidence = Math.Min(score, 1.0m),
        IsReachable = DetermineReachability(callGraphResult, binaryResult, runtimeResult),
        PackageMatches = packageResult.Matches,
        CallGraph = callGraphResult,
        BinaryMapping = binaryResult,
        RuntimeCorrelation = runtimeResult,
        AnalysisTime = elapsed,
        TimedOut = false,
        CanUpgrade = false,
        UpgradeRecommendation = null
    };
}
/// <summary>
/// Resolves a tri-state reachability verdict. Runtime observation is
/// authoritative (a negative verdict requires >100 observations); otherwise a
/// complete static call graph decides; anything else is unknown (null).
/// </summary>
private static bool? DetermineReachability(
    CallGraphResult? callGraph,
    BinaryMappingResult? binary,
    RuntimeCorrelationResult? runtime)
{
    if (runtime is { WasExecuted: true })
        return true;
    if (runtime is { WasExecuted: false, ObservationCount: > 100 })
        return false;

    // Fall back to the static call-graph verdict; only trust a negative
    // answer when the graph is complete.
    if (callGraph is { HasPathToVulnerable: true })
        return true;
    if (callGraph is { HasPathToVulnerable: false, IsComplete: true })
        return false;

    return null; // Unknown
}
/// <summary>
/// Produces the low-confidence placeholder returned when analysis at the
/// attempted level exceeded its time budget. Timed-out results are not
/// upgradeable.
/// </summary>
private FidelityAnalysisResult BuildTimeoutResult(
    FidelityLevel attemptedLevel,
    FidelityConfiguration config,
    TimeSpan elapsed) => new()
{
    FidelityLevel = attemptedLevel,
    Confidence = 0.3m,
    IsReachable = null,
    PackageMatches = [],
    CallGraph = null,
    BinaryMapping = null,
    RuntimeCorrelation = null,
    AnalysisTime = elapsed,
    TimedOut = true,
    CanUpgrade = false,
    UpgradeRecommendation = "Analysis timed out. Try with smaller scope."
};
/// <summary>
/// Combines a freshly upgraded result with the previously stored one: new
/// evidence wins, but prior artifacts are kept where the upgrade produced
/// none, and analysis time accumulates across runs.
/// </summary>
private FidelityAnalysisResult MergeResults(
    FidelityAnalysisResult existing,
    FidelityAnalysisResult upgraded) => upgraded with
{
    IsReachable = upgraded.IsReachable ?? existing.IsReachable,
    CallGraph = upgraded.CallGraph ?? existing.CallGraph,
    BinaryMapping = upgraded.BinaryMapping ?? existing.BinaryMapping,
    RuntimeCorrelation = upgraded.RuntimeCorrelation ?? existing.RuntimeCorrelation,
    AnalysisTime = existing.AnalysisTime + upgraded.AnalysisTime
};
}
/// <summary>
/// Outcome of a reachability analysis run at a given fidelity level.
/// </summary>
public sealed record FidelityAnalysisResult
{
    /// <summary>Fidelity level the analysis was performed at.</summary>
    public required FidelityLevel FidelityLevel { get; init; }
    /// <summary>Overall confidence (capped at 1.0 by the builders).</summary>
    public required decimal Confidence { get; init; }
    /// <summary>Tri-state reachability verdict; null when undetermined.</summary>
    public bool? IsReachable { get; init; }
    /// <summary>Package-level matches backing this finding.</summary>
    public required IReadOnlyList<PackageMatch> PackageMatches { get; init; }
    /// <summary>Call-graph evidence; null at Quick fidelity.</summary>
    public CallGraphResult? CallGraph { get; init; }
    /// <summary>Binary-mapping evidence; populated at Deep fidelity only.</summary>
    public BinaryMappingResult? BinaryMapping { get; init; }
    /// <summary>Runtime-correlation evidence; populated at Deep fidelity only.</summary>
    public RuntimeCorrelationResult? RuntimeCorrelation { get; init; }
    /// <summary>Time spent analyzing; accumulates across upgrades (see MergeResults).</summary>
    public required TimeSpan AnalysisTime { get; init; }
    /// <summary>Whether the run hit its fidelity timeout.</summary>
    public required bool TimedOut { get; init; }
    /// <summary>Whether a higher fidelity level can refine this result.</summary>
    public required bool CanUpgrade { get; init; }
    /// <summary>Human-readable hint about the next upgrade step, if any.</summary>
    public string? UpgradeRecommendation { get; init; }
    /// <summary>
    /// Converts this result back into a request for upgrade scenarios.
    /// NOTE(review): currently returns an empty request — no fields are carried
    /// over from the existing result; confirm what the upgrade path requires.
    /// </summary>
    public AnalysisRequest ToAnalysisRequest()
    {
        // Convert back to analysis request for upgrade scenarios
        return new AnalysisRequest
        {
            // Populate from existing result
        };
    }
}
/// <summary>
/// Outcome of a fidelity upgrade request for a single finding.
/// </summary>
public sealed record FidelityUpgradeResult
{
    /// <summary>Whether the upgrade (or no-op) completed successfully.</summary>
    public required bool Success { get; init; }
    /// <summary>Finding the upgrade was requested for.</summary>
    public Guid FindingId { get; init; }
    /// <summary>Fidelity level before the upgrade, when known.</summary>
    public FidelityLevel? PreviousLevel { get; init; }
    /// <summary>Fidelity level after the upgrade, when known.</summary>
    public FidelityLevel? NewLevel { get; init; }
    /// <summary>Confidence delta achieved by the upgrade (0 for no-ops).</summary>
    public decimal ConfidenceImprovement { get; init; }
    /// <summary>The merged analysis result, when available.</summary>
    public FidelityAnalysisResult? NewResult { get; init; }
    /// <summary>Error description for failed upgrades.</summary>
    public string? Error { get; init; }

    /// <summary>Failure result for an unknown finding id.</summary>
    public static FidelityUpgradeResult NotFound(Guid id) => new()
    {
        Success = false,
        FindingId = id,
        Error = "Finding not found"
    };

    /// <summary>
    /// Success result for a finding already at (or above) the requested level.
    /// Fix: the previous single-argument form dropped the finding id, leaving
    /// <see cref="FindingId"/> at <see cref="Guid.Empty"/>; callers can now
    /// pass it. The one-argument call remains source-compatible.
    /// </summary>
    public static FidelityUpgradeResult AlreadyAtLevel(FidelityAnalysisResult existing, Guid findingId = default) => new()
    {
        Success = true,
        FindingId = findingId,
        PreviousLevel = existing.FidelityLevel,
        NewLevel = existing.FidelityLevel,
        ConfidenceImprovement = 0,
        NewResult = existing
    };
}
// Supporting interfaces and types
/// <summary>Extracts a call graph for the requested languages up to a depth limit.</summary>
public interface ICallGraphExtractor
{
    Task<CallGraphResult> ExtractAsync(
        AnalysisRequest request,
        IReadOnlyList<string> languages,
        int maxDepth,
        CancellationToken ct);
}
/// <summary>Correlates a finding with observed runtime evidence.</summary>
public interface IRuntimeCorrelator
{
    Task<RuntimeCorrelationResult> CorrelateAsync(
        AnalysisRequest request,
        CancellationToken ct);
}
/// <summary>Maps vulnerable code onto shipped binaries.</summary>
public interface IBinaryMapper
{
    Task<BinaryMappingResult> MapAsync(
        AnalysisRequest request,
        CancellationToken ct);
}
/// <summary>Performs package-level vulnerability matching.</summary>
public interface IPackageMatcher
{
    Task<PackageMatchResult> MatchAsync(
        AnalysisRequest request,
        CancellationToken ct);
}
/// <summary>Persistence for fidelity analysis results, keyed by finding id.</summary>
public interface IAnalysisRepository
{
    Task<FidelityAnalysisResult?> GetAnalysisAsync(Guid findingId, CancellationToken ct);
    Task SaveAnalysisAsync(FidelityAnalysisResult result, CancellationToken ct);
}
/// <summary>Input to an analysis run.</summary>
public sealed record AnalysisRequest
{
    // Languages detected in the scan target; empty when none were identified.
    public IReadOnlyList<string> DetectedLanguages { get; init; } = Array.Empty<string>();
}
/// <summary>Result of package-level vulnerability matching.</summary>
public sealed record PackageMatchResult
{
    // True when at least one match is exact; earns a confidence bonus at Quick level.
    public bool HasExactMatch { get; init; }
    public IReadOnlyList<PackageMatch> Matches { get; init; } = Array.Empty<PackageMatch>();
}
/// <summary>A single matched package.</summary>
public sealed record PackageMatch
{
    public required string PackageName { get; init; }
    public required string Version { get; init; }
}
/// <summary>Call-graph extraction outcome.</summary>
public sealed record CallGraphResult
{
    // Consumers only trust a negative reachability verdict when the graph is
    // complete (see DetermineReachability).
    public bool IsComplete { get; init; }
    // Null when static analysis could not decide either way.
    public bool? HasPathToVulnerable { get; init; }
}
/// <summary>Binary symbol-mapping outcome.</summary>
public sealed record BinaryMappingResult
{
    public bool HasMapping { get; init; }
}
/// <summary>Runtime-evidence correlation outcome.</summary>
public sealed record RuntimeCorrelationResult
{
    // Null when no runtime observation exists either way.
    public bool? WasExecuted { get; init; }
    public int ObservationCount { get; init; }
    public bool HasCorroboration { get; init; }
}

View File

@@ -0,0 +1,112 @@
namespace StellaOps.Scanner.Orchestration.Fidelity;
/// <summary>
/// Analysis fidelity level controlling depth vs speed tradeoff.
/// Declaration order is significant: levels are compared ordinally
/// (Quick &lt; Standard &lt; Deep) when deciding whether an upgrade is needed.
/// </summary>
public enum FidelityLevel
{
    /// <summary>
    /// Fast heuristic analysis. Uses package-level matching only.
    /// ~10x faster than Standard. Lower confidence.
    /// </summary>
    Quick,
    /// <summary>
    /// Standard analysis. Includes call graph for top languages.
    /// Balanced speed and accuracy.
    /// </summary>
    Standard,
    /// <summary>
    /// Deep analysis. Full call graph, runtime correlation, binary mapping.
    /// Highest confidence but slowest.
    /// </summary>
    Deep
}
/// <summary>
/// Configuration for each fidelity level.
/// </summary>
public sealed record FidelityConfiguration
{
    public required FidelityLevel Level { get; init; }

    /// <summary>Whether to perform call graph extraction.</summary>
    public bool EnableCallGraph { get; init; }

    /// <summary>Whether to correlate with runtime evidence.</summary>
    public bool EnableRuntimeCorrelation { get; init; }

    /// <summary>Whether to perform binary mapping.</summary>
    public bool EnableBinaryMapping { get; init; }

    /// <summary>Maximum call graph depth.</summary>
    public int MaxCallGraphDepth { get; init; }

    /// <summary>Timeout for analysis.</summary>
    public TimeSpan Timeout { get; init; }

    /// <summary>Base confidence for this fidelity level.</summary>
    public decimal BaseConfidence { get; init; }

    /// <summary>Languages to analyze (null = all).</summary>
    public IReadOnlyList<string>? TargetLanguages { get; init; }

    // Fix: the presets were expression-bodied properties allocating a fresh
    // record on every access; the record is immutable, so a single cached
    // instance per preset is safe and avoids the repeated allocation.

    /// <summary>Quick preset: package matching only, 30-second budget.</summary>
    public static FidelityConfiguration Quick { get; } = new()
    {
        Level = FidelityLevel.Quick,
        EnableCallGraph = false,
        EnableRuntimeCorrelation = false,
        EnableBinaryMapping = false,
        MaxCallGraphDepth = 0,
        Timeout = TimeSpan.FromSeconds(30),
        BaseConfidence = 0.5m,
        TargetLanguages = null
    };

    /// <summary>Standard preset: call graph for the top ecosystems, 5-minute budget.</summary>
    public static FidelityConfiguration Standard { get; } = new()
    {
        Level = FidelityLevel.Standard,
        EnableCallGraph = true,
        EnableRuntimeCorrelation = false,
        EnableBinaryMapping = false,
        MaxCallGraphDepth = 10,
        Timeout = TimeSpan.FromMinutes(5),
        BaseConfidence = 0.75m,
        TargetLanguages = ["java", "dotnet", "python", "go", "node"]
    };

    /// <summary>Deep preset: full pipeline including runtime correlation, 30-minute budget.</summary>
    public static FidelityConfiguration Deep { get; } = new()
    {
        Level = FidelityLevel.Deep,
        EnableCallGraph = true,
        EnableRuntimeCorrelation = true,
        EnableBinaryMapping = true,
        MaxCallGraphDepth = 50,
        Timeout = TimeSpan.FromMinutes(30),
        BaseConfidence = 0.9m,
        TargetLanguages = null
    };

    /// <summary>
    /// Maps a fidelity level to its preset configuration. Unrecognized values
    /// deliberately fall back to <see cref="Standard"/>.
    /// </summary>
    public static FidelityConfiguration FromLevel(FidelityLevel level) => level switch
    {
        FidelityLevel.Quick => Quick,
        FidelityLevel.Standard => Standard,
        FidelityLevel.Deep => Deep,
        _ => Standard
    };
}

View File

@@ -0,0 +1,36 @@
# AGENTS - Scanner Reachability Library
## Mission
Deliver deterministic reachability analysis, slice generation, and evidence artifacts used by Scanner and downstream policy/VEX workflows.
## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (unit/integration tests with deterministic fixtures).
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/reachability/DELIVERY_GUIDE.md`
- `docs/reachability/slice-schema.md`
- `docs/reachability/replay-verification.md`
## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Reachability/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Reachability.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.
## Determinism & Offline Rules
- Stable ordering for graphs, slices, and diffs.
- UTC timestamps only; avoid wall-clock nondeterminism.
- Offline-first: no external network calls; use CAS and local caches.
## Testing Expectations
- Add schema validation and round-trip tests for slice artifacts.
- Ensure deterministic serialization bytes for any DSSE payloads.
- Run `dotnet test src/Scanner/StellaOps.Scanner.sln` when feasible.
## Workflow
- Update sprint status on task transitions.
- Record decisions/risks in sprint Execution Log and Decisions & Risks.

View File

@@ -0,0 +1,17 @@
using StellaOps.Scanner.Reachability.Subgraph;
namespace StellaOps.Scanner.Reachability.Attestation;
/// <summary>
/// Result of publishing a reachability subgraph attestation.
/// </summary>
/// <param name="SubgraphDigest">Prefixed digest of the canonical subgraph bytes.</param>
/// <param name="CasUri">CAS location of the subgraph payload, when stored.</param>
/// <param name="AttestationDigest">Prefixed digest of the DSSE envelope bytes.</param>
/// <param name="DsseEnvelopeBytes">Serialized DSSE envelope.</param>
public sealed record ReachabilitySubgraphPublishResult(
    string SubgraphDigest,
    string? CasUri,
    string AttestationDigest,
    byte[] DsseEnvelopeBytes);
/// <summary>
/// Publishes reachability subgraphs as DSSE-attested artifacts bound to a
/// scan subject digest.
/// </summary>
public interface IReachabilitySubgraphPublisher
{
    Task<ReachabilitySubgraphPublishResult> PublishAsync(
        ReachabilitySubgraph subgraph,
        string subjectDigest,
        CancellationToken cancellationToken = default);
}

View File

@@ -47,6 +47,18 @@ public static class ReachabilityAttestationServiceCollectionExtensions
// Register options
services.AddOptions<ReachabilityWitnessOptions>();
services.AddOptions<ReachabilitySubgraphOptions>();
// Register subgraph publisher
services.TryAddSingleton<IReachabilitySubgraphPublisher>(sp =>
new ReachabilitySubgraphPublisher(
sp.GetRequiredService<IOptions<ReachabilitySubgraphOptions>>(),
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<ILogger<ReachabilitySubgraphPublisher>>(),
timeProvider: sp.GetService<TimeProvider>(),
cas: sp.GetService<IFileContentAddressableStore>(),
dsseSigningService: sp.GetService<IDsseSigningService>(),
cryptoProfile: sp.GetService<ICryptoProfile>()));
return services;
}
@@ -64,4 +76,18 @@ public static class ReachabilityAttestationServiceCollectionExtensions
services.Configure(configure);
return services;
}
/// <summary>
/// Configures reachability subgraph options.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configure">Configuration action.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection ConfigureReachabilitySubgraphOptions(
    this IServiceCollection services,
    Action<ReachabilitySubgraphOptions> configure)
{
    // Delegates to the standard options pattern; actions compose in
    // registration order.
    services.Configure(configure);
    return services;
}
}

View File

@@ -0,0 +1,24 @@
namespace StellaOps.Scanner.Reachability.Attestation;
/// <summary>
/// Options for reachability subgraph attestation.
/// </summary>
public sealed class ReachabilitySubgraphOptions
{
    /// <summary>Configuration section these options bind to.</summary>
    public const string SectionName = "Scanner:ReachabilitySubgraph";
    /// <summary>
    /// Whether to generate DSSE attestations.
    /// </summary>
    public bool Enabled { get; set; } = true;
    /// <summary>
    /// Whether to store subgraph payloads in CAS when available.
    /// </summary>
    public bool StoreInCas { get; set; } = true;
    /// <summary>
    /// Optional signing key identifier. The publisher falls back to
    /// "scanner-deterministic" when this is unset.
    /// </summary>
    public string? SigningKeyId { get; set; }
}

View File

@@ -0,0 +1,217 @@
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.ProofSpine;
using StellaOps.Scanner.Reachability.Subgraph;
namespace StellaOps.Scanner.Reachability.Attestation;
/// <summary>
/// Publishes a reachability subgraph as a DSSE-attested artifact: the subgraph
/// is normalized, canonically serialized, hashed, optionally stored in CAS,
/// wrapped in an in-toto statement, and signed.
/// </summary>
public sealed class ReachabilitySubgraphPublisher : IReachabilitySubgraphPublisher
{
    // Compact (non-indented) web-convention JSON used for DSSE envelope bytes.
    private static readonly JsonSerializerOptions DsseJsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private readonly ReachabilitySubgraphOptions _options;
    private readonly ICryptoHash _cryptoHash;
    private readonly ILogger<ReachabilitySubgraphPublisher> _logger;
    private readonly TimeProvider _timeProvider;
    // Optional collaborators: CAS storage and real DSSE signing degrade
    // gracefully when absent.
    private readonly IFileContentAddressableStore? _cas;
    private readonly IDsseSigningService? _dsseSigningService;
    private readonly ICryptoProfile? _cryptoProfile;

    /// <summary>
    /// Creates the publisher. Options, hash, and logger are mandatory; time
    /// provider, CAS store, signing service, and crypto profile are optional.
    /// </summary>
    public ReachabilitySubgraphPublisher(
        IOptions<ReachabilitySubgraphOptions> options,
        ICryptoHash cryptoHash,
        ILogger<ReachabilitySubgraphPublisher> logger,
        TimeProvider? timeProvider = null,
        IFileContentAddressableStore? cas = null,
        IDsseSigningService? dsseSigningService = null,
        ICryptoProfile? cryptoProfile = null)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _cas = cas;
        _dsseSigningService = dsseSigningService;
        _cryptoProfile = cryptoProfile;
    }

    /// <summary>
    /// Normalizes and attests the subgraph for the given subject digest.
    /// Returns empty digests and an empty envelope when attestation is disabled.
    /// </summary>
    public async Task<ReachabilitySubgraphPublishResult> PublishAsync(
        ReachabilitySubgraph subgraph,
        string subjectDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(subgraph);
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectDigest);
        if (!_options.Enabled)
        {
            _logger.LogDebug("Reachability subgraph attestation disabled");
            return new ReachabilitySubgraphPublishResult(
                SubgraphDigest: string.Empty,
                CasUri: null,
                AttestationDigest: string.Empty,
                DsseEnvelopeBytes: Array.Empty<byte>());
        }
        // Canonical serialization of the normalized graph yields a stable digest.
        var normalized = subgraph.Normalize();
        var subgraphBytes = CanonicalJson.SerializeToUtf8Bytes(normalized);
        var subgraphDigest = _cryptoHash.ComputePrefixedHashForPurpose(subgraphBytes, HashPurpose.Graph);
        string? casUri = null;
        if (_options.StoreInCas)
        {
            casUri = await StoreSubgraphAsync(subgraphBytes, subgraphDigest, cancellationToken).ConfigureAwait(false);
        }
        var statement = BuildStatement(normalized, subgraphDigest, casUri, subjectDigest);
        var statementBytes = CanonicalJson.SerializeToUtf8Bytes(statement);
        // The envelope object itself is unused beyond its serialized bytes.
        var (envelope, envelopeBytes) = await CreateDsseEnvelopeAsync(statement, statementBytes, cancellationToken)
            .ConfigureAwait(false);
        var attestationDigest = _cryptoHash.ComputePrefixedHashForPurpose(envelopeBytes, HashPurpose.Attestation);
        _logger.LogInformation(
            "Created reachability subgraph attestation: graphDigest={GraphDigest}, attestationDigest={AttestationDigest}",
            subgraphDigest,
            attestationDigest);
        return new ReachabilitySubgraphPublishResult(
            SubgraphDigest: subgraphDigest,
            CasUri: casUri,
            AttestationDigest: attestationDigest,
            DsseEnvelopeBytes: envelopeBytes);
    }

    /// <summary>
    /// Builds the in-toto statement binding the subgraph digest (and optional
    /// CAS location) to the scan subject.
    /// </summary>
    private ReachabilitySubgraphStatement BuildStatement(
        ReachabilitySubgraph subgraph,
        string subgraphDigest,
        string? casUri,
        string subjectDigest)
    {
        var analysis = subgraph.AnalysisMetadata;
        var predicate = new ReachabilitySubgraphPredicate
        {
            SchemaVersion = subgraph.Version,
            GraphDigest = subgraphDigest,
            GraphCasUri = casUri,
            FindingKeys = subgraph.FindingKeys,
            // Conservative defaults when analysis metadata is absent.
            Analysis = new ReachabilitySubgraphAnalysis
            {
                Analyzer = analysis?.Analyzer ?? "reachability",
                AnalyzerVersion = analysis?.AnalyzerVersion ?? "unknown",
                Confidence = analysis?.Confidence ?? 0.5,
                Completeness = analysis?.Completeness ?? "partial",
                GeneratedAt = analysis?.GeneratedAt ?? _timeProvider.GetUtcNow(),
                HashAlgorithm = _cryptoHash.GetAlgorithmForPurpose(HashPurpose.Graph)
            }
        };
        return new ReachabilitySubgraphStatement
        {
            Subject =
            [
                BuildSubject(subjectDigest)
            ],
            Predicate = predicate
        };
    }

    // Turns an "alg:value" digest string into an in-toto subject entry.
    private static Subject BuildSubject(string digest)
    {
        var (algorithm, value) = SplitDigest(digest);
        return new Subject
        {
            Name = digest,
            Digest = new Dictionary<string, string> { [algorithm] = value }
        };
    }

    /// <summary>
    /// Stores the canonical subgraph bytes in CAS, deduplicated by digest key.
    /// Returns null when no CAS store is configured.
    /// </summary>
    private async Task<string?> StoreSubgraphAsync(byte[] subgraphBytes, string subgraphDigest, CancellationToken cancellationToken)
    {
        if (_cas is null)
        {
            _logger.LogWarning("CAS storage requested but no CAS store configured; skipping subgraph storage.");
            return null;
        }
        var key = ExtractHashDigest(subgraphDigest);
        // Skip the write when the content is already present (content-addressed).
        var existing = await _cas.TryGetAsync(key, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            await using var stream = new MemoryStream(subgraphBytes, writable: false);
            await _cas.PutAsync(new FileCasPutRequest(key, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
        }
        return $"cas://reachability/subgraphs/{key}";
    }

    /// <summary>
    /// Wraps the statement in a DSSE envelope. Uses the signing service when
    /// available; otherwise emits a deterministic placeholder whose "signature"
    /// is merely the SHA-256 of the statement bytes — NOT a cryptographic
    /// signature.
    /// </summary>
    private async Task<(DsseEnvelope Envelope, byte[] EnvelopeBytes)> CreateDsseEnvelopeAsync(
        ReachabilitySubgraphStatement statement,
        byte[] statementBytes,
        CancellationToken cancellationToken)
    {
        const string payloadType = "application/vnd.in-toto+json";
        if (_dsseSigningService is not null)
        {
            var profile = _cryptoProfile ?? new InlineCryptoProfile(_options.SigningKeyId ?? "scanner-deterministic", "hs256");
            var signed = await _dsseSigningService.SignAsync(statement, payloadType, profile, cancellationToken).ConfigureAwait(false);
            return (signed, SerializeDsseEnvelope(signed));
        }
        var signature = SHA256.HashData(statementBytes);
        var envelope = new DsseEnvelope(
            payloadType,
            Convert.ToBase64String(statementBytes),
            new[] { new DsseSignature(_options.SigningKeyId ?? "scanner-deterministic", Convert.ToBase64String(signature)) });
        return (envelope, SerializeDsseEnvelope(envelope));
    }

    // Serializes the envelope with signatures ordered by (keyid, sig) so the
    // resulting bytes — and hence the attestation digest — are deterministic.
    private static byte[] SerializeDsseEnvelope(DsseEnvelope envelope)
    {
        var signatures = envelope.Signatures
            .OrderBy(s => s.KeyId, StringComparer.Ordinal)
            .ThenBy(s => s.Sig, StringComparer.Ordinal)
            .Select(s => new { keyid = s.KeyId, sig = s.Sig })
            .ToArray();
        var dto = new
        {
            payloadType = envelope.PayloadType,
            payload = envelope.Payload,
            signatures
        };
        return JsonSerializer.SerializeToUtf8Bytes(dto, DsseJsonOptions);
    }

    // "sha256:abc" -> "abc"; pass-through when no prefix is present.
    private static string ExtractHashDigest(string prefixedHash)
    {
        var colonIndex = prefixedHash.IndexOf(':');
        return colonIndex >= 0 ? prefixedHash[(colonIndex + 1)..] : prefixedHash;
    }

    // "alg:value" -> (alg, value); defaults to sha256 for unprefixed digests.
    private static (string Algorithm, string Value) SplitDigest(string digest)
    {
        var colonIndex = digest.IndexOf(':');
        if (colonIndex <= 0 || colonIndex == digest.Length - 1)
        {
            return ("sha256", digest);
        }
        return (digest[..colonIndex], digest[(colonIndex + 1)..]);
    }

    // Minimal ICryptoProfile used for the fallback/deterministic signing path.
    private sealed record InlineCryptoProfile(string KeyId, string Algorithm) : ICryptoProfile;
}

View File

@@ -0,0 +1,247 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.Reachability.MiniMap;
/// <summary>
/// Extracts a condensed reachability mini-map for a vulnerable component from
/// a rich call graph, returning at most <paramref name="maxPaths"/> paths.
/// </summary>
public interface IMiniMapExtractor
{
    ReachabilityMiniMap Extract(RichGraph graph, string vulnerableComponent, int maxPaths = 10);
}
public sealed class MiniMapExtractor : IMiniMapExtractor
{
    /// <summary>
    /// Builds a condensed reachability map for a vulnerable component: locates
    /// the component node, BFS-walks from each entrypoint, and summarizes the
    /// discovered paths, state, and confidence.
    /// Fix: node and edge lookups were linear scans executed inside the BFS
    /// loop (accidental O(V·E) per step on large graphs); adjacency and node
    /// indexes are now built once up front with identical traversal order.
    /// </summary>
    public ReachabilityMiniMap Extract(
        RichGraph graph,
        string vulnerableComponent,
        int maxPaths = 10)
    {
        // Locate the vulnerable component by purl or symbol-id substring.
        var vulnNode = graph.Nodes.FirstOrDefault(n =>
            n.Purl == vulnerableComponent ||
            n.SymbolId?.Contains(vulnerableComponent) == true);
        if (vulnNode is null)
        {
            return CreateNotFoundMap(vulnerableComponent);
        }

        // Build lookups once. TryAdd keeps the first node per id, matching the
        // previous FirstOrDefault semantics on duplicate ids.
        var nodesById = new Dictionary<string, RichGraphNode>();
        foreach (var node in graph.Nodes)
        {
            nodesById.TryAdd(node.Id, node);
        }

        // GroupBy preserves element order within each group, so the BFS
        // expansion order is unchanged from the scan-based version.
        var edgesByFrom = graph.Edges
            .GroupBy(e => e.From)
            .ToDictionary(g => g.Key, g => g.ToList());

        // Collect candidate entrypoints.
        var entrypoints = graph.Nodes
            .Where(IsEntrypoint)
            .ToList();

        // BFS from each entrypoint toward the vulnerable node.
        var paths = new List<MiniMapPath>();
        var entrypointInfos = new List<MiniMapEntrypoint>();
        foreach (var ep in entrypoints)
        {
            var epPaths = FindPaths(graph, edgesByFrom, nodesById, ep, vulnNode, maxDepth: 20);
            if (epPaths.Count > 0)
            {
                entrypointInfos.Add(new MiniMapEntrypoint
                {
                    Node = ToMiniMapNode(ep),
                    Kind = ClassifyEntrypoint(ep),
                    PathCount = epPaths.Count,
                    ShortestPathLength = epPaths.Min(p => p.Length)
                });
                // Spread the path budget roughly evenly across entrypoints.
                paths.AddRange(epPaths.Take(maxPaths / Math.Max(entrypoints.Count, 1) + 1));
            }
        }

        // Runtime-corroborated paths upgrade the verdict to ConfirmedReachable.
        var state = paths.Count > 0
            ? (paths.Any(p => p.HasRuntimeEvidence)
                ? ReachabilityState.ConfirmedReachable
                : ReachabilityState.StaticReachable)
            : ReachabilityState.StaticUnreachable;

        var confidence = CalculateConfidence(paths, entrypointInfos, graph);

        return new ReachabilityMiniMap
        {
            FindingId = Guid.Empty, // Set by caller
            VulnerabilityId = string.Empty, // Set by caller
            VulnerableComponent = ToMiniMapNode(vulnNode),
            Entrypoints = entrypointInfos.OrderBy(e => e.ShortestPathLength).ToList(),
            Paths = paths.OrderBy(p => p.Length).Take(maxPaths).ToList(),
            State = state,
            Confidence = confidence,
            GraphDigest = ComputeGraphDigest(graph),
            // NOTE(review): wall-clock timestamp; consider injecting TimeProvider
            // for deterministic replay.
            AnalyzedAt = DateTimeOffset.UtcNow
        };
    }

    // Placeholder map when the vulnerable component is absent from the graph.
    private static ReachabilityMiniMap CreateNotFoundMap(string vulnerableComponent)
    {
        return new ReachabilityMiniMap
        {
            FindingId = Guid.Empty,
            VulnerabilityId = string.Empty,
            VulnerableComponent = new MiniMapNode
            {
                Id = vulnerableComponent,
                Label = vulnerableComponent,
                Type = MiniMapNodeType.VulnerableComponent
            },
            Entrypoints = Array.Empty<MiniMapEntrypoint>(),
            Paths = Array.Empty<MiniMapPath>(),
            State = ReachabilityState.Unknown,
            Confidence = 0m,
            GraphDigest = string.Empty,
            AnalyzedAt = DateTimeOffset.UtcNow
        };
    }

    // Entrypoint detection is purely kind-based.
    private static bool IsEntrypoint(RichGraphNode node)
    {
        return node.Kind is "entrypoint" or "export" or "main" or "handler";
    }

    // Classifies an entrypoint from attributes/kind; PublicApi is the fallback.
    private static EntrypointKind ClassifyEntrypoint(RichGraphNode node)
    {
        if (node.Attributes?.ContainsKey("http_method") == true)
            return EntrypointKind.HttpEndpoint;
        if (node.Attributes?.ContainsKey("grpc_service") == true)
            return EntrypointKind.GrpcMethod;
        if (node.Kind == "main")
            return EntrypointKind.MainFunction;
        if (node.Kind == "handler")
            return EntrypointKind.EventHandler;
        if (node.Attributes?.ContainsKey("cli_command") == true)
            return EntrypointKind.CliCommand;
        return EntrypointKind.PublicApi;
    }

    /// <summary>
    /// BFS over the precomputed adjacency index from start to end, avoiding
    /// cycles within a path and capping total discovered paths at 100 to bound
    /// work on dense graphs.
    /// </summary>
    private static List<MiniMapPath> FindPaths(
        RichGraph graph,
        Dictionary<string, List<RichGraphEdge>> edgesByFrom,
        Dictionary<string, RichGraphNode> nodesById,
        RichGraphNode start,
        RichGraphNode end,
        int maxDepth)
    {
        var paths = new List<MiniMapPath>();
        var queue = new Queue<(RichGraphNode node, List<RichGraphNode> path)>();
        queue.Enqueue((start, new List<RichGraphNode> { start }));
        while (queue.Count > 0 && paths.Count < 100)
        {
            var (current, path) = queue.Dequeue();
            if (path.Count > maxDepth) continue;
            if (current.Id == end.Id)
            {
                paths.Add(BuildPath(path, graph));
                continue;
            }
            if (!edgesByFrom.TryGetValue(current.Id, out var edges)) continue;
            foreach (var edge in edges)
            {
                // Skip edges to unknown nodes; skip nodes already on this path.
                if (nodesById.TryGetValue(edge.To, out var nextNode) &&
                    !path.Any(n => n.Id == nextNode.Id))
                {
                    var newPath = new List<RichGraphNode>(path) { nextNode };
                    queue.Enqueue((nextNode, newPath));
                }
            }
        }
        return paths;
    }

    // Materializes a node sequence into path steps with connecting edge kinds.
    private static MiniMapPath BuildPath(List<RichGraphNode> nodes, RichGraph graph)
    {
        var steps = nodes.Select((n, i) =>
        {
            var edge = i < nodes.Count - 1
                ? graph.Edges.FirstOrDefault(e => e.From == n.Id && e.To == nodes[i + 1].Id)
                : null;
            return new MiniMapPathStep
            {
                Index = i,
                Node = ToMiniMapNode(n),
                CallType = edge?.Kind
            };
        }).ToList();
        // Any runtime-evidenced edge leaving a path node counts as corroboration.
        var hasRuntime = graph.Edges
            .Where(e => nodes.Any(n => n.Id == e.From))
            .Any(e => e.Evidence?.Contains("runtime") == true);
        return new MiniMapPath
        {
            PathId = $"path:{ComputePathHash(nodes)}",
            EntrypointId = nodes.First().Id,
            Steps = steps,
            HasRuntimeEvidence = hasRuntime,
            PathConfidence = hasRuntime ? 0.95m : 0.75m
        };
    }

    // Projects a rich graph node into the compact mini-map node shape.
    private static MiniMapNode ToMiniMapNode(RichGraphNode node)
    {
        var sourceFile = node.Attributes?.GetValueOrDefault("source_file");
        int? lineNumber = null;
        if (node.Attributes?.TryGetValue("line", out var lineStr) == true && int.TryParse(lineStr, out var line))
        {
            lineNumber = line;
        }
        return new MiniMapNode
        {
            Id = node.Id,
            Label = node.Display ?? node.SymbolId ?? node.Id,
            Type = node.Kind switch
            {
                "entrypoint" or "export" or "main" => MiniMapNodeType.Entrypoint,
                "function" or "method" => MiniMapNodeType.Function,
                "class" => MiniMapNodeType.Class,
                "module" or "package" => MiniMapNodeType.Module,
                "sink" => MiniMapNodeType.Sink,
                _ => MiniMapNodeType.Function
            },
            Purl = node.Purl,
            SourceFile = sourceFile,
            LineNumber = lineNumber
        };
    }

    // No paths => confident unreachability (0.9); otherwise 0.6 plus up to 0.3
    // scaled by the share of runtime-corroborated paths.
    private static decimal CalculateConfidence(
        List<MiniMapPath> paths,
        List<MiniMapEntrypoint> entrypoints,
        RichGraph graph)
    {
        if (paths.Count == 0) return 0.9m; // High confidence in unreachability
        var runtimePaths = paths.Count(p => p.HasRuntimeEvidence);
        var runtimeRatio = paths.Count > 0 ? (decimal)runtimePaths / paths.Count : 0m;
        return 0.6m + (0.3m * runtimeRatio);
    }

    // Stable 16-hex-char id derived from the ordered node ids.
    private static string ComputePathHash(List<RichGraphNode> nodes)
    {
        var ids = string.Join("|", nodes.Select(n => n.Id));
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(ids));
        return Convert.ToHexString(hash)[..16].ToLowerInvariant();
    }

    // Order-independent digest over sorted node ids and edge pairs.
    private static string ComputeGraphDigest(RichGraph graph)
    {
        var nodeIds = string.Join(",", graph.Nodes.Select(n => n.Id).OrderBy(x => x));
        var edgeIds = string.Join(",", graph.Edges.Select(e => $"{e.From}->{e.To}").OrderBy(x => x));
        var combined = $"{nodeIds}|{edgeIds}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -0,0 +1,203 @@
namespace StellaOps.Scanner.Reachability.MiniMap;
/// <summary>
/// Condensed reachability visualization for a finding.
/// Shows paths from entrypoints to vulnerable component to sinks.
/// </summary>
public sealed record ReachabilityMiniMap
{
/// <summary>
/// Finding this map is for.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// Vulnerability ID.
/// </summary>
public required string VulnerabilityId { get; init; }
/// <summary>
/// The vulnerable component.
/// </summary>
public required MiniMapNode VulnerableComponent { get; init; }
/// <summary>
/// Entry points that reach the vulnerable component.
/// </summary>
public required IReadOnlyList<MiniMapEntrypoint> Entrypoints { get; init; }
/// <summary>
/// Paths from entrypoints to vulnerable component.
/// </summary>
public required IReadOnlyList<MiniMapPath> Paths { get; init; }
/// <summary>
/// Overall reachability state.
/// </summary>
public required ReachabilityState State { get; init; }
/// <summary>
/// Confidence of the analysis.
/// </summary>
public required decimal Confidence { get; init; }
/// <summary>
/// Full graph digest for verification.
/// </summary>
public required string GraphDigest { get; init; }
/// <summary>
/// When analysis was performed.
/// </summary>
public required DateTimeOffset AnalyzedAt { get; init; }
}
/// <summary>
/// A node in the mini-map.
/// </summary>
public sealed record MiniMapNode
{
/// <summary>
/// Node identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Display label.
/// </summary>
public required string Label { get; init; }
/// <summary>
/// Node type.
/// </summary>
public required MiniMapNodeType Type { get; init; }
/// <summary>
/// Package URL (if applicable).
/// </summary>
public string? Purl { get; init; }
/// <summary>
/// Source file location.
/// </summary>
public string? SourceFile { get; init; }
/// <summary>
/// Line number in source.
/// </summary>
public int? LineNumber { get; init; }
}
/// <summary>Classification of a mini-map node.</summary>
public enum MiniMapNodeType
{
    /// <summary>Application entry point (entrypoint/export/main kinds).</summary>
    Entrypoint,
    /// <summary>Function or method; also the fallback for unknown kinds.</summary>
    Function,
    /// <summary>Class-level node.</summary>
    Class,
    /// <summary>Module or package node.</summary>
    Module,
    /// <summary>The vulnerable component itself.</summary>
    VulnerableComponent,
    /// <summary>Sink node.</summary>
    Sink
}
/// <summary>
/// An entry point in the mini-map.
/// </summary>
public sealed record MiniMapEntrypoint
{
/// <summary>
/// Entry point node.
/// </summary>
public required MiniMapNode Node { get; init; }
/// <summary>
/// Entry point kind.
/// </summary>
public required EntrypointKind Kind { get; init; }
/// <summary>
/// Number of paths from this entrypoint.
/// </summary>
public required int PathCount { get; init; }
/// <summary>
/// Shortest path length to vulnerable component.
/// </summary>
public required int ShortestPathLength { get; init; }
}
/// <summary>Kind of entry point feeding a reachability path.</summary>
public enum EntrypointKind
{
    HttpEndpoint,
    GrpcMethod,
    MessageHandler,
    CliCommand,
    MainFunction,
    /// <summary>Fallback when no more specific kind is detected.</summary>
    PublicApi,
    EventHandler,
    Other
}
/// <summary>
/// A path from entrypoint to vulnerable component.
/// </summary>
public sealed record MiniMapPath
{
/// <summary>
/// Path identifier.
/// </summary>
public required string PathId { get; init; }
/// <summary>
/// Starting entrypoint ID.
/// </summary>
public required string EntrypointId { get; init; }
/// <summary>
/// Ordered steps in the path.
/// </summary>
public required IReadOnlyList<MiniMapPathStep> Steps { get; init; }
/// <summary>
/// Path length.
/// </summary>
public int Length => Steps.Count;
/// <summary>
/// Whether path has runtime corroboration.
/// </summary>
public bool HasRuntimeEvidence { get; init; }
/// <summary>
/// Confidence for this specific path.
/// </summary>
public decimal PathConfidence { get; init; }
}
/// <summary>
/// A step in a path.
/// </summary>
public sealed record MiniMapPathStep
{
/// <summary>
/// Step index (0-based).
/// </summary>
public required int Index { get; init; }
/// <summary>
/// Node at this step.
/// </summary>
public required MiniMapNode Node { get; init; }
/// <summary>
/// Call type to next step.
/// </summary>
public string? CallType { get; init; }
}
/// <summary>Overall reachability verdict for a finding.</summary>
public enum ReachabilityState
{
    /// <summary>No verdict (e.g. the vulnerable component was not found in the graph).</summary>
    Unknown,
    /// <summary>Static paths exist but no runtime corroboration.</summary>
    StaticReachable,
    /// <summary>No static path from any entrypoint was found.</summary>
    StaticUnreachable,
    /// <summary>Static path corroborated by runtime evidence.</summary>
    ConfirmedReachable,
    /// <summary>Unreachable with corroborating evidence (not currently emitted by the extractor).</summary>
    ConfirmedUnreachable
}

View File

@@ -0,0 +1,311 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Reachability.Gates;
namespace StellaOps.Scanner.Reachability;
public sealed class RichGraphReader
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
/// <summary>
/// Deserializes a richgraph JSON payload from <paramref name="stream"/> and
/// maps it into a trimmed <see cref="RichGraph"/>.
/// </summary>
/// <exception cref="InvalidOperationException">The payload deserialized to null.</exception>
public async Task<RichGraph> ReadAsync(Stream stream, CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(stream);

    var document = await JsonSerializer
        .DeserializeAsync<RichGraphDocument>(stream, SerializerOptions, cancellationToken)
        .ConfigureAwait(false)
        ?? throw new InvalidOperationException("Failed to deserialize richgraph payload.");

    return Map(document);
}
/// <summary>
/// Deserializes a richgraph JSON payload from a UTF-8 byte span and maps it
/// into a <see cref="RichGraph"/>.
/// </summary>
/// <exception cref="InvalidOperationException">The payload deserialized to null.</exception>
public RichGraph Read(ReadOnlySpan<byte> payload)
{
    var document = JsonSerializer.Deserialize<RichGraphDocument>(payload, SerializerOptions)
        ?? throw new InvalidOperationException("Failed to deserialize richgraph payload.");
    return Map(document);
}
/// <summary>
/// Maps the raw JSON document into the domain graph, dropping nodes, edges,
/// and roots with blank ids and defaulting absent analyzer metadata.
/// </summary>
private static RichGraph Map(RichGraphDocument document)
{
    var analyzerDoc = document.Analyzer;
    // Default analyzer identity when the payload omits it.
    var analyzer = new RichGraphAnalyzer(
        analyzerDoc?.Name ?? "scanner.reachability",
        analyzerDoc?.Version ?? "0.1.0",
        analyzerDoc?.ToolchainDigest);
    var nodes = document.Nodes?
        .Select(MapNode)
        .Where(n => !string.IsNullOrWhiteSpace(n.Id))
        .ToList() ?? new List<RichGraphNode>();
    var edges = document.Edges?
        .Select(MapEdge)
        .Where(e => !string.IsNullOrWhiteSpace(e.From) && !string.IsNullOrWhiteSpace(e.To))
        .ToList() ?? new List<RichGraphEdge>();
    // Roots default to the "runtime" phase when none is specified.
    var roots = document.Roots?
        .Select(r => new RichGraphRoot(
            r.Id ?? string.Empty,
            string.IsNullOrWhiteSpace(r.Phase) ? "runtime" : r.Phase,
            r.Source))
        .Where(r => !string.IsNullOrWhiteSpace(r.Id))
        .ToList() ?? new List<RichGraphRoot>();
    // NOTE(review): Trimmed() is defined on RichGraph (not in view) —
    // presumably prunes/normalizes the assembled graph; confirm its semantics.
    return new RichGraph(nodes, edges, roots, analyzer, document.Schema ?? "richgraph-v1").Trimmed();
}
private static RichGraphNode MapNode(RichGraphNodeDocument node)
{
var symbol = node.Symbol is null
? null
: new ReachabilitySymbol(
node.Symbol.Mangled,
node.Symbol.Demangled,
node.Symbol.Source,
node.Symbol.Confidence);
return new RichGraphNode(
Id: node.Id ?? string.Empty,
SymbolId: string.IsNullOrWhiteSpace(node.SymbolId) ? (node.Id ?? string.Empty) : node.SymbolId,
CodeId: node.CodeId,
Purl: node.Purl,
Lang: string.IsNullOrWhiteSpace(node.Lang) ? "unknown" : node.Lang,
Kind: string.IsNullOrWhiteSpace(node.Kind) ? "unknown" : node.Kind,
Display: node.Display,
BuildId: node.BuildId,
Evidence: node.Evidence,
Attributes: node.Attributes,
SymbolDigest: node.SymbolDigest,
Symbol: symbol,
CodeBlockHash: node.CodeBlockHash);
}
private static RichGraphEdge MapEdge(RichGraphEdgeDocument edge)
{
IReadOnlyList<DetectedGate>? gates = null;
if (edge.Gates is { Count: > 0 })
{
gates = edge.Gates.Select(MapGate).ToList();
}
return new RichGraphEdge(
From: edge.From ?? string.Empty,
To: edge.To ?? string.Empty,
Kind: string.IsNullOrWhiteSpace(edge.Kind) ? "call" : edge.Kind,
Purl: edge.Purl,
SymbolDigest: edge.SymbolDigest,
Evidence: edge.Evidence,
Confidence: edge.Confidence,
Candidates: edge.Candidates,
Gates: gates,
GateMultiplierBps: edge.GateMultiplierBps);
}
private static DetectedGate MapGate(RichGraphGateDocument gate)
{
return new DetectedGate
{
Type = ParseGateType(gate.Type),
Detail = gate.Detail ?? string.Empty,
GuardSymbol = gate.GuardSymbol ?? string.Empty,
SourceFile = gate.SourceFile,
LineNumber = gate.LineNumber,
Confidence = gate.Confidence,
DetectionMethod = gate.DetectionMethod ?? string.Empty
};
}
private static GateType ParseGateType(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return GateType.NonDefaultConfig;
}
var normalized = value
.Trim()
.Replace("_", string.Empty, StringComparison.Ordinal)
.Replace("-", string.Empty, StringComparison.Ordinal)
.ToLowerInvariant();
return normalized switch
{
"authrequired" => GateType.AuthRequired,
"featureflag" => GateType.FeatureFlag,
"adminonly" => GateType.AdminOnly,
"nondefaultconfig" => GateType.NonDefaultConfig,
_ => GateType.NonDefaultConfig
};
}
}
/// <summary>
/// Wire-format DTO for the top-level richgraph-v1 JSON payload.
/// Field names are bound explicitly via <see cref="JsonPropertyNameAttribute"/>.
/// </summary>
internal sealed class RichGraphDocument
{
    /// <summary>Schema identifier; the reader defaults it to "richgraph-v1" when absent.</summary>
    [JsonPropertyName("schema")]
    public string? Schema { get; init; }
    /// <summary>Analyzer provenance; defaults are applied by the reader when absent.</summary>
    [JsonPropertyName("analyzer")]
    public RichGraphAnalyzerDocument? Analyzer { get; init; }
    /// <summary>Graph nodes; entries with blank ids are dropped during mapping.</summary>
    [JsonPropertyName("nodes")]
    public List<RichGraphNodeDocument>? Nodes { get; init; }
    /// <summary>Graph edges; entries with blank endpoints are dropped during mapping.</summary>
    [JsonPropertyName("edges")]
    public List<RichGraphEdgeDocument>? Edges { get; init; }
    /// <summary>Entry-point roots; entries with blank ids are dropped during mapping.</summary>
    [JsonPropertyName("roots")]
    public List<RichGraphRootDocument>? Roots { get; init; }
}
/// <summary>
/// Wire-format DTO describing the analyzer that produced the graph.
/// </summary>
internal sealed class RichGraphAnalyzerDocument
{
    /// <summary>Analyzer name; reader defaults to "scanner.reachability" when absent.</summary>
    [JsonPropertyName("name")]
    public string? Name { get; init; }
    /// <summary>Analyzer version; reader defaults to "0.1.0" when absent.</summary>
    [JsonPropertyName("version")]
    public string? Version { get; init; }
    /// <summary>Digest of the producing toolchain, if recorded.</summary>
    [JsonPropertyName("toolchain_digest")]
    public string? ToolchainDigest { get; init; }
}
/// <summary>
/// Wire-format DTO for a richgraph node (snake_case JSON fields).
/// </summary>
internal sealed class RichGraphNodeDocument
{
    /// <summary>Node identifier; nodes without one are discarded by the reader.</summary>
    [JsonPropertyName("id")]
    public string? Id { get; init; }
    /// <summary>Symbol identifier; falls back to <see cref="Id"/> when blank.</summary>
    [JsonPropertyName("symbol_id")]
    public string? SymbolId { get; init; }
    /// <summary>Code identifier, if any.</summary>
    [JsonPropertyName("code_id")]
    public string? CodeId { get; init; }
    /// <summary>Package URL associated with the node, if any.</summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }
    /// <summary>Language tag; reader defaults to "unknown" when blank.</summary>
    [JsonPropertyName("lang")]
    public string? Lang { get; init; }
    /// <summary>Node kind; reader defaults to "unknown" when blank.</summary>
    [JsonPropertyName("kind")]
    public string? Kind { get; init; }
    /// <summary>Human-readable display name, if any.</summary>
    [JsonPropertyName("display")]
    public string? Display { get; init; }
    /// <summary>Build identifier of the containing binary, if any.</summary>
    [JsonPropertyName("build_id")]
    public string? BuildId { get; init; }
    /// <summary>Hash of the node's code block, if computed.</summary>
    [JsonPropertyName("code_block_hash")]
    public string? CodeBlockHash { get; init; }
    /// <summary>Digest of the symbol, if computed.</summary>
    [JsonPropertyName("symbol_digest")]
    public string? SymbolDigest { get; init; }
    /// <summary>Evidence strings supporting this node, if any.</summary>
    [JsonPropertyName("evidence")]
    public List<string>? Evidence { get; init; }
    /// <summary>Free-form string attributes, if any.</summary>
    [JsonPropertyName("attributes")]
    public Dictionary<string, string>? Attributes { get; init; }
    /// <summary>Detailed symbol information, if present.</summary>
    [JsonPropertyName("symbol")]
    public RichGraphSymbolDocument? Symbol { get; init; }
}
/// <summary>
/// Wire-format DTO for detailed symbol information on a node.
/// </summary>
internal sealed class RichGraphSymbolDocument
{
    /// <summary>Mangled (linker-level) symbol name, if any.</summary>
    [JsonPropertyName("mangled")]
    public string? Mangled { get; init; }
    /// <summary>Demangled (human-readable) symbol name, if any.</summary>
    [JsonPropertyName("demangled")]
    public string? Demangled { get; init; }
    /// <summary>Origin of the symbol information, if recorded.</summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }
    /// <summary>Confidence in the symbol resolution, if recorded.</summary>
    [JsonPropertyName("confidence")]
    public double? Confidence { get; init; }
}
/// <summary>
/// Wire-format DTO for a richgraph edge (snake_case JSON fields).
/// </summary>
internal sealed class RichGraphEdgeDocument
{
    /// <summary>Source node id; edges with a blank source are discarded by the reader.</summary>
    [JsonPropertyName("from")]
    public string? From { get; init; }
    /// <summary>Target node id; edges with a blank target are discarded by the reader.</summary>
    [JsonPropertyName("to")]
    public string? To { get; init; }
    /// <summary>Edge kind; reader defaults to "call" when blank.</summary>
    [JsonPropertyName("kind")]
    public string? Kind { get; init; }
    /// <summary>Package URL associated with the edge, if any.</summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }
    /// <summary>Symbol digest, if computed.</summary>
    [JsonPropertyName("symbol_digest")]
    public string? SymbolDigest { get; init; }
    /// <summary>Edge confidence; 0.0 when not supplied.</summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; } = 0.0;
    /// <summary>Gate multiplier in basis points; 10000 (no change) when not supplied.</summary>
    [JsonPropertyName("gate_multiplier_bps")]
    public int GateMultiplierBps { get; init; } = 10000;
    /// <summary>Detected gates guarding this edge, if any.</summary>
    [JsonPropertyName("gates")]
    public List<RichGraphGateDocument>? Gates { get; init; }
    /// <summary>Evidence strings supporting this edge, if any.</summary>
    [JsonPropertyName("evidence")]
    public List<string>? Evidence { get; init; }
    /// <summary>Candidate target ids (e.g. for unresolved dispatch), if any.</summary>
    [JsonPropertyName("candidates")]
    public List<string>? Candidates { get; init; }
}
/// <summary>
/// Wire-format DTO for a detected gate on an edge.
/// </summary>
internal sealed class RichGraphGateDocument
{
    /// <summary>Gate type string; reader normalizes separators/case and defaults unknown values to NonDefaultConfig.</summary>
    [JsonPropertyName("type")]
    public string? Type { get; init; }
    /// <summary>Free-form detail about the gate; reader maps null to empty.</summary>
    [JsonPropertyName("detail")]
    public string? Detail { get; init; }
    /// <summary>Symbol implementing the guard; reader maps null to empty.</summary>
    [JsonPropertyName("guard_symbol")]
    public string? GuardSymbol { get; init; }
    /// <summary>Source file where the gate was detected, if known.</summary>
    [JsonPropertyName("source_file")]
    public string? SourceFile { get; init; }
    /// <summary>Line number of the gate, if known.</summary>
    [JsonPropertyName("line_number")]
    public int? LineNumber { get; init; }
    /// <summary>Detection confidence; 0.0 when not supplied.</summary>
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; } = 0.0;
    /// <summary>How the gate was detected; reader maps null to empty.</summary>
    [JsonPropertyName("detection_method")]
    public string? DetectionMethod { get; init; }
}
/// <summary>
/// Wire-format DTO for a graph root (entry point).
/// </summary>
internal sealed class RichGraphRootDocument
{
    /// <summary>Root node id; roots with a blank id are discarded by the reader.</summary>
    [JsonPropertyName("id")]
    public string? Id { get; init; }
    /// <summary>Execution phase; reader defaults to "runtime" when blank.</summary>
    [JsonPropertyName("phase")]
    public string? Phase { get; init; }
    /// <summary>Origin of the root designation, if recorded.</summary>
    [JsonPropertyName("source")]
    public string? Source { get; init; }
}

View File

@@ -0,0 +1,347 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability.Slices;
namespace StellaOps.Scanner.Reachability.Runtime;
/// <summary>
/// Configuration for runtime-static graph merging.
/// </summary>
public sealed record RuntimeStaticMergeOptions
{
    /// <summary>
    /// Confidence boost for edges observed at runtime. Default: 1.0 (max).
    /// Applied as the absolute confidence of matched edges in the merged graph.
    /// </summary>
    public double ObservedConfidenceBoost { get; init; } = 1.0;
    /// <summary>
    /// Base confidence for runtime-only edges (not in static graph). Default: 0.9.
    /// Scaled further by the observation count (see merger).
    /// </summary>
    public double RuntimeOnlyConfidence { get; init; } = 0.9;
    /// <summary>
    /// Minimum observation count to include a runtime-only edge. Default: 1.
    /// Aggregates below this threshold are ignored entirely (matched or not).
    /// </summary>
    public int MinObservationCount { get; init; } = 1;
    /// <summary>
    /// Maximum age of observations to consider fresh. Default: 7 days.
    /// Aggregates last observed before (now - window) are ignored.
    /// </summary>
    public TimeSpan FreshnessWindow { get; init; } = TimeSpan.FromDays(7);
    /// <summary>
    /// Whether to add edges from runtime that don't exist in static graph.
    /// </summary>
    public bool AddRuntimeOnlyEdges { get; init; } = true;
}
/// <summary>
/// Result of merging runtime traces with static call graph.
/// </summary>
public sealed record RuntimeStaticMergeResult
{
    /// <summary>
    /// Merged graph with runtime annotations (observed metadata and boosted confidence).
    /// </summary>
    public required CallGraph MergedGraph { get; init; }
    /// <summary>
    /// Statistics about the merge operation.
    /// </summary>
    public required MergeStatistics Statistics { get; init; }
    /// <summary>
    /// Edges that were observed at runtime and also exist in the static graph.
    /// </summary>
    public ImmutableArray<ObservedEdge> ObservedEdges { get; init; } = ImmutableArray<ObservedEdge>.Empty;
    /// <summary>
    /// Edges added from runtime that weren't in static graph.
    /// </summary>
    public ImmutableArray<RuntimeOnlyEdge> RuntimeOnlyEdges { get; init; } = ImmutableArray<RuntimeOnlyEdge>.Empty;
}
/// <summary>
/// Statistics from the merge operation.
/// </summary>
public sealed record MergeStatistics
{
    /// <summary>Number of edges in the static graph before merging.</summary>
    public int StaticEdgeCount { get; init; }
    /// <summary>Count of runtime observations considered.</summary>
    // NOTE(review): the current merger populates this with the number of distinct
    // aggregated edges, not raw events — confirm which semantics are intended.
    public int RuntimeEventCount { get; init; }
    /// <summary>Static edges that were also observed at runtime.</summary>
    public int MatchedEdgeCount { get; init; }
    /// <summary>Runtime-only edges added to the merged graph.</summary>
    public int RuntimeOnlyEdgeCount { get; init; }
    /// <summary>Static edges with no runtime observation.</summary>
    public int UnmatchedStaticEdgeCount { get; init; }
    /// <summary>Fraction of static edges confirmed by runtime observation (0 when the static graph is empty).</summary>
    public double CoverageRatio => StaticEdgeCount > 0
        ? (double)MatchedEdgeCount / StaticEdgeCount
        : 0.0;
}
/// <summary>
/// An edge that was observed at runtime.
/// </summary>
public sealed record ObservedEdge
{
    /// <summary>Caller symbol id.</summary>
    public required string From { get; init; }
    /// <summary>Callee symbol id.</summary>
    public required string To { get; init; }
    /// <summary>Timestamp of the earliest observation in the aggregate.</summary>
    public required DateTimeOffset FirstObserved { get; init; }
    /// <summary>Timestamp of the latest observation in the aggregate.</summary>
    public required DateTimeOffset LastObserved { get; init; }
    /// <summary>Number of runtime events aggregated for this edge.</summary>
    public required int ObservationCount { get; init; }
    /// <summary>Digest of the trace that produced the observation, if available.</summary>
    public string? TraceDigest { get; init; }
}
/// <summary>
/// An edge that only exists in runtime observations (dynamic dispatch, etc).
/// </summary>
public sealed record RuntimeOnlyEdge
{
    /// <summary>Caller symbol id.</summary>
    public required string From { get; init; }
    /// <summary>Callee symbol id.</summary>
    public required string To { get; init; }
    /// <summary>Timestamp of the earliest observation in the aggregate.</summary>
    public required DateTimeOffset FirstObserved { get; init; }
    /// <summary>Timestamp of the latest observation in the aggregate.</summary>
    public required DateTimeOffset LastObserved { get; init; }
    /// <summary>Number of runtime events aggregated for this edge.</summary>
    public required int ObservationCount { get; init; }
    /// <summary>Provenance of the edge.</summary>
    public required string Origin { get; init; } // "runtime", "dynamic_dispatch", etc.
    /// <summary>Digest of the trace that produced the observation, if available.</summary>
    public string? TraceDigest { get; init; }
}
/// <summary>
/// Represents a runtime call event from eBPF/ETW collectors.
/// </summary>
public sealed record RuntimeCallEvent
{
    /// <summary>
    /// Event timestamp. Appears to be nanoseconds since the Unix epoch
    /// (the merger divides by 1e6 to get milliseconds) — TODO confirm
    /// against the collector contract.
    /// </summary>
    public required ulong Timestamp { get; init; }
    /// <summary>Process id of the observed call.</summary>
    public required uint Pid { get; init; }
    /// <summary>Thread id of the observed call.</summary>
    public required uint Tid { get; init; }
    /// <summary>Symbol of the calling function.</summary>
    public required string CallerSymbol { get; init; }
    /// <summary>Symbol of the called function.</summary>
    public required string CalleeSymbol { get; init; }
    /// <summary>Path of the binary where the call was observed.</summary>
    public required string BinaryPath { get; init; }
    /// <summary>Digest of the trace that produced this event, if available.</summary>
    public string? TraceDigest { get; init; }
}
/// <summary>
/// Merges runtime trace observations with static call graphs. Matched static
/// edges receive observation metadata and a confidence boost; edges seen only
/// at runtime can optionally be added as dynamic edges.
/// </summary>
public sealed class RuntimeStaticMerger
{
    private readonly RuntimeStaticMergeOptions _options;
    private readonly ILogger<RuntimeStaticMerger> _logger;
    private readonly TimeProvider _timeProvider;

    public RuntimeStaticMerger(
        RuntimeStaticMergeOptions? options = null,
        ILogger<RuntimeStaticMerger>? logger = null,
        TimeProvider? timeProvider = null)
    {
        _options = options ?? new RuntimeStaticMergeOptions();
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<RuntimeStaticMerger>.Instance;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Merge runtime events into a static call graph.
    /// </summary>
    /// <param name="staticGraph">Call graph produced by static analysis.</param>
    /// <param name="runtimeEvents">Observed call events from runtime collectors.</param>
    /// <returns>The merged graph plus observation details and statistics.</returns>
    public RuntimeStaticMergeResult Merge(
        CallGraph staticGraph,
        IEnumerable<RuntimeCallEvent> runtimeEvents)
    {
        ArgumentNullException.ThrowIfNull(staticGraph);
        ArgumentNullException.ThrowIfNull(runtimeEvents);

        var now = _timeProvider.GetUtcNow();
        var freshnessThreshold = now - _options.FreshnessWindow;

        // Index static edges for O(1) lookup by "from->to" key.
        var staticEdgeIndex = BuildStaticEdgeIndex(staticGraph);

        // Aggregate runtime events by edge (first/last seen, observation count).
        var runtimeEdgeAggregates = AggregateRuntimeEvents(runtimeEvents);

        // Each raw event contributes exactly one observation, so the sum of
        // observation counts equals the total number of events processed.
        var totalEventCount = runtimeEdgeAggregates.Values.Sum(a => a.ObservationCount);

        var observedEdges = new List<ObservedEdge>();
        var runtimeOnlyEdges = new List<RuntimeOnlyEdge>();
        var modifiedEdges = new List<CallEdge>();
        var matchedEdgeKeys = new HashSet<string>(StringComparer.Ordinal);

        foreach (var (edgeKey, aggregate) in runtimeEdgeAggregates)
        {
            // Skip stale observations.
            if (aggregate.LastObserved < freshnessThreshold)
            {
                continue;
            }

            // Skip low observation counts.
            if (aggregate.ObservationCount < _options.MinObservationCount)
            {
                continue;
            }

            if (staticEdgeIndex.TryGetValue(edgeKey, out var staticEdge))
            {
                // Edge exists in static graph - mark as observed and boost confidence.
                matchedEdgeKeys.Add(edgeKey);
                var observedMetadata = new ObservedEdgeMetadata
                {
                    FirstObserved = aggregate.FirstObserved,
                    LastObserved = aggregate.LastObserved,
                    ObservationCount = aggregate.ObservationCount,
                    TraceDigest = aggregate.TraceDigest
                };
                var boostedEdge = staticEdge with
                {
                    Confidence = _options.ObservedConfidenceBoost,
                    Observed = observedMetadata
                };
                modifiedEdges.Add(boostedEdge);
                observedEdges.Add(new ObservedEdge
                {
                    From = aggregate.From,
                    To = aggregate.To,
                    FirstObserved = aggregate.FirstObserved,
                    LastObserved = aggregate.LastObserved,
                    ObservationCount = aggregate.ObservationCount,
                    TraceDigest = aggregate.TraceDigest
                });
            }
            else if (_options.AddRuntimeOnlyEdges)
            {
                // Edge only exists in runtime (e.g. dynamic dispatch) - add it.
                var runtimeEdge = new CallEdge
                {
                    From = aggregate.From,
                    To = aggregate.To,
                    Kind = CallEdgeKind.Dynamic,
                    Confidence = ComputeRuntimeOnlyConfidence(aggregate),
                    Evidence = "runtime_observation",
                    Observed = new ObservedEdgeMetadata
                    {
                        FirstObserved = aggregate.FirstObserved,
                        LastObserved = aggregate.LastObserved,
                        ObservationCount = aggregate.ObservationCount,
                        TraceDigest = aggregate.TraceDigest
                    }
                };
                modifiedEdges.Add(runtimeEdge);
                runtimeOnlyEdges.Add(new RuntimeOnlyEdge
                {
                    From = aggregate.From,
                    To = aggregate.To,
                    FirstObserved = aggregate.FirstObserved,
                    LastObserved = aggregate.LastObserved,
                    ObservationCount = aggregate.ObservationCount,
                    Origin = "runtime",
                    TraceDigest = aggregate.TraceDigest
                });
            }
        }

        // Build merged edge list: unmatched static edges pass through unchanged,
        // then matched (boosted) and runtime-only edges are appended.
        var mergedEdges = new List<CallEdge>();
        foreach (var edge in staticGraph.Edges)
        {
            var key = BuildEdgeKey(edge.From, edge.To);
            if (!matchedEdgeKeys.Contains(key))
            {
                mergedEdges.Add(edge);
            }
        }
        mergedEdges.AddRange(modifiedEdges);

        var mergedGraph = staticGraph with
        {
            Edges = mergedEdges.ToImmutableArray()
        };

        var statistics = new MergeStatistics
        {
            StaticEdgeCount = staticGraph.Edges.Length,
            // FIX: previously reported the number of distinct aggregated edges
            // rather than the number of runtime events.
            RuntimeEventCount = totalEventCount,
            MatchedEdgeCount = matchedEdgeKeys.Count,
            RuntimeOnlyEdgeCount = runtimeOnlyEdges.Count,
            UnmatchedStaticEdgeCount = staticGraph.Edges.Length - matchedEdgeKeys.Count
        };

        _logger.LogInformation(
            "Merged runtime traces: {Matched}/{Static} edges observed ({Coverage:P1}), {RuntimeOnly} runtime-only edges added",
            statistics.MatchedEdgeCount,
            statistics.StaticEdgeCount,
            statistics.CoverageRatio,
            statistics.RuntimeOnlyEdgeCount);

        return new RuntimeStaticMergeResult
        {
            MergedGraph = mergedGraph,
            Statistics = statistics,
            ObservedEdges = observedEdges.ToImmutableArray(),
            RuntimeOnlyEdges = runtimeOnlyEdges.ToImmutableArray()
        };
    }

    // Index static edges by "from->to"; duplicate keys keep the first edge seen.
    private static Dictionary<string, CallEdge> BuildStaticEdgeIndex(CallGraph graph)
    {
        var index = new Dictionary<string, CallEdge>(StringComparer.Ordinal);
        foreach (var edge in graph.Edges)
        {
            var key = BuildEdgeKey(edge.From, edge.To);
            index.TryAdd(key, edge);
        }
        return index;
    }

    // Collapse raw events into per-edge aggregates with min/max timestamps and counts.
    private static Dictionary<string, RuntimeEdgeAggregate> AggregateRuntimeEvents(
        IEnumerable<RuntimeCallEvent> events)
    {
        var aggregates = new Dictionary<string, RuntimeEdgeAggregate>(StringComparer.Ordinal);
        foreach (var evt in events)
        {
            var key = BuildEdgeKey(evt.CallerSymbol, evt.CalleeSymbol);
            // Timestamp appears to be nanoseconds since the Unix epoch (divided by
            // 1e6 to yield milliseconds) — TODO confirm with the collector contract.
            var timestamp = DateTimeOffset.FromUnixTimeMilliseconds((long)(evt.Timestamp / 1_000_000));
            if (aggregates.TryGetValue(key, out var existing))
            {
                // FIX: track true min/max so out-of-order event streams do not
                // corrupt the first/last-observed window (previously FirstObserved
                // was never updated and LastObserved was overwritten blindly).
                aggregates[key] = existing with
                {
                    ObservationCount = existing.ObservationCount + 1,
                    FirstObserved = timestamp < existing.FirstObserved ? timestamp : existing.FirstObserved,
                    LastObserved = timestamp > existing.LastObserved ? timestamp : existing.LastObserved
                };
            }
            else
            {
                aggregates[key] = new RuntimeEdgeAggregate
                {
                    From = evt.CallerSymbol,
                    To = evt.CalleeSymbol,
                    FirstObserved = timestamp,
                    LastObserved = timestamp,
                    ObservationCount = 1,
                    TraceDigest = evt.TraceDigest
                };
            }
        }
        return aggregates;
    }

    // Higher observation count = higher confidence, capped at the runtime-only max.
    // Scales linearly from 50% to 100% of RuntimeOnlyConfidence over 0..10 observations.
    private double ComputeRuntimeOnlyConfidence(RuntimeEdgeAggregate aggregate)
    {
        var countFactor = Math.Min(1.0, aggregate.ObservationCount / 10.0);
        return _options.RuntimeOnlyConfidence * (0.5 + 0.5 * countFactor);
    }

    private static string BuildEdgeKey(string from, string to) => $"{from}->{to}";

    // Internal per-edge accumulator built from raw runtime events.
    private sealed record RuntimeEdgeAggregate
    {
        public required string From { get; init; }
        public required string To { get; init; }
        public required DateTimeOffset FirstObserved { get; init; }
        public required DateTimeOffset LastObserved { get; init; }
        public required int ObservationCount { get; init; }
        public string? TraceDigest { get; init; }
    }
}

View File

@@ -0,0 +1,67 @@
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Cache for reachability slices to avoid redundant computation.
/// </summary>
public interface ISliceCache
{
    /// <summary>
    /// Try to get a cached slice result.
    /// </summary>
    /// <param name="cacheKey">Deterministic key identifying the slice.</param>
    /// <returns>The cached result, or <c>null</c> on a miss or expired entry.</returns>
    Task<CachedSliceResult?> TryGetAsync(
        string cacheKey,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Store a slice result in cache.
    /// </summary>
    /// <param name="cacheKey">Deterministic key identifying the slice.</param>
    /// <param name="result">Result to cache.</param>
    /// <param name="ttl">Time-to-live after which the entry is no longer served.</param>
    Task SetAsync(
        string cacheKey,
        CachedSliceResult result,
        TimeSpan ttl,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Remove a slice from cache.
    /// </summary>
    /// <param name="cacheKey">Key of the entry to remove; removing a missing key is a no-op.</param>
    Task RemoveAsync(
        string cacheKey,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Clear all cached slices.
    /// </summary>
    Task ClearAsync(CancellationToken cancellationToken = default);
    /// <summary>
    /// Get cache statistics.
    /// </summary>
    /// <returns>Point-in-time snapshot of hit/miss counters and size estimates.</returns>
    CacheStatistics GetStatistics();
}
/// <summary>
/// Cached slice result.
/// </summary>
public sealed record CachedSliceResult
{
    /// <summary>Content digest of the slice this result was computed from.</summary>
    public required string SliceDigest { get; init; }
    /// <summary>Reachability verdict (string form; vocabulary defined by the slice producer).</summary>
    public required string Verdict { get; init; }
    /// <summary>Confidence associated with the verdict.</summary>
    public required double Confidence { get; init; }
    /// <summary>Witness paths supporting the verdict.</summary>
    public required IReadOnlyList<string> PathWitnesses { get; init; }
    /// <summary>When the result was stored in the cache.</summary>
    public required DateTimeOffset CachedAt { get; init; }
}
/// <summary>
/// Cache statistics.
/// </summary>
public sealed record CacheStatistics
{
    /// <summary>Number of successful (non-expired) lookups.</summary>
    public required long HitCount { get; init; }
    /// <summary>Number of lookups that found nothing or an expired entry.</summary>
    public required long MissCount { get; init; }
    /// <summary>Current number of entries in the cache.</summary>
    public required long EntryCount { get; init; }
    /// <summary>Rough estimate of cache memory usage in bytes.</summary>
    public required long EstimatedSizeBytes { get; init; }
    /// <summary>Hit ratio over all lookups; 0 when no lookups have occurred.</summary>
    public double HitRate => (HitCount + MissCount) == 0
        ? 0.0
        : (double)HitCount / (HitCount + MissCount);
}

View File

@@ -0,0 +1,210 @@
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// In-memory implementation of slice cache with TTL and memory pressure handling.
/// A background timer evicts expired entries; writes trigger a size check that
/// evicts the soonest-to-expire tenth of entries when the soft cap is exceeded.
/// </summary>
public sealed class InMemorySliceCache : ISliceCache, IDisposable
{
    private readonly ConcurrentDictionary<string, CacheEntry> _cache = new();
    private readonly ILogger<InMemorySliceCache> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly Timer _evictionTimer;
    private readonly SemaphoreSlim _evictionLock = new(1, 1);
    private long _hitCount;
    private long _missCount;
    // Set by Dispose so in-flight background maintenance bails out instead of
    // touching a disposed semaphore.
    private volatile bool _disposed;

    private const long MaxCacheSizeBytes = 1_073_741_824; // 1GB soft cap (estimated).
    private const int EvictionIntervalSeconds = 60;

    public InMemorySliceCache(
        ILogger<InMemorySliceCache> logger,
        TimeProvider? timeProvider = null)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        // FIX: the timer callback discards a Task; route it through RunSafelyAsync
        // so faults are logged instead of becoming unobserved task exceptions.
        _evictionTimer = new Timer(
            _ => _ = RunSafelyAsync(() => EvictExpiredEntriesAsync(CancellationToken.None)),
            null,
            TimeSpan.FromSeconds(EvictionIntervalSeconds),
            TimeSpan.FromSeconds(EvictionIntervalSeconds));
    }

    /// <summary>
    /// Try to get a cached slice result; expired entries are removed and counted as misses.
    /// </summary>
    public Task<CachedSliceResult?> TryGetAsync(
        string cacheKey,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        if (_cache.TryGetValue(cacheKey, out var entry))
        {
            var now = _timeProvider.GetUtcNow();
            if (entry.ExpiresAt > now)
            {
                Interlocked.Increment(ref _hitCount);
                _logger.LogDebug("Cache hit for key {CacheKey}", cacheKey);
                return Task.FromResult<CachedSliceResult?>(entry.Result);
            }
            // Lazily drop expired entries on read.
            _cache.TryRemove(cacheKey, out _);
            _logger.LogDebug("Cache entry expired for key {CacheKey}", cacheKey);
        }
        Interlocked.Increment(ref _missCount);
        _logger.LogDebug("Cache miss for key {CacheKey}", cacheKey);
        return Task.FromResult<CachedSliceResult?>(null);
    }

    /// <summary>
    /// Store a slice result in cache with the given TTL; overwrites any existing entry.
    /// </summary>
    public Task SetAsync(
        string cacheKey,
        CachedSliceResult result,
        TimeSpan ttl,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        ArgumentNullException.ThrowIfNull(result);
        var now = _timeProvider.GetUtcNow();
        var entry = new CacheEntry(result, now + ttl, EstimateSize(result));
        _cache.AddOrUpdate(cacheKey, entry, (_, _) => entry);
        _logger.LogDebug(
            "Cached slice with key {CacheKey}, expires at {ExpiresAt}",
            cacheKey,
            entry.ExpiresAt);
        // FIX: fire-and-forget pressure check now logs failures instead of
        // silently dropping them.
        _ = RunSafelyAsync(() => CheckMemoryPressureAsync(cancellationToken));
        return Task.CompletedTask;
    }

    /// <summary>
    /// Remove a slice from cache; removing a missing key is a no-op.
    /// </summary>
    public Task RemoveAsync(
        string cacheKey,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        _cache.TryRemove(cacheKey, out _);
        _logger.LogDebug("Removed cache entry for key {CacheKey}", cacheKey);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Clear all cached slices (hit/miss counters are preserved).
    /// </summary>
    public Task ClearAsync(CancellationToken cancellationToken = default)
    {
        _cache.Clear();
        _logger.LogInformation("Cleared all cache entries");
        return Task.CompletedTask;
    }

    /// <summary>
    /// Get a point-in-time snapshot of counters and the estimated cache size.
    /// </summary>
    public CacheStatistics GetStatistics()
    {
        var estimatedSize = _cache.Values.Sum(e => e.EstimatedSizeBytes);
        return new CacheStatistics
        {
            HitCount = Interlocked.Read(ref _hitCount),
            MissCount = Interlocked.Read(ref _missCount),
            EntryCount = _cache.Count,
            EstimatedSizeBytes = estimatedSize
        };
    }

    // Observes exceptions from background maintenance so discarded tasks never fault
    // unobserved. ObjectDisposedException is expected when racing with Dispose.
    private async Task RunSafelyAsync(Func<Task> operation)
    {
        if (_disposed)
        {
            return;
        }
        try
        {
            await operation().ConfigureAwait(false);
        }
        catch (ObjectDisposedException)
        {
            // Raced with Dispose; the cache is shutting down.
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Slice cache maintenance task failed.");
        }
    }

    // Periodic sweep that removes entries past their expiry. Skips if another
    // maintenance pass holds the lock.
    private async Task EvictExpiredEntriesAsync(CancellationToken cancellationToken)
    {
        if (!await _evictionLock.WaitAsync(0, cancellationToken).ConfigureAwait(false))
        {
            return;
        }
        try
        {
            var now = _timeProvider.GetUtcNow();
            var expiredKeys = _cache
                .Where(kv => kv.Value.ExpiresAt <= now)
                .Select(kv => kv.Key)
                .ToList();
            foreach (var key in expiredKeys)
            {
                _cache.TryRemove(key, out _);
            }
            if (expiredKeys.Count > 0)
            {
                _logger.LogDebug("Evicted {Count} expired cache entries", expiredKeys.Count);
            }
        }
        finally
        {
            _evictionLock.Release();
        }
    }

    // When the estimated size exceeds the soft cap, evicts the ~10% of entries
    // closest to expiry. Skips if another maintenance pass holds the lock.
    private async Task CheckMemoryPressureAsync(CancellationToken cancellationToken)
    {
        var stats = GetStatistics();
        if (stats.EstimatedSizeBytes <= MaxCacheSizeBytes)
        {
            return;
        }
        if (!await _evictionLock.WaitAsync(0, cancellationToken).ConfigureAwait(false))
        {
            return;
        }
        try
        {
            var orderedEntries = _cache
                .OrderBy(kv => kv.Value.ExpiresAt)
                .ToList();
            var evictionCount = Math.Max(1, orderedEntries.Count / 10);
            var toEvict = orderedEntries.Take(evictionCount);
            foreach (var entry in toEvict)
            {
                _cache.TryRemove(entry.Key, out _);
            }
            _logger.LogWarning(
                "Memory pressure detected. Evicted {Count} entries. Cache size: {SizeBytes} bytes",
                evictionCount,
                stats.EstimatedSizeBytes);
        }
        finally
        {
            _evictionLock.Release();
        }
    }

    // Rough size heuristic: fixed object overheads plus 2 bytes per UTF-16 char
    // and a flat per-witness estimate. FIX: accumulate in long to avoid int
    // overflow on pathologically large witness lists.
    private static long EstimateSize(CachedSliceResult result)
    {
        const int baseObjectSize = 128;
        const int stringOverhead = 32;
        const int pathWitnessAvgSize = 256;
        long size = baseObjectSize;
        size += result.SliceDigest.Length * 2L + stringOverhead;
        size += result.Verdict.Length * 2L + stringOverhead;
        size += result.PathWitnesses.Count * (long)pathWitnessAvgSize;
        return size;
    }

    public void Dispose()
    {
        // FIX: flag disposal first so in-flight timer callbacks observe it and
        // bail out before touching the disposed semaphore.
        _disposed = true;
        _evictionTimer.Dispose();
        _evictionLock.Dispose();
    }

    // Immutable cache slot: the payload, its absolute expiry, and its size estimate.
    private sealed record CacheEntry(
        CachedSliceResult Result,
        DateTimeOffset ExpiresAt,
        long EstimatedSizeBytes);
}

View File

@@ -0,0 +1,223 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability.Runtime;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Options for observed path slice generation.
/// </summary>
public sealed record ObservedPathSliceOptions
{
    /// <summary>
    /// Minimum confidence threshold to include in slice. Default: 0.0 (include all).
    /// Applied to slice edges after runtime enhancement.
    /// </summary>
    public double MinConfidence { get; init; } = 0.0;
    /// <summary>
    /// Whether to include runtime-only edges. Default: true.
    /// When true, runtime-only matches are re-marked as dynamic with observation metadata.
    /// </summary>
    public bool IncludeRuntimeOnlyEdges { get; init; } = true;
    /// <summary>
    /// Whether to promote observed edges to highest confidence. Default: true.
    /// When false, observed edges keep their existing confidence.
    /// </summary>
    public bool PromoteObservedConfidence { get; init; } = true;
}
/// <summary>
/// Generates reachability slices that incorporate runtime observations:
/// merges runtime events into the static graph, extracts a slice from the
/// merged graph, then upgrades the verdict and edges with observation data.
/// </summary>
public sealed class ObservedPathSliceGenerator
{
    private readonly SliceExtractor _baseExtractor;
    private readonly RuntimeStaticMerger _merger;
    private readonly ObservedPathSliceOptions _options;
    private readonly ILogger<ObservedPathSliceGenerator> _logger;

    public ObservedPathSliceGenerator(
        SliceExtractor baseExtractor,
        RuntimeStaticMerger merger,
        ObservedPathSliceOptions? options = null,
        ILogger<ObservedPathSliceGenerator>? logger = null)
    {
        _baseExtractor = baseExtractor ?? throw new ArgumentNullException(nameof(baseExtractor));
        _merger = merger ?? throw new ArgumentNullException(nameof(merger));
        _options = options ?? new ObservedPathSliceOptions();
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<ObservedPathSliceGenerator>.Instance;
    }

    /// <summary>
    /// Extract a slice with runtime observations merged in.
    /// </summary>
    /// <param name="request">Base extraction request (its graph is replaced by the merged graph).</param>
    /// <param name="runtimeEvents">Observed call events from runtime collectors.</param>
    public ReachabilitySlice ExtractWithObservations(
        SliceExtractionRequest request,
        IEnumerable<RuntimeCallEvent> runtimeEvents)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(runtimeEvents);

        // First merge runtime observations into the graph.
        var mergeResult = _merger.Merge(request.Graph, runtimeEvents);
        _logger.LogDebug(
            "Merged {Matched} observed edges, {RuntimeOnly} runtime-only edges (coverage: {Coverage:P1})",
            mergeResult.Statistics.MatchedEdgeCount,
            mergeResult.Statistics.RuntimeOnlyEdgeCount,
            mergeResult.Statistics.CoverageRatio);

        // Extract slice from merged graph.
        var mergedRequest = request with { Graph = mergeResult.MergedGraph };
        var baseSlice = _baseExtractor.Extract(mergedRequest);

        // Enhance verdict based on observations.
        var enhancedVerdict = EnhanceVerdict(baseSlice.Verdict, mergeResult);

        // Filter and transform edges based on options.
        var enhancedSubgraph = EnhanceSubgraph(baseSlice.Subgraph, mergeResult);

        return baseSlice with
        {
            Verdict = enhancedVerdict,
            Subgraph = enhancedSubgraph
        };
    }

    /// <summary>
    /// Check if any paths in the slice have been observed at runtime.
    /// </summary>
    public bool HasObservedPaths(ReachabilitySlice slice)
    {
        return slice.Subgraph.Edges.Any(e => e.Observed != null);
    }

    /// <summary>
    /// Get coverage statistics for a slice.
    /// </summary>
    public ObservationCoverage GetCoverage(ReachabilitySlice slice)
    {
        var totalEdges = slice.Subgraph.Edges.Length;
        var observedEdges = slice.Subgraph.Edges.Count(e => e.Observed != null);
        return new ObservationCoverage
        {
            TotalEdges = totalEdges,
            ObservedEdges = observedEdges,
            CoverageRatio = totalEdges > 0 ? (double)observedEdges / totalEdges : 0.0,
            HasFullCoverage = totalEdges > 0 && observedEdges == totalEdges
        };
    }

    // Upgrades the verdict when runtime observations exist: reachable -> observed_reachable,
    // unreachable -> observed_reachable (contradiction, logged), otherwise boosts confidence
    // proportionally to runtime coverage.
    private SliceVerdict EnhanceVerdict(SliceVerdict baseVerdict, RuntimeStaticMergeResult mergeResult)
    {
        // If we have observed paths to targets, upgrade to observed_reachable.
        var hasObservedPathToTarget = mergeResult.ObservedEdges.Any();
        if (hasObservedPathToTarget && baseVerdict.Status == SliceVerdictStatus.Reachable)
        {
            return baseVerdict with
            {
                Status = SliceVerdictStatus.ObservedReachable,
                Confidence = 1.0, // Maximum confidence for runtime-observed.
                Reasons = baseVerdict.Reasons.Add("Runtime observation confirms reachability")
            };
        }

        // If static analysis said unreachable but we observed it, override.
        if (hasObservedPathToTarget && baseVerdict.Status == SliceVerdictStatus.Unreachable)
        {
            _logger.LogWarning(
                "Runtime observation contradicts static analysis (unreachable -> observed_reachable)");
            return baseVerdict with
            {
                Status = SliceVerdictStatus.ObservedReachable,
                Confidence = 1.0,
                Reasons = baseVerdict.Reasons.Add("Runtime observation overrides static analysis")
            };
        }

        // Boost confidence if we have supporting observations.
        if (mergeResult.Statistics.CoverageRatio > 0)
        {
            var boostedConfidence = Math.Min(1.0,
                baseVerdict.Confidence + (1.0 - baseVerdict.Confidence) * mergeResult.Statistics.CoverageRatio);
            return baseVerdict with
            {
                Confidence = boostedConfidence,
                Reasons = baseVerdict.Reasons.Add($"Confidence boosted by {mergeResult.Statistics.CoverageRatio:P0} runtime coverage")
            };
        }

        return baseVerdict;
    }

    // Annotates slice edges with observation metadata and drops those below the
    // confidence floor. FIX: lookups were FirstOrDefault linear scans per edge
    // (O(edges x observations)); build (from,to) dictionaries once instead.
    // TryAdd keeps the first match, preserving FirstOrDefault semantics.
    private SliceSubgraph EnhanceSubgraph(SliceSubgraph baseSubgraph, RuntimeStaticMergeResult mergeResult)
    {
        var observedByEdge = new Dictionary<(string From, string To), ObservedEdge>();
        foreach (var observed in mergeResult.ObservedEdges)
        {
            observedByEdge.TryAdd((observed.From, observed.To), observed);
        }

        var runtimeOnlyByEdge = new Dictionary<(string From, string To), RuntimeOnlyEdge>();
        foreach (var runtimeOnly in mergeResult.RuntimeOnlyEdges)
        {
            runtimeOnlyByEdge.TryAdd((runtimeOnly.From, runtimeOnly.To), runtimeOnly);
        }

        var enhancedEdges = baseSubgraph.Edges
            .Select(edge => EnhanceEdge(edge, observedByEdge, runtimeOnlyByEdge))
            .Where(edge => edge.Confidence >= _options.MinConfidence)
            .ToImmutableArray();
        return baseSubgraph with { Edges = enhancedEdges };
    }

    // Applies observation metadata to a single edge using the prebuilt lookups.
    private SliceEdge EnhanceEdge(
        SliceEdge edge,
        IReadOnlyDictionary<(string From, string To), ObservedEdge> observedByEdge,
        IReadOnlyDictionary<(string From, string To), RuntimeOnlyEdge> runtimeOnlyByEdge)
    {
        // Check if this edge was observed.
        if (observedByEdge.TryGetValue((edge.From, edge.To), out var observed))
        {
            var confidence = _options.PromoteObservedConfidence ? 1.0 : edge.Confidence;
            return edge with
            {
                Confidence = confidence,
                Observed = new ObservedEdgeMetadata
                {
                    FirstObserved = observed.FirstObserved,
                    LastObserved = observed.LastObserved,
                    ObservationCount = observed.ObservationCount,
                    TraceDigest = observed.TraceDigest
                }
            };
        }

        // Check if this is a runtime-only edge.
        if (_options.IncludeRuntimeOnlyEdges
            && runtimeOnlyByEdge.TryGetValue((edge.From, edge.To), out var runtimeOnly))
        {
            return edge with
            {
                Kind = SliceEdgeKind.Dynamic,
                Evidence = $"runtime:{runtimeOnly.Origin}",
                Observed = new ObservedEdgeMetadata
                {
                    FirstObserved = runtimeOnly.FirstObserved,
                    LastObserved = runtimeOnly.LastObserved,
                    ObservationCount = runtimeOnly.ObservationCount,
                    TraceDigest = runtimeOnly.TraceDigest
                }
            };
        }

        return edge;
    }
}
/// <summary>
/// Coverage statistics for runtime observations.
/// </summary>
public sealed record ObservationCoverage
{
    /// <summary>Total edges in the slice subgraph.</summary>
    public int TotalEdges { get; init; }
    /// <summary>Edges carrying runtime observation metadata.</summary>
    public int ObservedEdges { get; init; }
    /// <summary>Observed/total ratio; 0 when the subgraph has no edges.</summary>
    public double CoverageRatio { get; init; }
    /// <summary>True when every edge in a non-empty subgraph was observed.</summary>
    public bool HasFullCoverage { get; init; }
}

View File

@@ -0,0 +1,173 @@
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Policy binding mode for slices. Controls how strictly a slice's recorded
/// policy must match the current policy for the slice to remain valid.
/// </summary>
public enum PolicyBindingMode
{
    /// <summary>
    /// Slice is invalid if policy changes at all (digest must match exactly).
    /// </summary>
    Strict,
    /// <summary>
    /// Slice is valid with newer policy versions only (semver comparison).
    /// </summary>
    Forward,
    /// <summary>
    /// Slice is valid with any policy version.
    /// </summary>
    Any
}
/// <summary>
/// Policy binding information for a reachability slice.
/// </summary>
public sealed record PolicyBinding
{
    /// <summary>
    /// Content-addressed hash of the policy DSL.
    /// Compared ordinally in strict mode; exact digest format is not enforced here.
    /// </summary>
    [JsonPropertyName("policyDigest")]
    public required string PolicyDigest { get; init; }
    /// <summary>
    /// Semantic version of the policy. Must parse via <see cref="Version.TryParse(string?, out Version?)"/>
    /// for forward-compatibility validation to succeed.
    /// </summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }
    /// <summary>
    /// When the policy was bound to this slice.
    /// </summary>
    [JsonPropertyName("boundAt")]
    public required DateTimeOffset BoundAt { get; init; }
    /// <summary>
    /// Binding mode for validation.
    /// </summary>
    [JsonPropertyName("mode")]
    [JsonConverter(typeof(JsonStringEnumConverter))]
    public required PolicyBindingMode Mode { get; init; }
    /// <summary>
    /// Optional policy name/identifier.
    /// </summary>
    [JsonPropertyName("policyName")]
    public string? PolicyName { get; init; }
    /// <summary>
    /// Optional policy source (e.g., git commit hash).
    /// </summary>
    [JsonPropertyName("policySource")]
    public string? PolicySource { get; init; }
}
/// <summary>
/// Result of policy binding validation.
/// </summary>
public sealed record PolicyBindingValidationResult
{
    /// <summary>Whether the slice's policy binding is valid against the current policy.</summary>
    public required bool Valid { get; init; }
    /// <summary>Human-readable reason when <see cref="Valid"/> is false; null otherwise.</summary>
    public string? FailureReason { get; init; }
    /// <summary>The binding recorded on the slice.</summary>
    public required PolicyBinding SlicePolicy { get; init; }
    /// <summary>The policy currently in effect.</summary>
    public required PolicyBinding CurrentPolicy { get; init; }
}
/// <summary>
/// Validator for policy bindings. Checks a slice's recorded binding against the
/// currently active policy, honouring the slice's binding mode: strict digest
/// equality, forward version compatibility, or accept-any.
/// </summary>
public sealed class PolicyBindingValidator
{
    /// <summary>
    /// Validate a policy binding against current policy.
    /// </summary>
    /// <param name="sliceBinding">Binding recorded when the slice was produced.</param>
    /// <param name="currentPolicy">Binding describing the currently active policy.</param>
    /// <returns>The verdict, an optional failure reason, and both bindings.</returns>
    /// <exception cref="ArgumentNullException">Either argument is null.</exception>
    /// <exception cref="ArgumentException">The slice binding uses an unknown mode.</exception>
    public PolicyBindingValidationResult Validate(
        PolicyBinding sliceBinding,
        PolicyBinding currentPolicy)
    {
        ArgumentNullException.ThrowIfNull(sliceBinding);
        ArgumentNullException.ThrowIfNull(currentPolicy);

        // Each helper returns null on success or a human-readable failure reason;
        // the result record is then built exactly once instead of per branch.
        var failureReason = sliceBinding.Mode switch
        {
            PolicyBindingMode.Strict => ValidateStrict(sliceBinding, currentPolicy),
            PolicyBindingMode.Forward => ValidateForward(sliceBinding, currentPolicy),
            PolicyBindingMode.Any => null, // 'Any' mode accepts every policy version.
            _ => throw new ArgumentException($"Unknown policy binding mode: {sliceBinding.Mode}")
        };

        return new PolicyBindingValidationResult
        {
            Valid = failureReason is null,
            FailureReason = failureReason,
            SlicePolicy = sliceBinding,
            CurrentPolicy = currentPolicy
        };
    }

    // Strict: the policy digest must match byte-for-byte (ordinal comparison).
    private static string? ValidateStrict(
        PolicyBinding sliceBinding,
        PolicyBinding currentPolicy)
        => string.Equals(sliceBinding.PolicyDigest, currentPolicy.PolicyDigest, StringComparison.Ordinal)
            ? null
            : $"Policy digest mismatch. Slice bound to {sliceBinding.PolicyDigest}, current is {currentPolicy.PolicyDigest}.";

    // Forward: the current policy version must be greater than or equal to the
    // version the slice was bound to; both versions must parse as System.Version.
    private static string? ValidateForward(
        PolicyBinding sliceBinding,
        PolicyBinding currentPolicy)
    {
        if (!Version.TryParse(sliceBinding.PolicyVersion, out var sliceVersion) ||
            !Version.TryParse(currentPolicy.PolicyVersion, out var currentVersion))
        {
            return "Invalid version format for forward compatibility check.";
        }

        return currentVersion >= sliceVersion
            ? null
            : $"Policy version downgrade detected. Slice bound to {sliceVersion}, current is {currentVersion}.";
    }
}

View File

@@ -0,0 +1,113 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Reachability.Slices.Replay;
/// <summary>
/// Computes detailed diffs between two reachability slices. Slices are
/// normalized first so ordering differences do not register as drift.
/// </summary>
public sealed class SliceDiffComputer
{
    /// <summary>
    /// Compare an original slice against a recomputed one and report node,
    /// edge, and verdict differences.
    /// </summary>
    public SliceDiffResult Compute(ReachabilitySlice original, ReachabilitySlice recomputed)
    {
        ArgumentNullException.ThrowIfNull(original);
        ArgumentNullException.ThrowIfNull(recomputed);

        var left = original.Normalize();
        var right = recomputed.Normalize();

        var nodes = ComputeNodesDiff(left.Subgraph.Nodes, right.Subgraph.Nodes);
        var edges = ComputeEdgesDiff(left.Subgraph.Edges, right.Subgraph.Edges);
        var verdict = ComputeVerdictDiff(left.Verdict, right.Verdict);

        var identical = !nodes.HasChanges && !edges.HasChanges && verdict is null;
        return new SliceDiffResult(
            Match: identical,
            NodesDiff: nodes,
            EdgesDiff: edges,
            VerdictDiff: verdict);
    }

    // Set-difference over node ids, reported in stable ordinal order.
    private static NodesDiff ComputeNodesDiff(
        ImmutableArray<SliceNode> original,
        ImmutableArray<SliceNode> recomputed)
    {
        var before = new HashSet<string>(original.Select(n => n.Id), StringComparer.Ordinal);
        var after = new HashSet<string>(recomputed.Select(n => n.Id), StringComparer.Ordinal);

        var missing = before.Except(after).OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray();
        var extra = after.Except(before).OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray();
        return new NodesDiff(missing, extra, missing.Length > 0 || extra.Length > 0);
    }

    // Set-difference over composite edge keys, reported in stable ordinal order.
    private static EdgesDiff ComputeEdgesDiff(
        ImmutableArray<SliceEdge> original,
        ImmutableArray<SliceEdge> recomputed)
    {
        var before = new HashSet<string>(original.Select(EdgeKey), StringComparer.Ordinal);
        var after = new HashSet<string>(recomputed.Select(EdgeKey), StringComparer.Ordinal);

        var missing = before.Except(after).OrderBy(key => key, StringComparer.Ordinal).ToImmutableArray();
        var extra = after.Except(before).OrderBy(key => key, StringComparer.Ordinal).ToImmutableArray();
        return new EdgesDiff(missing, extra, missing.Length > 0 || extra.Length > 0);
    }

    // Stable identity for an edge: endpoints plus kind.
    private static string EdgeKey(SliceEdge edge)
        => $"{edge.From}→{edge.To}:{edge.Kind}";

    // Returns a human-readable description of the first verdict difference found,
    // or null when the verdicts agree (confidence tolerance: 0.01).
    private static string? ComputeVerdictDiff(SliceVerdict original, SliceVerdict recomputed)
    {
        if (original.Status != recomputed.Status)
        {
            return $"Status changed: {original.Status} → {recomputed.Status}";
        }

        var confidenceDiff = Math.Abs(original.Confidence - recomputed.Confidence);
        if (confidenceDiff > 0.01)
        {
            return $"Confidence changed: {original.Confidence:F3} → {recomputed.Confidence:F3} (Δ={confidenceDiff:F3})";
        }

        return original.UnknownCount != recomputed.UnknownCount
            ? $"Unknown count changed: {original.UnknownCount} → {recomputed.UnknownCount}"
            : null;
    }
}
/// <summary>
/// Result of comparing two slices.
/// </summary>
/// <param name="Match">True when nodes, edges, and verdict are all unchanged.</param>
/// <param name="NodesDiff">Node ids missing from / extra in the recomputed slice.</param>
/// <param name="EdgesDiff">Edge keys missing from / extra in the recomputed slice.</param>
/// <param name="VerdictDiff">Human-readable verdict change, or null when unchanged.</param>
public sealed record SliceDiffResult(
    bool Match,
    NodesDiff NodesDiff,
    EdgesDiff EdgesDiff,
    string? VerdictDiff);
/// <summary>
/// Node-level differences between two slices.
/// </summary>
/// <param name="Missing">Node ids present in the original but not the recomputed slice.</param>
/// <param name="Extra">Node ids present in the recomputed but not the original slice.</param>
/// <param name="HasChanges">True when either list is non-empty.</param>
public sealed record NodesDiff(
    ImmutableArray<string> Missing,
    ImmutableArray<string> Extra,
    bool HasChanges);
/// <summary>
/// Edge-level differences between two slices (edges identified by composite keys).
/// </summary>
/// <param name="Missing">Edge keys present in the original but not the recomputed slice.</param>
/// <param name="Extra">Edge keys present in the recomputed but not the original slice.</param>
/// <param name="HasChanges">True when either list is non-empty.</param>
public sealed record EdgesDiff(
    ImmutableArray<string> Missing,
    ImmutableArray<string> Extra,
    bool HasChanges);

View File

@@ -0,0 +1,180 @@
using System.Collections.Concurrent;
using Microsoft.Extensions.Options;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Options for slice caching behavior.
/// </summary>
public sealed class SliceCacheOptions
{
    /// <summary>
    /// Cache time-to-live. Default: 1 hour.
    /// NOTE(review): the cache's SetAsync takes an explicit ttl parameter and this
    /// option is not consulted by the cache code visible here — confirm intended.
    /// </summary>
    public TimeSpan Ttl { get; set; } = TimeSpan.FromHours(1);

    /// <summary>
    /// Maximum number of cached items before eviction. Default: 10000.
    /// </summary>
    public int MaxItems { get; set; } = 10_000;

    /// <summary>
    /// Whether caching is enabled. When false, reads miss and writes are no-ops. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;
}
/// <summary>
/// In-memory LRU cache for reachability slices with TTL eviction.
/// Thread-safe; a background timer sweeps expired entries once per minute,
/// and reads lazily evict expired entries they encounter.
/// </summary>
public sealed class SliceCache : ISliceCache, IDisposable
{
    private readonly SliceCacheOptions _options;
    private readonly ConcurrentDictionary<string, CacheItem> _cache = new(StringComparer.Ordinal);
    private readonly Timer _evictionTimer;
    private long _hitCount;
    private long _missCount;
    private bool _disposed;

    public SliceCache(IOptions<SliceCacheOptions> options)
    {
        _options = options?.Value ?? new SliceCacheOptions();
        // Sweep expired entries once per minute, independent of read traffic.
        _evictionTimer = new Timer(EvictExpired, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1));
    }

    /// <summary>
    /// Look up a cached result. Returns null on a miss, on expiry, or when
    /// caching is disabled. Hits refresh the entry's LRU timestamp.
    /// </summary>
    public Task<CachedSliceResult?> TryGetAsync(string cacheKey, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        if (!_options.Enabled)
        {
            return Task.FromResult<CachedSliceResult?>(null);
        }
        if (_cache.TryGetValue(cacheKey, out var item))
        {
            if (item.ExpiresAt > DateTimeOffset.UtcNow)
            {
                item.LastAccessed = DateTimeOffset.UtcNow;
                Interlocked.Increment(ref _hitCount);
                var result = new CachedSliceResult
                {
                    SliceDigest = item.Digest,
                    Verdict = item.Verdict,
                    Confidence = item.Confidence,
                    // Defensive copy: never hand callers a reference to the cached
                    // list, otherwise they could mutate shared cache state.
                    PathWitnesses = item.PathWitnesses.ToList(),
                    CachedAt = item.CachedAt
                };
                return Task.FromResult<CachedSliceResult?>(result);
            }
            // Expired - conditionally remove only the entry we observed so a
            // concurrent re-insert under the same key is not lost; report a miss.
            _cache.TryRemove(new KeyValuePair<string, CacheItem>(cacheKey, item));
        }
        Interlocked.Increment(ref _missCount);
        return Task.FromResult<CachedSliceResult?>(null);
    }

    /// <summary>
    /// Insert or replace an entry with the given time-to-live. No-op when
    /// caching is disabled. Evicts oldest entries when at capacity.
    /// </summary>
    public Task SetAsync(string cacheKey, CachedSliceResult result, TimeSpan ttl, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        ArgumentNullException.ThrowIfNull(result);
        if (!_options.Enabled) return Task.CompletedTask;
        // Evict if at capacity
        if (_cache.Count >= _options.MaxItems)
        {
            EvictLru();
        }
        var now = DateTimeOffset.UtcNow;
        var item = new CacheItem
        {
            Digest = result.SliceDigest,
            Verdict = result.Verdict,
            Confidence = result.Confidence,
            // Copy-in so later mutations of the caller's list do not leak into the cache.
            PathWitnesses = result.PathWitnesses.ToList(),
            CachedAt = now,
            ExpiresAt = now.Add(ttl),
            LastAccessed = now
        };
        _cache[cacheKey] = item;
        return Task.CompletedTask;
    }

    /// <summary>Remove a single entry, if present.</summary>
    public Task RemoveAsync(string cacheKey, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        _cache.TryRemove(cacheKey, out _);
        return Task.CompletedTask;
    }

    /// <summary>Drop all entries and reset hit/miss counters.</summary>
    public Task ClearAsync(CancellationToken cancellationToken = default)
    {
        _cache.Clear();
        Interlocked.Exchange(ref _hitCount, 0);
        Interlocked.Exchange(ref _missCount, 0);
        return Task.CompletedTask;
    }

    /// <summary>Snapshot of hit/miss counters and entry count.</summary>
    public CacheStatistics GetStatistics() => new()
    {
        HitCount = Interlocked.Read(ref _hitCount),
        MissCount = Interlocked.Read(ref _missCount),
        EntryCount = _cache.Count,
        EstimatedSizeBytes = _cache.Count * 1024 // Rough estimate
    };

    // Timer callback: drop entries whose TTL has elapsed.
    private void EvictExpired(object? state)
    {
        if (_disposed) return;
        var now = DateTimeOffset.UtcNow;
        foreach (var entry in _cache)
        {
            if (entry.Value.ExpiresAt <= now)
            {
                // Conditional remove: skips entries replaced since we observed them.
                _cache.TryRemove(entry);
            }
        }
    }

    // Capacity eviction: remove the least-recently-accessed ~10% of items.
    private void EvictLru()
    {
        // Remove oldest 10% of items
        var toRemove = Math.Max(1, _options.MaxItems / 10);
        var oldest = _cache
            .OrderBy(kvp => kvp.Value.LastAccessed)
            .Take(toRemove)
            .Select(kvp => kvp.Key)
            .ToList();
        foreach (var key in oldest)
        {
            _cache.TryRemove(key, out _);
        }
    }

    public void Dispose()
    {
        if (_disposed) return;
        _disposed = true;
        _evictionTimer.Dispose();
    }

    // Mutable cache entry; LastAccessed is updated on every hit for LRU ordering.
    private sealed class CacheItem
    {
        public required string Digest { get; init; }
        public required string Verdict { get; init; }
        public required double Confidence { get; init; }
        public required List<string> PathWitnesses { get; init; }
        public required DateTimeOffset CachedAt { get; init; }
        public required DateTimeOffset ExpiresAt { get; init; }
        public DateTimeOffset LastAccessed { get; set; }
    }
}

View File

@@ -0,0 +1,68 @@
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Cache.Abstractions;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Stores a reachability slice and its DSSE attestation in content-addressable
/// storage. The slice's canonical bytes are written under the hex portion of its
/// digest; the signed DSSE envelope is written alongside under "&lt;hex&gt;.dsse".
/// </summary>
public sealed class SliceCasStorage
{
    private readonly SliceHasher _hasher;
    private readonly SliceDsseSigner _signer;
    private readonly ICryptoHash _cryptoHash;

    public SliceCasStorage(SliceHasher hasher, SliceDsseSigner signer, ICryptoHash cryptoHash)
    {
        _hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
    }

    /// <summary>
    /// Canonicalize and store the slice, then sign it and store the DSSE envelope.
    /// </summary>
    /// <returns>Digests and cas:// URIs for both the slice and its envelope.</returns>
    public async Task<SliceCasResult> StoreAsync(
        ReachabilitySlice slice,
        IFileContentAddressableStore cas,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(slice);
        ArgumentNullException.ThrowIfNull(cas);
        // Canonical bytes plus a prefixed digest (scheme:hex) for the slice payload.
        var digestResult = _hasher.ComputeDigest(slice);
        // CAS keys use the bare hex portion of the digest (prefix stripped).
        var casKey = ExtractDigestHex(digestResult.Digest);
        await using (var sliceStream = new MemoryStream(digestResult.CanonicalBytes, writable: false))
        {
            await cas.PutAsync(new FileCasPutRequest(casKey, sliceStream, leaveOpen: false), cancellationToken)
                .ConfigureAwait(false);
        }
        // Sign after the payload is stored, then persist the envelope next to it.
        var signed = await _signer.SignAsync(slice, cancellationToken).ConfigureAwait(false);
        var envelopeBytes = CanonicalJson.SerializeToUtf8Bytes(signed.Envelope);
        var dsseDigest = _cryptoHash.ComputePrefixedHashForPurpose(envelopeBytes, HashPurpose.Attestation);
        var dsseKey = $"{casKey}.dsse";
        await using (var dsseStream = new MemoryStream(envelopeBytes, writable: false))
        {
            await cas.PutAsync(new FileCasPutRequest(dsseKey, dsseStream, leaveOpen: false), cancellationToken)
                .ConfigureAwait(false);
        }
        return new SliceCasResult(
            signed.SliceDigest,
            $"cas://slices/{casKey}",
            dsseDigest,
            $"cas://slices/{dsseKey}",
            signed);
    }

    // Strips the "scheme:" prefix from a digest; returns the input unchanged
    // when no colon is present.
    private static string ExtractDigestHex(string prefixed)
    {
        var colonIndex = prefixed.IndexOf(':');
        return colonIndex >= 0 ? prefixed[(colonIndex + 1)..] : prefixed;
    }
}
/// <summary>
/// Locations and digests produced by storing a slice and its attestation in CAS.
/// </summary>
/// <param name="SliceDigest">Prefixed digest of the canonical slice payload.</param>
/// <param name="SliceCasUri">cas:// URI where the slice payload was stored.</param>
/// <param name="DsseDigest">Prefixed digest of the serialized DSSE envelope.</param>
/// <param name="DsseCasUri">cas:// URI where the DSSE envelope was stored.</param>
/// <param name="SignedSlice">The signed slice (normalized payload plus envelope).</param>
public sealed record SliceCasResult(
    string SliceDigest,
    string SliceCasUri,
    string DsseDigest,
    string DsseCasUri,
    SignedSlice SignedSlice);

View File

@@ -0,0 +1,178 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Computes detailed diffs between two slices for replay verification.
/// </summary>
public sealed class SliceDiffComputer
{
    /// <summary>
    /// Compare two slices and produce a detailed diff of nodes, edges, and verdict.
    /// </summary>
    public SliceDiffResult Compare(ReachabilitySlice original, ReachabilitySlice recomputed)
    {
        ArgumentNullException.ThrowIfNull(original);
        ArgumentNullException.ThrowIfNull(recomputed);
        var nodeDiff = CompareNodes(original.Subgraph.Nodes, recomputed.Subgraph.Nodes);
        var edgeDiff = CompareEdges(original.Subgraph.Edges, recomputed.Subgraph.Edges);
        var verdictDiff = CompareVerdicts(original.Verdict, recomputed.Verdict);
        var match = nodeDiff.MissingNodes.IsEmpty &&
            nodeDiff.ExtraNodes.IsEmpty &&
            edgeDiff.MissingEdges.IsEmpty &&
            edgeDiff.ExtraEdges.IsEmpty &&
            verdictDiff == null;
        return new SliceDiffResult
        {
            Match = match,
            MissingNodes = nodeDiff.MissingNodes,
            ExtraNodes = nodeDiff.ExtraNodes,
            MissingEdges = edgeDiff.MissingEdges,
            ExtraEdges = edgeDiff.ExtraEdges,
            VerdictDiff = verdictDiff
        };
    }

    /// <summary>
    /// Compute a deterministic cache key for a query based on its parameters.
    /// Symbols and entrypoints are sorted (ordinal) so input ordering does not
    /// affect the key. Returns a lowercase hex SHA-256 string.
    /// </summary>
    public static string ComputeCacheKey(string scanId, string? cveId, IEnumerable<string>? symbols, IEnumerable<string>? entrypoints, string? policyHash)
    {
        var sb = new StringBuilder();
        sb.Append("scan:").Append(scanId ?? "").Append('|');
        sb.Append("cve:").Append(cveId ?? "").Append('|');
        if (symbols != null)
        {
            foreach (var s in symbols.OrderBy(x => x, StringComparer.Ordinal))
            {
                sb.Append("sym:").Append(s).Append(',');
            }
        }
        sb.Append('|');
        if (entrypoints != null)
        {
            foreach (var e in entrypoints.OrderBy(x => x, StringComparer.Ordinal))
            {
                sb.Append("ep:").Append(e).Append(',');
            }
        }
        sb.Append('|');
        sb.Append("policy:").Append(policyHash ?? "");
        // SHA256.HashData is thread-safe and avoids allocating and disposing a
        // hasher instance per call; output is identical to SHA256.Create().ComputeHash.
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Set-difference over node ids, reported in stable ordinal order.
    private static NodeDiffResult CompareNodes(ImmutableArray<SliceNode> original, ImmutableArray<SliceNode> recomputed)
    {
        var originalIds = original.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
        var recomputedIds = recomputed.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
        var missing = originalIds.Except(recomputedIds)
            .OrderBy(x => x, StringComparer.Ordinal)
            .ToImmutableArray();
        var extra = recomputedIds.Except(originalIds)
            .OrderBy(x => x, StringComparer.Ordinal)
            .ToImmutableArray();
        return new NodeDiffResult(missing, extra);
    }

    // Set-difference over composite edge keys (from->to:kind), stable ordinal order.
    private static EdgeDiffResult CompareEdges(ImmutableArray<SliceEdge> original, ImmutableArray<SliceEdge> recomputed)
    {
        static string EdgeKey(SliceEdge e) => $"{e.From}->{e.To}:{e.Kind}";
        var originalKeys = original.Select(EdgeKey).ToHashSet(StringComparer.Ordinal);
        var recomputedKeys = recomputed.Select(EdgeKey).ToHashSet(StringComparer.Ordinal);
        var missing = originalKeys.Except(recomputedKeys)
            .OrderBy(x => x, StringComparer.Ordinal)
            .ToImmutableArray();
        var extra = recomputedKeys.Except(originalKeys)
            .OrderBy(x => x, StringComparer.Ordinal)
            .ToImmutableArray();
        return new EdgeDiffResult(missing, extra);
    }

    // First verdict difference found, or null when verdicts agree
    // (confidence tolerance: 0.0001).
    private static string? CompareVerdicts(SliceVerdict original, SliceVerdict recomputed)
    {
        if (original.Status != recomputed.Status)
        {
            return $"Status: {original.Status} -> {recomputed.Status}";
        }
        if (Math.Abs(original.Confidence - recomputed.Confidence) > 0.0001)
        {
            return $"Confidence: {original.Confidence:F4} -> {recomputed.Confidence:F4}";
        }
        return null;
    }

    private readonly record struct NodeDiffResult(ImmutableArray<string> MissingNodes, ImmutableArray<string> ExtraNodes);
    private readonly record struct EdgeDiffResult(ImmutableArray<string> MissingEdges, ImmutableArray<string> ExtraEdges);
}
/// <summary>
/// Result of slice comparison.
/// </summary>
public sealed record SliceDiffResult
{
    /// <summary>True when nodes, edges, and verdict are all unchanged.</summary>
    public required bool Match { get; init; }
    /// <summary>Node ids present in the original but absent from the recomputed slice.</summary>
    public ImmutableArray<string> MissingNodes { get; init; } = ImmutableArray<string>.Empty;
    /// <summary>Node ids present in the recomputed slice but absent from the original.</summary>
    public ImmutableArray<string> ExtraNodes { get; init; } = ImmutableArray<string>.Empty;
    /// <summary>Edge keys present in the original but absent from the recomputed slice.</summary>
    public ImmutableArray<string> MissingEdges { get; init; } = ImmutableArray<string>.Empty;
    /// <summary>Edge keys present in the recomputed slice but absent from the original.</summary>
    public ImmutableArray<string> ExtraEdges { get; init; } = ImmutableArray<string>.Empty;
    /// <summary>Human-readable verdict change, or null when the verdicts agree.</summary>
    public string? VerdictDiff { get; init; }

    /// <summary>
    /// Get human-readable diff summary. Lists at most five items per category,
    /// appending "..." when there are more.
    /// </summary>
    public string ToSummary()
    {
        if (Match) return "Slices match exactly.";
        var sb = new StringBuilder();
        sb.AppendLine("Slice diff:");
        if (!MissingNodes.IsDefaultOrEmpty)
        {
            sb.AppendLine($"  Missing nodes ({MissingNodes.Length}): {string.Join(", ", MissingNodes.Take(5))}{(MissingNodes.Length > 5 ? "..." : "")}");
        }
        if (!ExtraNodes.IsDefaultOrEmpty)
        {
            sb.AppendLine($"  Extra nodes ({ExtraNodes.Length}): {string.Join(", ", ExtraNodes.Take(5))}{(ExtraNodes.Length > 5 ? "..." : "")}");
        }
        if (!MissingEdges.IsDefaultOrEmpty)
        {
            sb.AppendLine($"  Missing edges ({MissingEdges.Length}): {string.Join(", ", MissingEdges.Take(5))}{(MissingEdges.Length > 5 ? "..." : "")}");
        }
        if (!ExtraEdges.IsDefaultOrEmpty)
        {
            sb.AppendLine($"  Extra edges ({ExtraEdges.Length}): {string.Join(", ", ExtraEdges.Take(5))}{(ExtraEdges.Length > 5 ? "..." : "")}");
        }
        if (VerdictDiff != null)
        {
            sb.AppendLine($"  Verdict changed: {VerdictDiff}");
        }
        return sb.ToString();
    }
}

View File

@@ -0,0 +1,51 @@
using StellaOps.Replay.Core;
using StellaOps.Scanner.ProofSpine;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Signs reachability slices as DSSE envelopes using the configured crypto profile.
/// Slices are normalized before hashing and signing so the signature is deterministic.
/// </summary>
public sealed class SliceDsseSigner
{
    private readonly IDsseSigningService _signingService;
    private readonly ICryptoProfile _cryptoProfile;
    private readonly SliceHasher _hasher;
    private readonly TimeProvider _timeProvider;

    public SliceDsseSigner(
        IDsseSigningService signingService,
        ICryptoProfile cryptoProfile,
        SliceHasher hasher,
        TimeProvider? timeProvider = null)
    {
        _signingService = signingService ?? throw new ArgumentNullException(nameof(signingService));
        _cryptoProfile = cryptoProfile ?? throw new ArgumentNullException(nameof(cryptoProfile));
        _hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
        // Injectable clock so SignedAt is testable; defaults to the system clock.
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Normalize the slice, compute its digest, and sign it as a DSSE payload.
    /// </summary>
    /// <returns>The normalized slice, its digest, the envelope, and the signing time.</returns>
    public async Task<SignedSlice> SignAsync(ReachabilitySlice slice, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(slice);
        var normalized = slice.Normalize();
        var digestResult = _hasher.ComputeDigest(normalized);
        var envelope = await _signingService.SignAsync(
            normalized,
            SliceSchema.DssePayloadType,
            _cryptoProfile,
            cancellationToken)
            .ConfigureAwait(false);
        return new SignedSlice(
            Slice: normalized,
            SliceDigest: digestResult.Digest,
            Envelope: envelope,
            SignedAt: _timeProvider.GetUtcNow());
    }
}
/// <summary>
/// A normalized slice together with its digest and signed DSSE envelope.
/// </summary>
/// <param name="Slice">The normalized slice that was signed.</param>
/// <param name="SliceDigest">Prefixed digest of the canonical slice bytes.</param>
/// <param name="Envelope">The DSSE envelope produced by the signing service.</param>
/// <param name="SignedAt">UTC timestamp of the signing operation.</param>
public sealed record SignedSlice(
    ReachabilitySlice Slice,
    string SliceDigest,
    DsseEnvelope Envelope,
    DateTimeOffset SignedAt);

View File

@@ -0,0 +1,568 @@
using System.Collections.Immutable;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability.Gates;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Extracts a reachability slice from a rich call graph: the subgraph of nodes
/// and edges lying on some path from an entrypoint to a queried target, plus a
/// verdict computed from the discovered paths.
/// </summary>
public sealed class SliceExtractor
{
    private readonly VerdictComputer _verdictComputer;

    public SliceExtractor(VerdictComputer verdictComputer)
    {
        _verdictComputer = verdictComputer ?? throw new ArgumentNullException(nameof(verdictComputer));
    }

    /// <summary>
    /// Extract the slice for the request's query. Steps: resolve entrypoints and
    /// targets, compute forward reachability from entrypoints and backward
    /// reachability to targets, intersect the two sets, build the subgraph,
    /// summarize shortest paths, and compute the verdict. The returned slice is
    /// normalized for deterministic serialization.
    /// </summary>
    public ReachabilitySlice Extract(SliceExtractionRequest request, SliceVerdictOptions? verdictOptions = null)
    {
        ArgumentNullException.ThrowIfNull(request);
        var graph = request.Graph;
        var query = request.Query;
        var nodeLookup = graph.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
        var entrypoints = ResolveEntrypoints(query, graph, nodeLookup);
        var targets = ResolveTargets(query, graph);
        // Without both ends of the query there is nothing to traverse.
        if (entrypoints.Count == 0 || targets.Count == 0)
        {
            return BuildEmptySlice(request, entrypoints.Count == 0, targets.Count == 0);
        }
        var forwardEdges = BuildEdgeLookup(graph.Edges);
        var reverseEdges = BuildReverseEdgeLookup(graph.Edges);
        // Forward sweep from entrypoints; backward sweep from targets.
        var reachableFromEntrypoints = Traverse(entrypoints, forwardEdges);
        var canReachTargets = Traverse(targets, reverseEdges);
        // Nodes on some entry->target path = intersection of the two sweeps;
        // entrypoints and targets themselves are always kept for context.
        var includedNodes = new HashSet<string>(reachableFromEntrypoints, StringComparer.Ordinal);
        includedNodes.IntersectWith(canReachTargets);
        foreach (var entry in entrypoints)
        {
            includedNodes.Add(entry);
        }
        foreach (var target in targets)
        {
            includedNodes.Add(target);
        }
        // Keep only edges whose endpoints both lie on some entry->target path.
        var subgraphEdges = graph.Edges
            .Where(e => includedNodes.Contains(e.From) && includedNodes.Contains(e.To))
            .Where(e => reachableFromEntrypoints.Contains(e.From) && canReachTargets.Contains(e.To))
            .ToList();
        var subgraphNodes = includedNodes
            .Where(nodeLookup.ContainsKey)
            .Select(id => nodeLookup[id])
            .ToList();
        var nodes = subgraphNodes
            .Select(node => MapNode(node, entrypoints, targets))
            .ToImmutableArray();
        var edges = subgraphEdges
            .Select(MapEdge)
            .ToImmutableArray();
        var paths = BuildPathSummaries(entrypoints, targets, subgraphEdges, nodeLookup);
        // Low-confidence (< 0.5) or unknown-kind edges count toward the unknown budget.
        var unknownEdges = edges.Count(e => e.Kind == SliceEdgeKind.Unknown || e.Confidence < 0.5);
        var verdict = _verdictComputer.Compute(paths, unknownEdges, verdictOptions);
        return new ReachabilitySlice
        {
            Inputs = request.Inputs,
            Query = request.Query,
            Subgraph = new SliceSubgraph { Nodes = nodes, Edges = edges },
            Verdict = verdict,
            Manifest = request.Manifest
        }.Normalize();
    }

    // Empty slice with an Unknown verdict and machine-readable reasons
    // ("missing_entrypoints" / "missing_targets") explaining why.
    private static ReachabilitySlice BuildEmptySlice(SliceExtractionRequest request, bool missingEntrypoints, bool missingTargets)
    {
        var reasons = new List<string>();
        if (missingEntrypoints)
        {
            reasons.Add("missing_entrypoints");
        }
        if (missingTargets)
        {
            reasons.Add("missing_targets");
        }
        return new ReachabilitySlice
        {
            Inputs = request.Inputs,
            Query = request.Query,
            Subgraph = new SliceSubgraph(),
            Verdict = new SliceVerdict
            {
                Status = SliceVerdictStatus.Unknown,
                Confidence = 0.0,
                Reasons = reasons.ToImmutableArray()
            },
            Manifest = request.Manifest
        }.Normalize();
    }

    // Explicit query entrypoints win; otherwise the graph's declared roots are
    // used. Ids are trimmed and must exist in the graph to be accepted.
    private static HashSet<string> ResolveEntrypoints(
        SliceQuery query,
        RichGraph graph,
        Dictionary<string, RichGraphNode> nodeLookup)
    {
        var entrypoints = new HashSet<string>(StringComparer.Ordinal);
        var explicitEntrypoints = query.Entrypoints;
        if (!explicitEntrypoints.IsDefaultOrEmpty)
        {
            foreach (var entry in explicitEntrypoints)
            {
                if (string.IsNullOrWhiteSpace(entry))
                {
                    continue;
                }
                var trimmed = entry.Trim();
                if (nodeLookup.ContainsKey(trimmed))
                {
                    entrypoints.Add(trimmed);
                }
            }
        }
        else
        {
            foreach (var root in graph.Roots ?? Array.Empty<RichGraphRoot>())
            {
                if (string.IsNullOrWhiteSpace(root.Id))
                {
                    continue;
                }
                var trimmed = root.Id.Trim();
                if (nodeLookup.ContainsKey(trimmed))
                {
                    entrypoints.Add(trimmed);
                }
            }
        }
        return entrypoints;
    }

    // Resolves target symbols to node ids. "pkg:" targets match nodes by purl
    // (public/exported only); other targets match node id, symbol id, or display name.
    private static HashSet<string> ResolveTargets(SliceQuery query, RichGraph graph)
    {
        var targets = new HashSet<string>(StringComparer.Ordinal);
        if (query.TargetSymbols.IsDefaultOrEmpty)
        {
            return targets;
        }
        foreach (var target in query.TargetSymbols)
        {
            if (string.IsNullOrWhiteSpace(target))
            {
                continue;
            }
            var trimmed = target.Trim();
            if (IsPackageTarget(trimmed))
            {
                var packageTargets = graph.Nodes
                    .Where(n => string.Equals(n.Purl, trimmed, StringComparison.OrdinalIgnoreCase))
                    .Where(IsPublicNode)
                    .Select(n => n.Id);
                foreach (var nodeId in packageTargets)
                {
                    targets.Add(nodeId);
                }
                continue;
            }
            foreach (var node in graph.Nodes)
            {
                if (string.Equals(node.Id, trimmed, StringComparison.Ordinal) ||
                    string.Equals(node.SymbolId, trimmed, StringComparison.Ordinal))
                {
                    targets.Add(node.Id);
                }
                else if (!string.IsNullOrWhiteSpace(node.Display) &&
                    string.Equals(node.Display, trimmed, StringComparison.Ordinal))
                {
                    targets.Add(node.Id);
                }
            }
        }
        return targets;
    }

    // A target expressed as a package URL rather than a symbol.
    private static bool IsPackageTarget(string value)
        => value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);

    // Nodes without a "visibility" attribute are treated as public by default.
    private static bool IsPublicNode(RichGraphNode node)
    {
        if (node.Attributes is not null &&
            node.Attributes.TryGetValue("visibility", out var visibility) &&
            !string.IsNullOrWhiteSpace(visibility))
        {
            return visibility.Equals("public", StringComparison.OrdinalIgnoreCase)
                || visibility.Equals("exported", StringComparison.OrdinalIgnoreCase);
        }
        return true;
    }

    // Adjacency keyed by edge source; per-key lists sorted for deterministic traversal.
    private static Dictionary<string, List<RichGraphEdge>> BuildEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
    {
        var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
        foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
        {
            if (!lookup.TryGetValue(edge.From, out var list))
            {
                list = new List<RichGraphEdge>();
                lookup[edge.From] = list;
            }
            list.Add(edge);
        }
        foreach (var list in lookup.Values)
        {
            list.Sort(CompareForward);
        }
        return lookup;
    }

    // Adjacency keyed by edge destination; per-key lists sorted for determinism.
    private static Dictionary<string, List<RichGraphEdge>> BuildReverseEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
    {
        var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
        foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
        {
            if (!lookup.TryGetValue(edge.To, out var list))
            {
                list = new List<RichGraphEdge>();
                lookup[edge.To] = list;
            }
            list.Add(edge);
        }
        foreach (var list in lookup.Values)
        {
            list.Sort(CompareReverse);
        }
        return lookup;
    }

    // BFS over either lookup direction. The neighbor is whichever endpoint of the
    // edge is not the current node, so the same routine serves forward (keyed by
    // From) and reverse (keyed by To) sweeps. Returns seeds plus everything reached.
    private static HashSet<string> Traverse(
        HashSet<string> seeds,
        Dictionary<string, List<RichGraphEdge>> edgeLookup)
    {
        var visited = new HashSet<string>(seeds, StringComparer.Ordinal);
        var queue = new Queue<string>(seeds);
        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            if (!edgeLookup.TryGetValue(current, out var edges))
            {
                continue;
            }
            foreach (var edge in edges)
            {
                var next = edge.From == current ? edge.To : edge.From;
                if (!visited.Add(next))
                {
                    continue;
                }
                queue.Enqueue(next);
            }
        }
        return visited;
    }

    // Classifies the node (entrypoint wins over target) and carries across
    // display name, source location attributes, purl, and raw attributes.
    private static SliceNode MapNode(
        RichGraphNode node,
        HashSet<string> entrypoints,
        HashSet<string> targets)
    {
        var kind = SliceNodeKind.Intermediate;
        if (entrypoints.Contains(node.Id))
        {
            kind = SliceNodeKind.Entrypoint;
        }
        else if (targets.Contains(node.Id))
        {
            kind = SliceNodeKind.Target;
        }
        return new SliceNode
        {
            Id = node.Id,
            Symbol = node.Display ?? node.SymbolId ?? node.Id,
            Kind = kind,
            File = ExtractAttribute(node, "file") ?? ExtractAttribute(node, "source_file"),
            Line = ExtractIntAttribute(node, "line"),
            Purl = node.Purl,
            Attributes = node.Attributes
        };
    }

    // Maps a graph edge to a slice edge, keeping only the first evidence entry
    // and the strongest detected gate.
    private static SliceEdge MapEdge(RichGraphEdge edge)
    {
        return new SliceEdge
        {
            From = edge.From,
            To = edge.To,
            Kind = MapEdgeKind(edge.Kind),
            Confidence = edge.Confidence,
            Evidence = edge.Evidence?.FirstOrDefault(),
            Gate = MapGate(edge.Gates)
        };
    }

    // Normalizes free-form edge kind strings into the slice edge taxonomy.
    // Substring checks for plt/iat run before the exact-match table.
    private static SliceEdgeKind MapEdgeKind(string? kind)
    {
        if (string.IsNullOrWhiteSpace(kind))
        {
            return SliceEdgeKind.Direct;
        }
        var normalized = kind.Trim().ToLowerInvariant();
        if (normalized.Contains("plt", StringComparison.Ordinal))
        {
            return SliceEdgeKind.Plt;
        }
        if (normalized.Contains("iat", StringComparison.Ordinal))
        {
            return SliceEdgeKind.Iat;
        }
        return normalized switch
        {
            EdgeTypes.Dynamic => SliceEdgeKind.Dynamic,
            EdgeTypes.Dlopen => SliceEdgeKind.Dynamic,
            EdgeTypes.Loads => SliceEdgeKind.Dynamic,
            EdgeTypes.Call => SliceEdgeKind.Direct,
            EdgeTypes.Import => SliceEdgeKind.Direct,
            _ => SliceEdgeKind.Unknown
        };
    }

    // Picks the highest-confidence gate (ties broken by detail, for determinism)
    // and maps its type; Satisfied is always false here.
    private static SliceGateInfo? MapGate(IReadOnlyList<DetectedGate>? gates)
    {
        if (gates is null || gates.Count == 0)
        {
            return null;
        }
        var gate = gates
            .OrderByDescending(g => g.Confidence)
            .ThenBy(g => g.Detail, StringComparer.Ordinal)
            .First();
        return new SliceGateInfo
        {
            Type = gate.Type switch
            {
                GateType.FeatureFlag => SliceGateType.FeatureFlag,
                GateType.AuthRequired => SliceGateType.Auth,
                GateType.NonDefaultConfig => SliceGateType.Config,
                GateType.AdminOnly => SliceGateType.AdminOnly,
                _ => SliceGateType.Config
            },
            Condition = gate.Detail,
            Satisfied = false
        };
    }

    // One shortest-path summary per (entrypoint, target) pair, in sorted order
    // for determinism. MinConfidence is the weakest edge on the path; the witness
    // is the " -> "-joined list of node display names. When parallel edges share
    // endpoints the highest-confidence one is used.
    private static ImmutableArray<SlicePathSummary> BuildPathSummaries(
        HashSet<string> entrypoints,
        HashSet<string> targets,
        IReadOnlyList<RichGraphEdge> edges,
        Dictionary<string, RichGraphNode> nodeLookup)
    {
        var edgeLookup = BuildEdgeLookup(edges);
        var edgeMap = new Dictionary<(string From, string To), RichGraphEdge>();
        foreach (var edge in edges
            .OrderBy(e => e.From, StringComparer.Ordinal)
            .ThenBy(e => e.To, StringComparer.Ordinal)
            .ThenBy(e => e.Kind, StringComparer.Ordinal))
        {
            var key = (edge.From, edge.To);
            if (!edgeMap.TryGetValue(key, out var existing) || edge.Confidence > existing.Confidence)
            {
                edgeMap[key] = edge;
            }
        }
        var results = new List<SlicePathSummary>();
        var pathIndex = 0;
        foreach (var entry in entrypoints.OrderBy(e => e, StringComparer.Ordinal))
        {
            foreach (var target in targets.OrderBy(t => t, StringComparer.Ordinal))
            {
                var path = FindShortestPath(entry, target, edgeLookup);
                if (path is null || path.Count == 0)
                {
                    continue;
                }
                var minConfidence = 1.0;
                var witnessParts = new List<string>();
                for (var i = 0; i < path.Count; i++)
                {
                    if (nodeLookup.TryGetValue(path[i], out var node))
                    {
                        witnessParts.Add(node.Display ?? node.SymbolId ?? node.Id);
                    }
                    else
                    {
                        witnessParts.Add(path[i]);
                    }
                    if (i == path.Count - 1)
                    {
                        continue;
                    }
                    if (edgeMap.TryGetValue((path[i], path[i + 1]), out var edge))
                    {
                        minConfidence = Math.Min(minConfidence, edge.Confidence);
                    }
                }
                var witness = string.Join(" -> ", witnessParts);
                results.Add(new SlicePathSummary(
                    PathId: $"path:{entry}:{target}:{pathIndex++}",
                    MinConfidence: minConfidence,
                    PathWitness: witness));
            }
        }
        return results.ToImmutableArray();
    }

    // Standard BFS shortest path (by hop count) from start to target; returns
    // the node sequence including both endpoints, or null when unreachable.
    private static List<string>? FindShortestPath(
        string start,
        string target,
        Dictionary<string, List<RichGraphEdge>> edgeLookup)
    {
        var queue = new Queue<string>();
        var visited = new HashSet<string>(StringComparer.Ordinal) { start };
        var previous = new Dictionary<string, string?>(StringComparer.Ordinal) { [start] = null };
        queue.Enqueue(start);
        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            if (string.Equals(current, target, StringComparison.Ordinal))
            {
                return BuildPath(target, previous);
            }
            if (!edgeLookup.TryGetValue(current, out var edges))
            {
                continue;
            }
            foreach (var edge in edges)
            {
                var next = edge.To;
                if (!visited.Add(next))
                {
                    continue;
                }
                previous[next] = current;
                queue.Enqueue(next);
            }
        }
        return null;
    }

    // Deterministic ordering for forward adjacency lists: To, then Kind, then Confidence.
    private static int CompareForward(RichGraphEdge left, RichGraphEdge right)
    {
        var result = string.Compare(left.To, right.To, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        return left.Confidence.CompareTo(right.Confidence);
    }

    // Deterministic ordering for reverse adjacency lists: From, then Kind, then Confidence.
    private static int CompareReverse(RichGraphEdge left, RichGraphEdge right)
    {
        var result = string.Compare(left.From, right.From, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        return left.Confidence.CompareTo(right.Confidence);
    }

    // Walks the BFS predecessor map backwards from target, then reverses.
    private static List<string> BuildPath(string target, Dictionary<string, string?> previous)
    {
        var path = new List<string>();
        string? current = target;
        while (current is not null)
        {
            path.Add(current);
            current = previous[current];
        }
        path.Reverse();
        return path;
    }

    // Returns the named node attribute, or null when absent.
    private static string? ExtractAttribute(RichGraphNode node, string key)
    {
        if (node.Attributes is not null && node.Attributes.TryGetValue(key, out var value))
        {
            return value;
        }
        return null;
    }

    // Returns the named attribute parsed as int, or null when absent/unparseable.
    private static int? ExtractIntAttribute(RichGraphNode node, string key)
    {
        var value = ExtractAttribute(node, key);
        if (value is not null && int.TryParse(value, out var parsed))
        {
            return parsed;
        }
        return null;
    }
}
/// <summary>
/// Inputs for a slice extraction run.
/// </summary>
/// <param name="Graph">The rich call graph to slice.</param>
/// <param name="Inputs">Digests of the artifacts the graph was derived from.</param>
/// <param name="Query">Entrypoints, target symbols, and policy hash for the query.</param>
/// <param name="Manifest">Scan manifest carried into the resulting slice.</param>
public sealed record SliceExtractionRequest(
    RichGraph Graph,
    SliceInputs Inputs,
    SliceQuery Query,
    ScanManifest Manifest);

View File

@@ -0,0 +1,27 @@
using StellaOps.Cryptography;
using StellaOps.Replay.Core;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Computes content digests over canonical slice JSON.
/// </summary>
public sealed class SliceHasher
{
    private readonly ICryptoHash _cryptoHash;

    public SliceHasher(ICryptoHash cryptoHash)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
    }

    /// <summary>
    /// Normalize the slice, serialize it to canonical JSON, and hash the bytes.
    /// </summary>
    /// <returns>The prefixed digest and the canonical bytes it was computed over.</returns>
    public SliceDigestResult ComputeDigest(ReachabilitySlice slice)
    {
        ArgumentNullException.ThrowIfNull(slice);
        // Normalization guarantees deterministic ordering before serialization.
        var normalized = slice.Normalize();
        var bytes = CanonicalJson.SerializeToUtf8Bytes(normalized);
        var digest = _cryptoHash.ComputePrefixedHashForPurpose(bytes, HashPurpose.Graph);
        return new SliceDigestResult(digest, bytes);
    }
}
public sealed record SliceDigestResult(string Digest, byte[] CanonicalBytes);

View File

@@ -0,0 +1,392 @@
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Core;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Root document for a reachability slice predicate (in-toto payload).
/// </summary>
public sealed record ReachabilitySlice
{
    // Predicate type marker; defaults to the v1 slice predicate URI.
    [JsonPropertyName("_type")]
    public string Type { get; init; } = SliceSchema.PredicateType;
    [JsonPropertyName("inputs")]
    public required SliceInputs Inputs { get; init; }
    [JsonPropertyName("query")]
    public required SliceQuery Query { get; init; }
    [JsonPropertyName("subgraph")]
    public required SliceSubgraph Subgraph { get; init; }
    [JsonPropertyName("verdict")]
    public required SliceVerdict Verdict { get; init; }
    [JsonPropertyName("manifest")]
    public required ScanManifest Manifest { get; init; }
    /// <summary>Returns a canonical copy (trimmed, sorted, clamped) suitable for hashing.</summary>
    public ReachabilitySlice Normalize() => SliceNormalization.Normalize(this);
}
/// <summary>Digests identifying the artifacts the slice was computed from.</summary>
public sealed record SliceInputs
{
    [JsonPropertyName("graphDigest")]
    public required string GraphDigest { get; init; }
    [JsonPropertyName("binaryDigests")]
    public ImmutableArray<string> BinaryDigests { get; init; } = ImmutableArray<string>.Empty;
    [JsonPropertyName("sbomDigest")]
    public string? SbomDigest { get; init; }
    [JsonPropertyName("layerDigests")]
    public ImmutableArray<string> LayerDigests { get; init; } = ImmutableArray<string>.Empty;
}
/// <summary>The question being answered: which CVE/symbols, from which entrypoints, under which policy.</summary>
public sealed record SliceQuery
{
    [JsonPropertyName("cveId")]
    public string? CveId { get; init; }
    [JsonPropertyName("targetSymbols")]
    public ImmutableArray<string> TargetSymbols { get; init; } = ImmutableArray<string>.Empty;
    [JsonPropertyName("entrypoints")]
    public ImmutableArray<string> Entrypoints { get; init; } = ImmutableArray<string>.Empty;
    [JsonPropertyName("policyHash")]
    public string? PolicyHash { get; init; }
}
/// <summary>The extracted nodes and edges that witness (or refute) reachability.</summary>
public sealed record SliceSubgraph
{
    [JsonPropertyName("nodes")]
    public ImmutableArray<SliceNode> Nodes { get; init; } = ImmutableArray<SliceNode>.Empty;
    [JsonPropertyName("edges")]
    public ImmutableArray<SliceEdge> Edges { get; init; } = ImmutableArray<SliceEdge>.Empty;
}
/// <summary>Role of a node within the slice subgraph.</summary>
public enum SliceNodeKind
{
    Entrypoint,
    Intermediate,
    Target,
    Unknown
}
/// <summary>A single symbol in the slice subgraph.</summary>
public sealed record SliceNode
{
    [JsonPropertyName("id")]
    public required string Id { get; init; }
    [JsonPropertyName("symbol")]
    public required string Symbol { get; init; }
    [JsonPropertyName("kind")]
    [JsonConverter(typeof(SnakeCaseStringEnumConverter))]
    public required SliceNodeKind Kind { get; init; }
    [JsonPropertyName("file")]
    public string? File { get; init; }
    [JsonPropertyName("line")]
    public int? Line { get; init; }
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }
    // Free-form metadata; normalization sorts keys and drops empty/whitespace entries.
    [JsonPropertyName("attributes")]
    public IReadOnlyDictionary<string, string>? Attributes { get; init; }
}
/// <summary>How a call edge was resolved (direct, PLT/IAT stub, dynamic dispatch, or unknown).</summary>
public enum SliceEdgeKind
{
    Direct,
    Plt,
    Iat,
    Dynamic,
    Unknown
}
/// <summary>A call edge between two slice nodes with confidence and optional gate/runtime evidence.</summary>
public sealed record SliceEdge
{
    [JsonPropertyName("from")]
    public required string From { get; init; }
    [JsonPropertyName("to")]
    public required string To { get; init; }
    [JsonPropertyName("kind")]
    [JsonConverter(typeof(SnakeCaseStringEnumConverter))]
    public SliceEdgeKind Kind { get; init; } = SliceEdgeKind.Direct;
    // Clamped to [0, 1] during normalization.
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; }
    [JsonPropertyName("evidence")]
    public string? Evidence { get; init; }
    [JsonPropertyName("gate")]
    public SliceGateInfo? Gate { get; init; }
    [JsonPropertyName("observed")]
    public ObservedEdgeMetadata? Observed { get; init; }
}
/// <summary>Category of guard that conditions traversal of a gated edge.</summary>
public enum SliceGateType
{
    FeatureFlag,
    Auth,
    Config,
    AdminOnly
}
/// <summary>Gate details for an edge: what guards it and whether the guard is satisfied.</summary>
public sealed record SliceGateInfo
{
    [JsonPropertyName("type")]
    [JsonConverter(typeof(SnakeCaseStringEnumConverter))]
    public required SliceGateType Type { get; init; }
    [JsonPropertyName("condition")]
    public required string Condition { get; init; }
    [JsonPropertyName("satisfied")]
    public required bool Satisfied { get; init; }
}
/// <summary>Runtime observation evidence for an edge; timestamps are normalized to UTC.</summary>
public sealed record ObservedEdgeMetadata
{
    [JsonPropertyName("firstObserved")]
    public required DateTimeOffset FirstObserved { get; init; }
    [JsonPropertyName("lastObserved")]
    public required DateTimeOffset LastObserved { get; init; }
    [JsonPropertyName("count")]
    public required int ObservationCount { get; init; }
    [JsonPropertyName("traceDigest")]
    public string? TraceDigest { get; init; }
}
/// <summary>Overall reachability conclusion for the slice.</summary>
public enum SliceVerdictStatus
{
    Reachable,
    Unreachable,
    Unknown,
    Gated,
    ObservedReachable
}
/// <summary>A path whose reachability depends on a gate condition.</summary>
public sealed record GatedPath
{
    [JsonPropertyName("pathId")]
    public required string PathId { get; init; }
    [JsonPropertyName("gateType")]
    public required string GateType { get; init; }
    [JsonPropertyName("gateCondition")]
    public required string GateCondition { get; init; }
    [JsonPropertyName("gateSatisfied")]
    public required bool GateSatisfied { get; init; }
}
/// <summary>The verdict with confidence, machine-readable reasons, and supporting path witnesses.</summary>
public sealed record SliceVerdict
{
    [JsonPropertyName("status")]
    [JsonConverter(typeof(SnakeCaseStringEnumConverter))]
    public required SliceVerdictStatus Status { get; init; }
    // Clamped to [0, 1] during normalization.
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }
    [JsonPropertyName("reasons")]
    public ImmutableArray<string> Reasons { get; init; } = ImmutableArray<string>.Empty;
    [JsonPropertyName("pathWitnesses")]
    public ImmutableArray<string> PathWitnesses { get; init; } = ImmutableArray<string>.Empty;
    [JsonPropertyName("unknownCount")]
    public int UnknownCount { get; init; }
    [JsonPropertyName("gatedPaths")]
    public ImmutableArray<GatedPath> GatedPaths { get; init; } = ImmutableArray<GatedPath>.Empty;
}
/// <summary>
/// Canonicalizes slices for deterministic serialization/hashing: trims strings,
/// clamps numeric ranges, sorts collections, and normalizes timestamps to UTC.
/// </summary>
internal static class SliceNormalization
{
    public static ReachabilitySlice Normalize(ReachabilitySlice slice)
    {
        ArgumentNullException.ThrowIfNull(slice);
        return slice with
        {
            Type = string.IsNullOrWhiteSpace(slice.Type) ? SliceSchema.PredicateType : slice.Type.Trim(),
            Inputs = Normalize(slice.Inputs),
            Query = Normalize(slice.Query),
            Subgraph = Normalize(slice.Subgraph),
            Verdict = Normalize(slice.Verdict),
            Manifest = slice.Manifest
        };
    }

    private static SliceInputs Normalize(SliceInputs inputs)
    {
        return inputs with
        {
            GraphDigest = inputs.GraphDigest.Trim(),
            BinaryDigests = NormalizeStrings(inputs.BinaryDigests),
            SbomDigest = string.IsNullOrWhiteSpace(inputs.SbomDigest) ? null : inputs.SbomDigest.Trim(),
            LayerDigests = NormalizeStrings(inputs.LayerDigests)
        };
    }

    private static SliceQuery Normalize(SliceQuery query)
    {
        return query with
        {
            CveId = string.IsNullOrWhiteSpace(query.CveId) ? null : query.CveId.Trim(),
            TargetSymbols = NormalizeStrings(query.TargetSymbols),
            Entrypoints = NormalizeStrings(query.Entrypoints),
            PolicyHash = string.IsNullOrWhiteSpace(query.PolicyHash) ? null : query.PolicyHash.Trim()
        };
    }

    // Nodes sorted by id; edges by (from, to, kind) for a stable canonical order.
    private static SliceSubgraph Normalize(SliceSubgraph subgraph)
    {
        var nodes = subgraph.Nodes
            .Where(n => n is not null)
            .Select(Normalize)
            .OrderBy(n => n.Id, StringComparer.Ordinal)
            .ToImmutableArray();
        var edges = subgraph.Edges
            .Where(e => e is not null)
            .Select(Normalize)
            .OrderBy(e => e.From, StringComparer.Ordinal)
            .ThenBy(e => e.To, StringComparer.Ordinal)
            .ThenBy(e => e.Kind.ToString(), StringComparer.Ordinal)
            .ToImmutableArray();
        return subgraph with { Nodes = nodes, Edges = edges };
    }

    private static SliceNode Normalize(SliceNode node)
    {
        return node with
        {
            Id = node.Id.Trim(),
            Symbol = node.Symbol.Trim(),
            File = string.IsNullOrWhiteSpace(node.File) ? null : node.File.Trim(),
            Purl = string.IsNullOrWhiteSpace(node.Purl) ? null : node.Purl.Trim(),
            Attributes = NormalizeAttributes(node.Attributes)
        };
    }

    private static SliceEdge Normalize(SliceEdge edge)
    {
        return edge with
        {
            From = edge.From.Trim(),
            To = edge.To.Trim(),
            Confidence = Math.Clamp(edge.Confidence, 0.0, 1.0),
            Evidence = string.IsNullOrWhiteSpace(edge.Evidence) ? null : edge.Evidence.Trim(),
            Gate = Normalize(edge.Gate),
            Observed = Normalize(edge.Observed)
        };
    }

    private static SliceGateInfo? Normalize(SliceGateInfo? gate)
    {
        if (gate is null)
        {
            return null;
        }
        return gate with
        {
            Condition = gate.Condition.Trim()
        };
    }

    private static ObservedEdgeMetadata? Normalize(ObservedEdgeMetadata? observed)
    {
        if (observed is null)
        {
            return null;
        }
        return observed with
        {
            FirstObserved = observed.FirstObserved.ToUniversalTime(),
            LastObserved = observed.LastObserved.ToUniversalTime(),
            ObservationCount = Math.Max(0, observed.ObservationCount),
            TraceDigest = string.IsNullOrWhiteSpace(observed.TraceDigest) ? null : observed.TraceDigest.Trim()
        };
    }

    private static SliceVerdict Normalize(SliceVerdict verdict)
    {
        return verdict with
        {
            Confidence = Math.Clamp(verdict.Confidence, 0.0, 1.0),
            Reasons = NormalizeStrings(verdict.Reasons),
            PathWitnesses = NormalizeStrings(verdict.PathWitnesses),
            UnknownCount = Math.Max(0, verdict.UnknownCount),
            GatedPaths = verdict.GatedPaths
                .Select(Normalize)
                .OrderBy(p => p.PathId, StringComparer.Ordinal)
                .ToImmutableArray()
        };
    }

    private static GatedPath Normalize(GatedPath path)
    {
        return path with
        {
            PathId = path.PathId.Trim(),
            GateType = path.GateType.Trim(),
            GateCondition = path.GateCondition.Trim()
        };
    }

    // Trims, drops blanks, de-duplicates, and sorts ordinally for determinism.
    private static ImmutableArray<string> NormalizeStrings(ImmutableArray<string> values)
    {
        if (values.IsDefaultOrEmpty)
        {
            return ImmutableArray<string>.Empty;
        }
        return values
            .Where(v => !string.IsNullOrWhiteSpace(v))
            .Select(v => v.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(v => v, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    // Duplicate-key safe: distinct raw keys (e.g. "a" and "a ") can collapse to the same
    // trimmed key, which would make ToImmutableSortedDictionary throw ArgumentException.
    // Group by trimmed key and keep the first entry in enumeration order (deterministic).
    private static IReadOnlyDictionary<string, string>? NormalizeAttributes(IReadOnlyDictionary<string, string>? attributes)
    {
        if (attributes is null || attributes.Count == 0)
        {
            return null;
        }
        return attributes
            .Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
            .Select(kv => new KeyValuePair<string, string>(kv.Key.Trim(), kv.Value.Trim()))
            .GroupBy(kv => kv.Key, StringComparer.Ordinal)
            .ToImmutableSortedDictionary(g => g.Key, g => g.First().Value, StringComparer.Ordinal);
    }
}
/// <summary>
/// Serializes enum members as snake_case strings (e.g. FeatureFlag -> "feature_flag"),
/// matching the slice schema's wire format.
/// </summary>
internal sealed class SnakeCaseStringEnumConverter : JsonStringEnumConverter
{
    public SnakeCaseStringEnumConverter() : base(JsonNamingPolicy.SnakeCaseLower)
    {
    }
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Constants for the reachability slice schema.
/// </summary>
public static class SliceSchema
{
    // in-toto predicate type URI for v1 reachability slices.
    public const string PredicateType = "stellaops.dev/predicates/reachability-slice@v1";
    // Canonical JSON Schema location for slice documents.
    public const string JsonSchemaUri = "https://stellaops.dev/schemas/stellaops-slice.v1.schema.json";
    // DSSE envelope payloadType used when signing slices.
    public const string DssePayloadType = "application/vnd.stellaops.slice.v1+json";
}

View File

@@ -0,0 +1,109 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Reachability.Slices;
/// <summary>
/// Derives a slice verdict from candidate path summaries and the number of
/// unknown (unresolved) edges encountered during extraction.
/// </summary>
public sealed class VerdictComputer
{
    /// <summary>
    /// Computes the verdict: Reachable when a path exists with min confidence above the
    /// threshold and no unknown edges; Unreachable when no paths and no unknowns;
    /// Unknown otherwise. Witnesses are de-duplicated and sorted for determinism.
    /// </summary>
    public SliceVerdict Compute(
        IReadOnlyList<SlicePathSummary> paths,
        int unknownEdgeCount,
        SliceVerdictOptions? options = null)
    {
        ArgumentNullException.ThrowIfNull(paths);
        options ??= new SliceVerdictOptions();
        var hasPath = paths.Count > 0;
        // Weakest-link confidence: the smallest per-path minimum across all candidates.
        var minConfidence = hasPath ? paths.Min(p => p.MinConfidence) : 0.0;
        var unknowns = Math.Max(0, unknownEdgeCount);
        SliceVerdictStatus status;
        if (hasPath && minConfidence > options.ReachableThreshold && unknowns == 0)
        {
            status = SliceVerdictStatus.Reachable;
        }
        else if (!hasPath && unknowns == 0)
        {
            status = SliceVerdictStatus.Unreachable;
        }
        else
        {
            status = SliceVerdictStatus.Unknown;
        }
        var confidence = status switch
        {
            SliceVerdictStatus.Reachable => minConfidence,
            SliceVerdictStatus.Unreachable => options.UnreachableConfidence,
            _ => hasPath ? Math.Min(minConfidence, options.UnknownConfidence) : options.UnknownConfidence
        };
        var reasons = BuildReasons(status, hasPath, unknowns, minConfidence, options);
        var witnesses = paths
            .Select(p => p.PathWitness)
            .Where(p => !string.IsNullOrWhiteSpace(p))
            .Select(p => p!.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(p => p, StringComparer.Ordinal)
            .ToImmutableArray();
        return new SliceVerdict
        {
            Status = status,
            Confidence = confidence,
            Reasons = reasons,
            PathWitnesses = witnesses,
            UnknownCount = unknowns
        };
    }

    // Builds the sorted, machine-readable reason codes explaining the status.
    private static ImmutableArray<string> BuildReasons(
        SliceVerdictStatus status,
        bool hasPath,
        int unknowns,
        double minConfidence,
        SliceVerdictOptions options)
    {
        var reasons = new List<string>();
        switch (status)
        {
            case SliceVerdictStatus.Reachable:
                reasons.Add("path_exists_high_confidence");
                break;
            case SliceVerdictStatus.Unreachable:
                reasons.Add("no_paths_found");
                break;
            default:
                if (!hasPath)
                {
                    reasons.Add("no_paths_found_with_unknowns");
                }
                else if (minConfidence < options.UnknownThreshold)
                {
                    reasons.Add("low_confidence_path");
                }
                else
                {
                    reasons.Add("unknown_edges_present");
                }
                break;
        }
        if (unknowns > 0)
        {
            reasons.Add($"unknown_edges:{unknowns}");
        }
        return reasons.OrderBy(r => r, StringComparer.Ordinal).ToImmutableArray();
    }
}
/// <summary>Tunable thresholds for verdict computation.</summary>
public sealed record SliceVerdictOptions
{
    // Minimum path confidence (exclusive) required for a Reachable verdict.
    public double ReachableThreshold { get; init; } = 0.7;
    // Below this, an Unknown verdict cites "low_confidence_path".
    public double UnknownThreshold { get; init; } = 0.5;
    // Confidence reported for Unreachable verdicts.
    public double UnreachableConfidence { get; init; } = 0.9;
    // Confidence ceiling for Unknown verdicts.
    public double UnknownConfidence { get; init; } = 0.4;
}
/// <summary>One candidate path: its id, the confidence of its weakest edge, and an optional witness string.</summary>
public sealed record SlicePathSummary(
    string PathId,
    double MinConfidence,
    string? PathWitness);

View File

@@ -10,6 +10,7 @@
<PackageReference Include="Npgsql" Version="9.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj" />
@@ -17,6 +18,7 @@
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,401 @@
using System.Collections.Immutable;
using StellaOps.Scanner.Reachability.Gates;
namespace StellaOps.Scanner.Reachability.Subgraph;
/// <summary>
/// Inputs for subgraph extraction: the full graph, finding keys to attach, target
/// symbols (matched against node ids, symbol ids, display names, or pkg: purls),
/// optional explicit entrypoints (defaults to graph roots), and analyzer overrides.
/// </summary>
public sealed record ReachabilitySubgraphRequest(
    RichGraph Graph,
    ImmutableArray<string> FindingKeys,
    ImmutableArray<string> TargetSymbols,
    ImmutableArray<string> Entrypoints,
    string? AnalyzerName = null,
    string? AnalyzerVersion = null,
    double Confidence = 0.9,
    string Completeness = "partial");
/// <summary>
/// Extracts a focused subgraph from the full reachability graph.
/// </summary>
public sealed class ReachabilitySubgraphExtractor
{
    private readonly TimeProvider _timeProvider;

    /// <summary>Creates the extractor; <paramref name="timeProvider"/> defaults to the system clock.</summary>
    public ReachabilitySubgraphExtractor(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Extracts the subgraph of nodes that lie on some entrypoint-to-target path
    /// (plus the entrypoints and targets themselves) and returns it normalized.
    /// Returns an empty subgraph when no entrypoints or targets resolve.
    /// </summary>
    public ReachabilitySubgraph Extract(ReachabilitySubgraphRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.Graph);
        var graph = request.Graph;
        var nodeLookup = graph.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
        var entrypoints = ResolveEntrypoints(request, graph, nodeLookup);
        var targets = ResolveTargets(request, graph, nodeLookup);
        if (entrypoints.Count == 0 || targets.Count == 0)
        {
            return BuildEmptySubgraph(request).Normalize();
        }
        var forwardEdges = BuildEdgeLookup(graph.Edges);
        var reverseEdges = BuildReverseEdgeLookup(graph.Edges);
        // A node is on a witness path iff it is forward-reachable from an entrypoint AND
        // can reach a target; entrypoints/targets are always kept for context.
        var reachableFromEntrypoints = Traverse(entrypoints, forwardEdges);
        var canReachTargets = Traverse(targets, reverseEdges);
        var includedNodes = new HashSet<string>(reachableFromEntrypoints, StringComparer.Ordinal);
        includedNodes.IntersectWith(canReachTargets);
        includedNodes.UnionWith(entrypoints);
        includedNodes.UnionWith(targets);
        // Keep an edge only when both endpoints are included AND the edge itself lies on a
        // path (source reachable from an entrypoint, destination able to reach a target).
        // The second condition filters edges that merely touch a kept entrypoint/target.
        // Single pass instead of the previous two chained Where scans.
        var subgraphEdges = graph.Edges
            .Where(e => includedNodes.Contains(e.From) && includedNodes.Contains(e.To)
                && reachableFromEntrypoints.Contains(e.From) && canReachTargets.Contains(e.To))
            .ToList();
        var nodes = includedNodes
            .Where(nodeLookup.ContainsKey)
            .Select(id => MapNode(nodeLookup[id], entrypoints, targets))
            .ToImmutableArray();
        var edges = subgraphEdges
            .Select(MapEdge)
            .ToImmutableArray();
        return new ReachabilitySubgraph
        {
            FindingKeys = request.FindingKeys,
            Nodes = nodes,
            Edges = edges,
            AnalysisMetadata = BuildMetadata(request, graph)
        }.Normalize();
    }

    // Normalized-but-empty result used when entrypoints or targets cannot be resolved.
    private ReachabilitySubgraph BuildEmptySubgraph(ReachabilitySubgraphRequest request)
    {
        return new ReachabilitySubgraph
        {
            FindingKeys = request.FindingKeys,
            Nodes = [],
            Edges = [],
            AnalysisMetadata = BuildMetadata(request, request.Graph)
        };
    }

    // Request-level analyzer overrides win over graph analyzer info; confidence clamped to [0, 1].
    private ReachabilitySubgraphMetadata BuildMetadata(ReachabilitySubgraphRequest request, RichGraph graph)
    {
        var analyzerName = request.AnalyzerName ?? graph.Analyzer.Name;
        var analyzerVersion = request.AnalyzerVersion ?? graph.Analyzer.Version;
        return new ReachabilitySubgraphMetadata
        {
            Analyzer = string.IsNullOrWhiteSpace(analyzerName) ? "reachability" : analyzerName,
            AnalyzerVersion = string.IsNullOrWhiteSpace(analyzerVersion) ? "unknown" : analyzerVersion,
            Confidence = Math.Clamp(request.Confidence, 0.0, 1.0),
            Completeness = string.IsNullOrWhiteSpace(request.Completeness) ? "partial" : request.Completeness,
            GeneratedAt = _timeProvider.GetUtcNow()
        };
    }

    // Explicit request entrypoints (trimmed; must exist in the graph) or, when none
    // were supplied, the graph's declared roots.
    private static HashSet<string> ResolveEntrypoints(
        ReachabilitySubgraphRequest request,
        RichGraph graph,
        Dictionary<string, RichGraphNode> nodeLookup)
    {
        var entrypoints = new HashSet<string>(StringComparer.Ordinal);
        if (!request.Entrypoints.IsDefaultOrEmpty)
        {
            foreach (var entry in request.Entrypoints)
            {
                if (string.IsNullOrWhiteSpace(entry))
                {
                    continue;
                }
                var trimmed = entry.Trim();
                if (nodeLookup.ContainsKey(trimmed))
                {
                    entrypoints.Add(trimmed);
                }
            }
        }
        else
        {
            foreach (var root in graph.Roots ?? Array.Empty<RichGraphRoot>())
            {
                if (string.IsNullOrWhiteSpace(root.Id))
                {
                    continue;
                }
                var trimmed = root.Id.Trim();
                if (nodeLookup.ContainsKey(trimmed))
                {
                    entrypoints.Add(trimmed);
                }
            }
        }
        return entrypoints;
    }

    // Targets may be package purls (case-insensitive purl match) or symbol references
    // matched ordinally against node id, symbol id, or display name.
    private static HashSet<string> ResolveTargets(
        ReachabilitySubgraphRequest request,
        RichGraph graph,
        Dictionary<string, RichGraphNode> nodeLookup)
    {
        var targets = new HashSet<string>(StringComparer.Ordinal);
        if (request.TargetSymbols.IsDefaultOrEmpty)
        {
            return targets;
        }
        foreach (var target in request.TargetSymbols)
        {
            if (string.IsNullOrWhiteSpace(target))
            {
                continue;
            }
            var trimmed = target.Trim();
            if (IsPackageTarget(trimmed))
            {
                foreach (var node in graph.Nodes.Where(n => string.Equals(n.Purl, trimmed, StringComparison.OrdinalIgnoreCase)))
                {
                    if (!string.IsNullOrWhiteSpace(node.Id))
                    {
                        targets.Add(node.Id);
                    }
                }
                continue;
            }
            foreach (var node in graph.Nodes)
            {
                if (string.Equals(node.Id, trimmed, StringComparison.Ordinal) ||
                    string.Equals(node.SymbolId, trimmed, StringComparison.Ordinal))
                {
                    targets.Add(node.Id);
                }
                else if (!string.IsNullOrWhiteSpace(node.Display) &&
                    string.Equals(node.Display, trimmed, StringComparison.Ordinal))
                {
                    targets.Add(node.Id);
                }
            }
        }
        return targets;
    }

    private static bool IsPackageTarget(string value)
        => value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);

    // Forward adjacency keyed by source node; lists sorted for deterministic traversal.
    private static Dictionary<string, List<RichGraphEdge>> BuildEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
    {
        var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
        foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
        {
            if (!lookup.TryGetValue(edge.From, out var list))
            {
                list = new List<RichGraphEdge>();
                lookup[edge.From] = list;
            }
            list.Add(edge);
        }
        foreach (var list in lookup.Values)
        {
            list.Sort(CompareForward);
        }
        return lookup;
    }

    // Reverse adjacency keyed by destination node; lists sorted for determinism.
    private static Dictionary<string, List<RichGraphEdge>> BuildReverseEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
    {
        var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
        foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
        {
            if (!lookup.TryGetValue(edge.To, out var list))
            {
                list = new List<RichGraphEdge>();
                lookup[edge.To] = list;
            }
            list.Add(edge);
        }
        foreach (var list in lookup.Values)
        {
            list.Sort(CompareReverse);
        }
        return lookup;
    }

    // BFS over either adjacency direction. Each stored edge touches the key node, so the
    // neighbor is whichever endpoint differs from the current node (To for forward
    // lookups, From for reverse lookups).
    private static HashSet<string> Traverse(
        HashSet<string> seeds,
        Dictionary<string, List<RichGraphEdge>> edgeLookup)
    {
        var visited = new HashSet<string>(seeds, StringComparer.Ordinal);
        var queue = new Queue<string>(seeds);
        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            if (!edgeLookup.TryGetValue(current, out var edges))
            {
                continue;
            }
            foreach (var edge in edges)
            {
                var next = edge.From == current ? edge.To : edge.From;
                if (!visited.Add(next))
                {
                    continue;
                }
                queue.Enqueue(next);
            }
        }
        return visited;
    }

    // Entrypoint classification wins when a node is both an entrypoint and a target.
    private static ReachabilitySubgraphNode MapNode(
        RichGraphNode node,
        HashSet<string> entrypoints,
        HashSet<string> targets)
    {
        var type = ReachabilitySubgraphNodeType.Call;
        if (entrypoints.Contains(node.Id))
        {
            type = ReachabilitySubgraphNodeType.Entrypoint;
        }
        else if (targets.Contains(node.Id))
        {
            type = ReachabilitySubgraphNodeType.Vulnerable;
        }
        return new ReachabilitySubgraphNode
        {
            Id = node.Id,
            Symbol = node.Display ?? node.SymbolId ?? node.Id,
            Type = type,
            File = ExtractAttribute(node, "file") ?? ExtractAttribute(node, "source_file"),
            Line = ExtractIntAttribute(node, "line"),
            Purl = node.Purl,
            Attributes = node.Attributes
        };
    }

    private static ReachabilitySubgraphEdge MapEdge(RichGraphEdge edge)
    {
        return new ReachabilitySubgraphEdge
        {
            From = edge.From,
            To = edge.To,
            Type = string.IsNullOrWhiteSpace(edge.Kind) ? "call" : edge.Kind,
            Confidence = edge.Confidence,
            Evidence = edge.Evidence?.FirstOrDefault(),
            Gate = MapGate(edge.Gates)
        };
    }

    // Chooses the strongest detected gate (highest confidence, ties broken by detail).
    private static ReachabilitySubgraphGate? MapGate(IReadOnlyList<DetectedGate>? gates)
    {
        if (gates is null || gates.Count == 0)
        {
            return null;
        }
        var gate = gates
            .OrderByDescending(g => g.Confidence)
            .ThenBy(g => g.Detail, StringComparer.Ordinal)
            .First();
        return new ReachabilitySubgraphGate
        {
            GateType = ReachabilityGateMappings.ToGateTypeString(gate.Type),
            Condition = gate.Detail,
            GuardSymbol = gate.GuardSymbol,
            Confidence = gate.Confidence,
            SourceFile = gate.SourceFile,
            Line = gate.LineNumber,
            DetectionMethod = gate.DetectionMethod
        };
    }

    // Ordering for forward adjacency lists: destination, kind, confidence.
    private static int CompareForward(RichGraphEdge left, RichGraphEdge right)
    {
        var result = string.Compare(left.To, right.To, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        return left.Confidence.CompareTo(right.Confidence);
    }

    // Ordering for reverse adjacency lists: origin, kind, confidence.
    private static int CompareReverse(RichGraphEdge left, RichGraphEdge right)
    {
        var result = string.Compare(left.From, right.From, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
        if (result != 0)
        {
            return result;
        }
        return left.Confidence.CompareTo(right.Confidence);
    }

    // Null-safe lookup into the node's optional attribute bag.
    private static string? ExtractAttribute(RichGraphNode node, string key)
    {
        if (node.Attributes is not null && node.Attributes.TryGetValue(key, out var value))
        {
            return value;
        }
        return null;
    }

    // Parses with the invariant culture so results do not vary with host locale (CA1305).
    private static int? ExtractIntAttribute(RichGraphNode node, string key)
    {
        var value = ExtractAttribute(node, key);
        if (value is not null &&
            int.TryParse(value, System.Globalization.NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        return null;
    }
}

View File

@@ -0,0 +1,272 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Reachability.Gates;
namespace StellaOps.Scanner.Reachability.Subgraph;
/// <summary>
/// Portable reachability subgraph representation.
/// </summary>
public sealed record ReachabilitySubgraph
{
    // Schema version of this subgraph document.
    [JsonPropertyName("version")]
    public string Version { get; init; } = "1.0";
    // Finding identifiers this subgraph supports.
    [JsonPropertyName("findingKeys")]
    public ImmutableArray<string> FindingKeys { get; init; } = [];
    [JsonPropertyName("nodes")]
    public ImmutableArray<ReachabilitySubgraphNode> Nodes { get; init; } = [];
    [JsonPropertyName("edges")]
    public ImmutableArray<ReachabilitySubgraphEdge> Edges { get; init; } = [];
    [JsonPropertyName("analysisMetadata")]
    public ReachabilitySubgraphMetadata? AnalysisMetadata { get; init; }
    /// <summary>Returns a canonical copy (trimmed, clamped, deterministically sorted).</summary>
    public ReachabilitySubgraph Normalize() => ReachabilitySubgraphNormalizer.Normalize(this);
}
/// <summary>
/// Subgraph node.
/// </summary>
public sealed record ReachabilitySubgraphNode
{
    [JsonPropertyName("id")]
    public required string Id { get; init; }
    [JsonPropertyName("type")]
    [JsonConverter(typeof(JsonStringEnumConverter<ReachabilitySubgraphNodeType>))]
    public required ReachabilitySubgraphNodeType Type { get; init; }
    [JsonPropertyName("symbol")]
    public required string Symbol { get; init; }
    [JsonPropertyName("file")]
    public string? File { get; init; }
    [JsonPropertyName("line")]
    public int? Line { get; init; }
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }
    // Free-form metadata; normalization sorts keys and drops empty/whitespace entries.
    [JsonPropertyName("attributes")]
    public IReadOnlyDictionary<string, string>? Attributes { get; init; }
}
/// <summary>
/// Subgraph node type.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<ReachabilitySubgraphNodeType>))]
public enum ReachabilitySubgraphNodeType
{
    [JsonStringEnumMemberName("entrypoint")]
    Entrypoint,
    [JsonStringEnumMemberName("call")]
    Call,
    [JsonStringEnumMemberName("vulnerable")]
    Vulnerable,
    [JsonStringEnumMemberName("unknown")]
    Unknown
}
/// <summary>
/// Subgraph edge.
/// </summary>
public sealed record ReachabilitySubgraphEdge
{
    [JsonPropertyName("from")]
    public required string From { get; init; }
    [JsonPropertyName("to")]
    public required string To { get; init; }
    // Edge kind as a string (e.g. "call"); defaults to "call" during normalization.
    [JsonPropertyName("type")]
    public required string Type { get; init; }
    // Clamped to [0, 1] during normalization.
    [JsonPropertyName("confidence")]
    public double Confidence { get; init; }
    [JsonPropertyName("evidence")]
    public string? Evidence { get; init; }
    [JsonPropertyName("gate")]
    public ReachabilitySubgraphGate? Gate { get; init; }
}
/// <summary>
/// Gate metadata associated with a subgraph edge.
/// </summary>
public sealed record ReachabilitySubgraphGate
{
    [JsonPropertyName("gateType")]
    public required string GateType { get; init; }
    [JsonPropertyName("condition")]
    public required string Condition { get; init; }
    [JsonPropertyName("guardSymbol")]
    public required string GuardSymbol { get; init; }
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }
    [JsonPropertyName("sourceFile")]
    public string? SourceFile { get; init; }
    [JsonPropertyName("line")]
    public int? Line { get; init; }
    [JsonPropertyName("detectionMethod")]
    public string? DetectionMethod { get; init; }
}
/// <summary>
/// Metadata about the subgraph extraction.
/// </summary>
public sealed record ReachabilitySubgraphMetadata
{
    [JsonPropertyName("analyzer")]
    public required string Analyzer { get; init; }
    [JsonPropertyName("analyzerVersion")]
    public required string AnalyzerVersion { get; init; }
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }
    [JsonPropertyName("completeness")]
    public required string Completeness { get; init; }
    // Normalized to UTC.
    [JsonPropertyName("generatedAt")]
    public required DateTimeOffset GeneratedAt { get; init; }
}
/// <summary>
/// Canonicalizes a subgraph: trims strings, clamps confidences, sorts nodes/edges/keys
/// deterministically, and normalizes timestamps to UTC.
/// </summary>
internal static class ReachabilitySubgraphNormalizer
{
    public static ReachabilitySubgraph Normalize(ReachabilitySubgraph subgraph)
    {
        ArgumentNullException.ThrowIfNull(subgraph);
        var nodes = subgraph.Nodes
            .Where(n => n is not null)
            .Select(Normalize)
            .OrderBy(n => n.Id, StringComparer.Ordinal)
            .ToImmutableArray();
        var edges = subgraph.Edges
            .Where(e => e is not null)
            .Select(Normalize)
            .OrderBy(e => e.From, StringComparer.Ordinal)
            .ThenBy(e => e.To, StringComparer.Ordinal)
            .ThenBy(e => e.Type, StringComparer.Ordinal)
            .ToImmutableArray();
        var findingKeys = subgraph.FindingKeys
            .Where(k => !string.IsNullOrWhiteSpace(k))
            .Select(k => k.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(k => k, StringComparer.Ordinal)
            .ToImmutableArray();
        return subgraph with
        {
            Version = string.IsNullOrWhiteSpace(subgraph.Version) ? "1.0" : subgraph.Version.Trim(),
            FindingKeys = findingKeys,
            Nodes = nodes,
            Edges = edges,
            AnalysisMetadata = Normalize(subgraph.AnalysisMetadata)
        };
    }

    private static ReachabilitySubgraphNode Normalize(ReachabilitySubgraphNode node)
    {
        return node with
        {
            Id = node.Id.Trim(),
            Symbol = node.Symbol.Trim(),
            File = string.IsNullOrWhiteSpace(node.File) ? null : node.File.Trim(),
            Purl = string.IsNullOrWhiteSpace(node.Purl) ? null : node.Purl.Trim(),
            Attributes = NormalizeAttributes(node.Attributes)
        };
    }

    private static ReachabilitySubgraphEdge Normalize(ReachabilitySubgraphEdge edge)
    {
        return edge with
        {
            From = edge.From.Trim(),
            To = edge.To.Trim(),
            Type = string.IsNullOrWhiteSpace(edge.Type) ? "call" : edge.Type.Trim(),
            Confidence = Math.Clamp(edge.Confidence, 0.0, 1.0),
            Evidence = string.IsNullOrWhiteSpace(edge.Evidence) ? null : edge.Evidence.Trim(),
            Gate = Normalize(edge.Gate)
        };
    }

    private static ReachabilitySubgraphGate? Normalize(ReachabilitySubgraphGate? gate)
    {
        if (gate is null)
        {
            return null;
        }
        return gate with
        {
            GateType = gate.GateType.Trim(),
            Condition = gate.Condition.Trim(),
            GuardSymbol = gate.GuardSymbol.Trim(),
            DetectionMethod = string.IsNullOrWhiteSpace(gate.DetectionMethod) ? null : gate.DetectionMethod.Trim(),
            SourceFile = string.IsNullOrWhiteSpace(gate.SourceFile) ? null : gate.SourceFile.Trim(),
            Confidence = Math.Clamp(gate.Confidence, 0.0, 1.0)
        };
    }

    private static ReachabilitySubgraphMetadata? Normalize(ReachabilitySubgraphMetadata? metadata)
    {
        if (metadata is null)
        {
            return null;
        }
        return metadata with
        {
            Analyzer = metadata.Analyzer.Trim(),
            AnalyzerVersion = metadata.AnalyzerVersion.Trim(),
            Completeness = metadata.Completeness.Trim(),
            Confidence = Math.Clamp(metadata.Confidence, 0.0, 1.0),
            GeneratedAt = metadata.GeneratedAt.ToUniversalTime()
        };
    }

    // Duplicate-key safe: distinct raw keys (e.g. "a" and "a ") can collapse to the same
    // trimmed key, which would make ToImmutableSortedDictionary throw ArgumentException.
    // Group by trimmed key and keep the first entry in enumeration order (deterministic).
    private static IReadOnlyDictionary<string, string>? NormalizeAttributes(IReadOnlyDictionary<string, string>? attributes)
    {
        if (attributes is null || attributes.Count == 0)
        {
            return null;
        }
        return attributes
            .Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
            .Select(kv => new KeyValuePair<string, string>(kv.Key.Trim(), kv.Value.Trim()))
            .GroupBy(kv => kv.Key, StringComparer.Ordinal)
            .ToImmutableSortedDictionary(g => g.Key, g => g.First().Value, StringComparer.Ordinal);
    }
}
/// <summary>Maps gate enum values to the wire-format strings used in subgraph JSON.</summary>
internal static class ReachabilityGateMappings
{
    public static string ToGateTypeString(GateType type) => type switch
    {
        GateType.AuthRequired => "auth",
        GateType.FeatureFlag => "feature_flag",
        GateType.AdminOnly => "admin_only",
        GateType.NonDefaultConfig => "non_default_config",
        _ => "unknown"
    };
}

View File

@@ -0,0 +1,35 @@
# AGENTS - Scanner Runtime Library
## Mission
Capture and normalize runtime trace evidence (eBPF/ETW) and merge it with static reachability graphs to produce observed-path evidence.
## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (deterministic tests; offline fixtures).
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/modules/zastava/architecture.md`
- `docs/reachability/runtime-facts.md`
- `docs/reachability/runtime-static-union-schema.md`
## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Runtime/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Runtime.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.
## Determinism & Offline Rules
- Normalize timestamps to UTC; stable ordering of events and edges.
- Offline-first; no network access in collectors or ingestion.
- Prefer configuration-driven retention policies with deterministic pruning.
## Testing Expectations
- Unit tests for ingestion, merge, and retention logic.
- Use deterministic fixtures (fixed timestamps and IDs).
## Workflow
- Update sprint status on task transitions.
- Log design/decision changes in sprint Execution Log.

View File

@@ -0,0 +1,150 @@
using Microsoft.Extensions.Logging;
using System.Runtime.InteropServices;
namespace StellaOps.Scanner.Runtime.Ebpf;
/// <summary>
/// eBPF-based trace collector for Linux using uprobe tracing.
/// </summary>
public sealed class EbpfTraceCollector : ITraceCollector
{
private readonly ILogger<EbpfTraceCollector> _logger;
private readonly ISymbolResolver _symbolResolver;
private readonly TimeProvider _timeProvider;
// NOTE(review): _isRunning is read and written without synchronization; concurrent
// Start/Stop calls could race — confirm the lifecycle is single-threaded or add a gate.
private bool _isRunning;
// Baseline stats. StartedAt uses DateTimeOffset.UtcNow here because _timeProvider is not
// yet assigned during field initialization; StartAsync overwrites it with the injected clock.
private TraceCollectorStats _stats = new TraceCollectorStats
{
    EventsCollected = 0,
    EventsDropped = 0,
    BytesProcessed = 0,
    StartedAt = DateTimeOffset.UtcNow
};
/// <summary>Creates the collector; <paramref name="timeProvider"/> defaults to the system clock.</summary>
public EbpfTraceCollector(
    ILogger<EbpfTraceCollector> logger,
    ISymbolResolver symbolResolver,
    TimeProvider? timeProvider = null)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _symbolResolver = symbolResolver ?? throw new ArgumentNullException(nameof(symbolResolver));
    _timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Starts collection for the configured process/container. Linux-only: throws
/// <see cref="PlatformNotSupportedException"/> elsewhere and
/// <see cref="InvalidOperationException"/> when already running. Currently a stub:
/// marks the collector running and stamps stats, but attaches no probes yet (see TODO).
/// </summary>
public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(config);
    if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
    {
        throw new PlatformNotSupportedException("eBPF tracing is only supported on Linux");
    }
    if (_isRunning)
    {
        throw new InvalidOperationException("Collector is already running");
    }
    _logger.LogInformation(
        "Starting eBPF trace collector for PID {Pid}, container {Container}",
        config.TargetPid,
        config.TargetContainerId ?? "all");
    // TODO: Actual eBPF program loading and uprobe attachment
    // This would use libbpf or bpf2go to:
    // 1. Load BPF program into kernel
    // 2. Attach uprobes to target functions
    // 3. Set up ringbuffer for event streaming
    // 4. Handle ASLR via /proc/pid/maps
    _isRunning = true;
    _stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
    _logger.LogInformation("eBPF trace collector started successfully");
    return Task.CompletedTask;
}
/// <summary>Stops collection (no-op when not running) and finalizes duration stats.</summary>
public Task StopAsync(CancellationToken cancellationToken = default)
{
    if (!_isRunning)
    {
        return Task.CompletedTask;
    }
    _logger.LogInformation("Stopping eBPF trace collector");
    // TODO: Detach uprobes and cleanup BPF resources
    _isRunning = false;
    _stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
    _logger.LogInformation(
        "eBPF trace collector stopped. Events: {Events}, Dropped: {Dropped}",
        _stats.EventsCollected,
        _stats.EventsDropped);
    return Task.CompletedTask;
}
public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
if (!_isRunning)
{
yield break;
}
// TODO: Read events from eBPF ringbuffer
// This is a placeholder - actual implementation would:
// 1. Poll ringbuffer for events
// 2. Resolve symbols using /proc/kallsyms and binary debug info
// 3. Handle container namespace awareness
// 4. Apply rate limiting
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
yield break;
}
public TraceCollectorStats GetStatistics() => _stats;
public async ValueTask DisposeAsync()
{
await StopAsync().ConfigureAwait(false);
}
}
/// <summary>
/// Symbol resolver for eBPF events.
/// </summary>
public interface ISymbolResolver
{
/// <summary>
/// Resolves a code address within the given process to a symbol name.
/// </summary>
/// <param name="pid">Target process ID.</param>
/// <param name="address">Virtual address to resolve.</param>
/// <param name="cancellationToken">Token to cancel the resolution.</param>
/// <returns>The resolved symbol name; implementations may return a synthesized placeholder.</returns>
Task<string> ResolveSymbolAsync(uint pid, ulong address, CancellationToken cancellationToken = default);
}
/// <summary>
/// Symbol resolver implementation using /proc and binary debug info.
/// Placeholder: currently synthesizes a name from the raw address.
/// </summary>
public sealed class LinuxSymbolResolver : ISymbolResolver
{
    private readonly ILogger<LinuxSymbolResolver> _logger;

    public LinuxSymbolResolver(ILogger<LinuxSymbolResolver> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Resolves <paramref name="address"/> in process <paramref name="pid"/> to a symbol name.
    /// Returns a synthesized "func_0x..." placeholder until real resolution is implemented.
    /// </summary>
    public Task<string> ResolveSymbolAsync(
        uint pid,
        ulong address,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // TODO: Actual symbol resolution:
        // 1. Read /proc/{pid}/maps to find binary containing address
        // 2. Adjust for ASLR offset
        // 3. Use libdwarf or addr2line to resolve symbol
        // 4. Cache results for performance
        // The previous placeholder awaited Task.Delay(1) on every call, adding
        // artificial latency to a per-event hot path; complete synchronously instead.
        return Task.FromResult($"func_0x{address:x}");
    }
}

View File

@@ -0,0 +1,112 @@
using Microsoft.Extensions.Logging;
using System.Runtime.InteropServices;
namespace StellaOps.Scanner.Runtime.Etw;
/// <summary>
/// ETW-based trace collector for Windows.
/// Placeholder implementation: ETW session setup and event processing are still
/// TODO; lifecycle and statistics handling work.
/// </summary>
public sealed class EtwTraceCollector : ITraceCollector
{
    private readonly ILogger<EtwTraceCollector> _logger;
    private readonly TimeProvider _timeProvider;
    private bool _isRunning;
    private TraceCollectorStats _stats;

    public EtwTraceCollector(
        ILogger<EtwTraceCollector> logger,
        TimeProvider? timeProvider = null)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;

        // Seed stats from the injected clock. The previous field initializer used
        // DateTimeOffset.UtcNow directly, bypassing TimeProvider and breaking
        // deterministic tests driven by a fake clock.
        _stats = new TraceCollectorStats
        {
            EventsCollected = 0,
            EventsDropped = 0,
            BytesProcessed = 0,
            StartedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Starts collection. Throws <see cref="PlatformNotSupportedException"/> off-Windows
    /// and <see cref="InvalidOperationException"/> when already running.
    /// </summary>
    public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(config);
        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            throw new PlatformNotSupportedException("ETW tracing is only supported on Windows");
        }
        if (_isRunning)
        {
            throw new InvalidOperationException("Collector is already running");
        }
        _logger.LogInformation(
            "Starting ETW trace collector for PID {Pid}",
            config.TargetPid);
        // TODO: Actual ETW session setup
        // This would use TraceEvent or Microsoft.Diagnostics.Tracing.TraceEvent to:
        // 1. Create ETW session
        // 2. Subscribe to Microsoft-Windows-DotNETRuntime provider
        // 3. Subscribe to native call events
        // 4. Enable stack walking
        // 5. Filter by process ID
        _isRunning = true;
        _stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
        _logger.LogInformation("ETW trace collector started successfully");
        return Task.CompletedTask;
    }

    /// <summary>Stops collection and finalizes the run duration; no-op when not running.</summary>
    public Task StopAsync(CancellationToken cancellationToken = default)
    {
        if (!_isRunning)
        {
            return Task.CompletedTask;
        }
        _logger.LogInformation("Stopping ETW trace collector");
        // TODO: Stop ETW session and cleanup
        _isRunning = false;
        _stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
        _logger.LogInformation(
            "ETW trace collector stopped. Events: {Events}, Dropped: {Dropped}",
            _stats.EventsCollected,
            _stats.EventsDropped);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Streams runtime call events. Currently yields nothing (placeholder) and
    /// returns immediately when the collector is not running.
    /// </summary>
    public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        if (!_isRunning)
        {
            yield break;
        }
        // TODO: Process ETW events
        // This is a placeholder - actual implementation would:
        // 1. Subscribe to ETW event stream
        // 2. Process CLR and native method events
        // 3. Resolve symbols using DbgHelp
        // 4. Correlate stack traces
        // 5. Apply rate limiting
        await Task.Delay(100, cancellationToken).ConfigureAwait(false);
        yield break;
    }

    /// <summary>Returns the current statistics snapshot.</summary>
    public TraceCollectorStats GetStatistics() => _stats;

    /// <summary>Stops the collector on disposal.</summary>
    public async ValueTask DisposeAsync()
    {
        await StopAsync().ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,136 @@
namespace StellaOps.Scanner.Runtime;
/// <summary>
/// Runtime call event captured by trace collector.
/// </summary>
public sealed record RuntimeCallEvent
{
/// <summary>
/// Nanoseconds since boot (Linux) or UTC timestamp (Windows).
/// NOTE(review): mixed epoch/units in a single field forces consumers to branch
/// on platform — consider normalizing at capture time. Confirm with collectors.
/// </summary>
public required ulong Timestamp { get; init; }
/// <summary>
/// Process ID.
/// </summary>
public required uint Pid { get; init; }
/// <summary>
/// Thread ID.
/// </summary>
public required uint Tid { get; init; }
/// <summary>
/// Caller function address.
/// </summary>
public required ulong CallerAddress { get; init; }
/// <summary>
/// Callee function address.
/// </summary>
public required ulong CalleeAddress { get; init; }
/// <summary>
/// Resolved caller symbol name.
/// </summary>
public required string CallerSymbol { get; init; }
/// <summary>
/// Resolved callee symbol name.
/// </summary>
public required string CalleeSymbol { get; init; }
/// <summary>
/// Binary path containing the symbols.
/// </summary>
public required string BinaryPath { get; init; }
/// <summary>
/// Container ID if running in container.
/// </summary>
public string? ContainerId { get; init; }
/// <summary>
/// Stack trace if available.
/// </summary>
public IReadOnlyList<ulong>? StackTrace { get; init; }
}
/// <summary>
/// Configuration for trace collector.
/// </summary>
public sealed record TraceCollectorConfig
{
/// <summary>
/// Target process ID to trace (0 = all processes).
/// </summary>
public uint TargetPid { get; init; }
/// <summary>
/// Target container ID to trace.
/// </summary>
public string? TargetContainerId { get; init; }
/// <summary>
/// Symbol patterns to trace (glob patterns).
/// </summary>
public IReadOnlyList<string>? SymbolPatterns { get; init; }
/// <summary>
/// Binary paths to trace.
/// </summary>
public IReadOnlyList<string>? BinaryPaths { get; init; }
/// <summary>
/// Maximum events per second (rate limiting).
/// </summary>
public int MaxEventsPerSecond { get; init; } = 10_000;
/// <summary>
/// Event buffer size.
/// NOTE(review): unit (events vs. bytes) is not established here — confirm with
/// the collector implementation before documenting further.
/// </summary>
public int BufferSize { get; init; } = 8192;
/// <summary>
/// Enable stack trace capture.
/// </summary>
public bool CaptureStackTraces { get; init; }
}
/// <summary>
/// Platform-agnostic trace collector interface.
/// </summary>
public interface ITraceCollector : IAsyncDisposable
{
/// <summary>
/// Start collecting runtime traces.
/// </summary>
Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default);
/// <summary>
/// Stop collecting traces.
/// </summary>
Task StopAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Get stream of runtime call events.
/// </summary>
IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Get collector statistics.
/// </summary>
TraceCollectorStats GetStatistics();
}
/// <summary>
/// Trace collector statistics.
/// </summary>
public sealed record TraceCollectorStats
{
/// <summary>Total events collected so far.</summary>
public required long EventsCollected { get; init; }
/// <summary>Events dropped by the collector (e.g. when rate limits apply).</summary>
public required long EventsDropped { get; init; }
/// <summary>Total bytes of event data processed.</summary>
public required long BytesProcessed { get; init; }
/// <summary>UTC time the collector was started.</summary>
public required DateTimeOffset StartedAt { get; init; }
/// <summary>Total run duration; set when the collector stops, null while running.</summary>
public TimeSpan? Duration { get; init; }
}

View File

@@ -0,0 +1,74 @@
namespace StellaOps.Scanner.Runtime.Ingestion;
/// <summary>
/// Normalized runtime trace for storage.
/// </summary>
public sealed record NormalizedTrace
{
/// <summary>Identifier of this trace (derived from scan ID and event count at ingestion).</summary>
public required string TraceId { get; init; }
/// <summary>Scan this trace belongs to.</summary>
public required string ScanId { get; init; }
/// <summary>UTC time the trace was normalized.</summary>
public required DateTimeOffset CollectedAt { get; init; }
/// <summary>Deduplicated call edges (ingestion emits them sorted by From, then To).</summary>
public required IReadOnlyList<RuntimeCallEdge> Edges { get; init; }
/// <summary>Process/binary metadata for the trace.</summary>
public required TraceMetadata Metadata { get; init; }
}
/// <summary>
/// Runtime call edge.
/// </summary>
public sealed record RuntimeCallEdge
{
/// <summary>Caller symbol name.</summary>
public required string From { get; init; }
/// <summary>Callee symbol name.</summary>
public required string To { get; init; }
/// <summary>How many times this (From, To) call was observed.</summary>
public required ulong ObservationCount { get; init; }
/// <summary>Timestamp of the first observation of this edge.</summary>
public required DateTimeOffset FirstObserved { get; init; }
/// <summary>Timestamp of the most recent observation of this edge.</summary>
public required DateTimeOffset LastObserved { get; init; }
/// <summary>Optional raw stack-trace addresses associated with the edge.</summary>
public IReadOnlyList<ulong>? StackTraces { get; init; }
}
/// <summary>
/// Trace metadata.
/// </summary>
public sealed record TraceMetadata
{
/// <summary>Process ID the events were captured from.</summary>
public required uint ProcessId { get; init; }
/// <summary>Path of the binary the events originated in.</summary>
public required string BinaryPath { get; init; }
/// <summary>Span between the first and last observed event.</summary>
public required TimeSpan Duration { get; init; }
/// <summary>Total number of raw events folded into this trace.</summary>
public required long EventCount { get; init; }
/// <summary>Container ID when the traced process ran in a container.</summary>
public string? ContainerId { get; init; }
/// <summary>Version of the collector that produced the events, if known.</summary>
public string? CollectorVersion { get; init; }
}
/// <summary>
/// Service for ingesting and storing runtime traces.
/// </summary>
public interface ITraceIngestionService
{
/// <summary>
/// Ingest runtime call events and normalize for storage.
/// </summary>
Task<NormalizedTrace> IngestAsync(
IAsyncEnumerable<RuntimeCallEvent> events,
string scanId,
CancellationToken cancellationToken = default);
/// <summary>
/// Store normalized trace.
/// </summary>
Task<string> StoreAsync(
NormalizedTrace trace,
CancellationToken cancellationToken = default);
/// <summary>
/// Retrieve trace by ID.
/// </summary>
Task<NormalizedTrace?> GetTraceAsync(
string traceId,
CancellationToken cancellationToken = default);
/// <summary>
/// Get all traces for a scan.
/// </summary>
Task<IReadOnlyList<NormalizedTrace>> GetTracesForScanAsync(
string scanId,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,187 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Cache.Abstractions;
using System.Security.Cryptography;
namespace StellaOps.Scanner.Runtime.Ingestion;
/// <summary>
/// Service for ingesting runtime traces: folds raw call events into deduplicated
/// edges and persists normalized traces in the content-addressable store (CAS).
/// </summary>
public sealed class TraceIngestionService : ITraceIngestionService
{
    private readonly IFileContentAddressableStore _cas;
    private readonly ILogger<TraceIngestionService> _logger;
    private readonly TimeProvider _timeProvider;

    public TraceIngestionService(
        IFileContentAddressableStore cas,
        ILogger<TraceIngestionService> logger,
        TimeProvider? timeProvider = null)
    {
        _cas = cas ?? throw new ArgumentNullException(nameof(cas));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Consumes <paramref name="events"/>, merging identical (caller, callee) pairs
    /// into edges with observation counts and first/last-seen timestamps. Edges are
    /// sorted ordinally by (From, To) so serialized output is deterministic.
    /// </summary>
    public async Task<NormalizedTrace> IngestAsync(
        IAsyncEnumerable<RuntimeCallEvent> events,
        string scanId,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(events);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);

        var edgeMap = new Dictionary<(string, string), RuntimeCallEdgeBuilder>();
        var eventCount = 0L;
        var firstEvent = (DateTimeOffset?)null;
        var lastEvent = (DateTimeOffset?)null;
        uint? pid = null;
        string? binaryPath = null;

        await foreach (var evt in events.WithCancellation(cancellationToken))
        {
            eventCount++;
            // Nanoseconds -> milliseconds. NOTE(review): this treats Timestamp as
            // Unix-epoch nanoseconds, but the collector contract documents
            // "nanoseconds since boot" on Linux — confirm the epoch at the producer.
            var timestamp = DateTimeOffset.FromUnixTimeMilliseconds((long)(evt.Timestamp / 1_000_000));
            firstEvent ??= timestamp;
            lastEvent = timestamp;
            pid ??= evt.Pid;
            binaryPath ??= evt.BinaryPath;

            var key = (evt.CallerSymbol, evt.CalleeSymbol);
            if (edgeMap.TryGetValue(key, out var builder))
            {
                builder.LastObserved = timestamp;
                builder.ObservationCount++;
            }
            else
            {
                edgeMap[key] = new RuntimeCallEdgeBuilder
                {
                    From = evt.CallerSymbol,
                    To = evt.CalleeSymbol,
                    FirstObserved = timestamp,
                    LastObserved = timestamp,
                    ObservationCount = 1
                };
            }
        }

        // Ordinal comparers keep ordering culture-independent (determinism rule).
        var edges = edgeMap.Values
            .Select(b => new RuntimeCallEdge
            {
                From = b.From,
                To = b.To,
                ObservationCount = b.ObservationCount,
                FirstObserved = b.FirstObserved,
                LastObserved = b.LastObserved
            })
            .OrderBy(e => e.From, StringComparer.Ordinal)
            .ThenBy(e => e.To, StringComparer.Ordinal)
            .ToList();

        var collectedAt = _timeProvider.GetUtcNow();
        var duration = (lastEvent ?? collectedAt) - (firstEvent ?? collectedAt);
        var trace = new NormalizedTrace
        {
            TraceId = GenerateTraceId(scanId, eventCount, collectedAt),
            ScanId = scanId,
            CollectedAt = collectedAt,
            Edges = edges,
            Metadata = new TraceMetadata
            {
                ProcessId = pid ?? 0,
                BinaryPath = binaryPath ?? "unknown",
                Duration = duration,
                EventCount = eventCount
            }
        };
        _logger.LogInformation(
            "Ingested trace {TraceId} for scan {ScanId}: {EdgeCount} edges from {EventCount} events",
            trace.TraceId,
            scanId,
            edges.Count,
            eventCount);
        return trace;
    }

    /// <summary>Serializes the trace to JSON and stores it under a key derived from its ID.</summary>
    public async Task<string> StoreAsync(
        NormalizedTrace trace,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(trace);
        var json = System.Text.Json.JsonSerializer.Serialize(trace);
        var bytes = System.Text.Encoding.UTF8.GetBytes(json);
        await using var stream = new MemoryStream(bytes, writable: false);
        var casKey = $"trace_{trace.TraceId}";
        await _cas.PutAsync(new FileCasPutRequest(casKey, stream, leaveOpen: false), cancellationToken)
            .ConfigureAwait(false);
        _logger.LogInformation("Stored trace {TraceId} in CAS with key {CasKey}", trace.TraceId, casKey);
        return trace.TraceId;
    }

    /// <summary>Loads a trace by ID; returns null when missing or unreadable (failures are logged).</summary>
    public async Task<NormalizedTrace?> GetTraceAsync(
        string traceId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(traceId);
        var casKey = $"trace_{traceId}";
        try
        {
            var bytes = await _cas.GetAsync(new FileCasGetRequest(casKey), cancellationToken)
                .ConfigureAwait(false);
            if (bytes is null)
            {
                return null;
            }
            return System.Text.Json.JsonSerializer.Deserialize<NormalizedTrace>(bytes);
        }
        catch (Exception ex)
        {
            // Deliberate best-effort read: corrupt payloads degrade to null, not a crash.
            _logger.LogError(ex, "Error retrieving trace {TraceId}", traceId);
            return null;
        }
    }

    /// <summary>Lists traces for a scan. Not yet implemented — returns an empty list.</summary>
    public Task<IReadOnlyList<NormalizedTrace>> GetTracesForScanAsync(
        string scanId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        // TODO: Implement scan-to-trace index.
        // (The previous placeholder awaited Task.Delay(1); removed as pure overhead.)
        return Task.FromResult<IReadOnlyList<NormalizedTrace>>(Array.Empty<NormalizedTrace>());
    }

    /// <summary>
    /// Derives the trace ID from scan ID, event count, and the injected clock's
    /// timestamp. Previously this hashed DateTimeOffset.UtcNow.Ticks, which made
    /// IDs non-reproducible even under a fake TimeProvider — a determinism defect.
    /// </summary>
    private static string GenerateTraceId(string scanId, long eventCount, DateTimeOffset collectedAt)
    {
        var input = $"{scanId}|{eventCount}|{collectedAt.UtcTicks}";
        var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input));
        return $"trace_{Convert.ToHexString(hash)[..16].ToLowerInvariant()}";
    }

    // Mutable accumulator used while folding events into edges.
    private sealed class RuntimeCallEdgeBuilder
    {
        public required string From { get; init; }
        public required string To { get; init; }
        public required DateTimeOffset FirstObserved { get; set; }
        public required DateTimeOffset LastObserved { get; set; }
        public required ulong ObservationCount { get; set; }
    }
}

View File

@@ -0,0 +1,62 @@
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Runtime.Ingestion;
namespace StellaOps.Scanner.Runtime.Merge;
/// <summary>
/// Merged graph combining static analysis and runtime observations.
/// </summary>
public sealed record MergedGraph
{
/// <summary>Original static analysis graph (unmodified).</summary>
public required RichGraph StaticGraph { get; init; }
/// <summary>Runtime trace that was merged in.</summary>
public required NormalizedTrace RuntimeTrace { get; init; }
/// <summary>Union of static and runtime edges, with confidence boosts applied.</summary>
public required RichGraph UnionGraph { get; init; }
/// <summary>Edge-level statistics computed during the merge.</summary>
public required MergeStatistics Statistics { get; init; }
}
/// <summary>
/// Statistics from static+runtime merge.
/// </summary>
public sealed record MergeStatistics
{
/// <summary>Number of edges in the static graph.</summary>
public required int StaticEdges { get; init; }
/// <summary>Number of edges in the runtime trace.</summary>
public required int RuntimeEdges { get; init; }
/// <summary>Static edges confirmed by a runtime observation.</summary>
public required int ConfirmedEdges { get; init; }
/// <summary>Runtime edges absent from the static graph (new discoveries).</summary>
public required int NewEdges { get; init; }
/// <summary>Static edges with no runtime observation.</summary>
public required int UnobservedEdges { get; init; }
/// <summary>Confirmed / static edges, as a percentage (0 when the static graph is empty).</summary>
public required double CoveragePercent { get; init; }
}
/// <summary>
/// Edge enrichment from runtime observations.
/// </summary>
public sealed record EdgeEnrichment
{
/// <summary>Whether the edge was observed at runtime.</summary>
public required bool Observed { get; init; }
/// <summary>First runtime observation time; null when unobserved.</summary>
public required DateTimeOffset? FirstObserved { get; init; }
/// <summary>Last runtime observation time; null when unobserved.</summary>
public required DateTimeOffset? LastObserved { get; init; }
/// <summary>Number of runtime observations (0 when unobserved).</summary>
public required ulong ObservationCount { get; init; }
/// <summary>Additive confidence boost applied when the edge is observed.</summary>
public required double ConfidenceBoost { get; init; }
}
/// <summary>
/// Merges static analysis graphs with runtime trace data.
/// </summary>
public interface IStaticRuntimeMerger
{
/// <summary>
/// Merge static graph with runtime trace.
/// </summary>
Task<MergedGraph> MergeAsync(
RichGraph staticGraph,
NormalizedTrace runtimeTrace,
CancellationToken cancellationToken = default);
/// <summary>
/// Enrich static edges with runtime observations.
/// </summary>
Task<IReadOnlyDictionary<string, EdgeEnrichment>> EnrichEdgesAsync(
RichGraph staticGraph,
NormalizedTrace runtimeTrace,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,186 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Runtime.Ingestion;
namespace StellaOps.Scanner.Runtime.Merge;
/// <summary>
/// Merges static analysis with runtime observations: statically known edges that
/// are confirmed at runtime get a confidence boost, runtime-only edges are added
/// as new discoveries, and coverage statistics are computed over the union.
/// </summary>
public sealed class StaticRuntimeMerger : IStaticRuntimeMerger
{
    private readonly ILogger<StaticRuntimeMerger> _logger;

    // Additive confidence applied to a runtime-confirmed static edge (result capped at 1.0).
    private const double RuntimeObservationConfidenceBoost = 0.3;

    public StaticRuntimeMerger(ILogger<StaticRuntimeMerger> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Produces the merged graph plus enrichment-derived statistics.
    /// </summary>
    public async Task<MergedGraph> MergeAsync(
        RichGraph staticGraph,
        NormalizedTrace runtimeTrace,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(staticGraph);
        ArgumentNullException.ThrowIfNull(runtimeTrace);
        _logger.LogInformation(
            "Merging static graph ({StaticEdges} edges) with runtime trace ({RuntimeEdges} edges)",
            staticGraph.Edges.Count,
            runtimeTrace.Edges.Count);
        var enrichment = await EnrichEdgesAsync(staticGraph, runtimeTrace, cancellationToken)
            .ConfigureAwait(false);
        var unionEdges = BuildUnionEdges(staticGraph, runtimeTrace, enrichment);
        var unionGraph = staticGraph with { Edges = unionEdges };
        var stats = ComputeStatistics(staticGraph, runtimeTrace, enrichment);
        _logger.LogInformation(
            "Merge complete: {Confirmed} confirmed, {New} new, {Unobserved} unobserved, {Coverage:F1}% coverage",
            stats.ConfirmedEdges,
            stats.NewEdges,
            stats.UnobservedEdges,
            stats.CoveragePercent);
        return new MergedGraph
        {
            StaticGraph = staticGraph,
            RuntimeTrace = runtimeTrace,
            UnionGraph = unionGraph,
            Statistics = stats
        };
    }

    /// <summary>
    /// Maps every static edge key to its runtime enrichment (observed or not).
    /// Runtime-only edges are intentionally absent from the result.
    /// </summary>
    public Task<IReadOnlyDictionary<string, EdgeEnrichment>> EnrichEdgesAsync(
        RichGraph staticGraph,
        NormalizedTrace runtimeTrace,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(staticGraph);
        ArgumentNullException.ThrowIfNull(runtimeTrace);
        // Ordinal comparers keep key handling culture-independent (determinism rule).
        var runtimeEdgeMap = runtimeTrace.Edges
            .ToDictionary(e => EdgeKey(e.From, e.To), e => e, StringComparer.Ordinal);
        var enrichment = new Dictionary<string, EdgeEnrichment>(staticGraph.Edges.Count, StringComparer.Ordinal);
        foreach (var staticEdge in staticGraph.Edges)
        {
            var key = EdgeKey(staticEdge.From, staticEdge.To);
            enrichment[key] = runtimeEdgeMap.TryGetValue(key, out var runtimeEdge)
                ? new EdgeEnrichment
                {
                    // Edge confirmed by runtime observation.
                    Observed = true,
                    FirstObserved = runtimeEdge.FirstObserved,
                    LastObserved = runtimeEdge.LastObserved,
                    ObservationCount = runtimeEdge.ObservationCount,
                    ConfidenceBoost = RuntimeObservationConfidenceBoost
                }
                : new EdgeEnrichment
                {
                    // Edge not observed at runtime.
                    Observed = false,
                    FirstObserved = null,
                    LastObserved = null,
                    ObservationCount = 0,
                    ConfidenceBoost = 0.0
                };
        }
        return Task.FromResult<IReadOnlyDictionary<string, EdgeEnrichment>>(enrichment);
    }

    // Static: uses no instance state (previously an instance method).
    private static IReadOnlyList<RichGraphEdge> BuildUnionEdges(
        RichGraph staticGraph,
        NormalizedTrace runtimeTrace,
        IReadOnlyDictionary<string, EdgeEnrichment> enrichment)
    {
        var unionEdges = new List<RichGraphEdge>(staticGraph.Edges.Count + runtimeTrace.Edges.Count);
        var staticEdgeKeys = new HashSet<string>(StringComparer.Ordinal);
        // Static edges, boosted when a runtime observation confirms them.
        foreach (var staticEdge in staticGraph.Edges)
        {
            var key = EdgeKey(staticEdge.From, staticEdge.To);
            staticEdgeKeys.Add(key);
            if (enrichment.TryGetValue(key, out var enrich) && enrich.Observed)
            {
                var boostedConfidence = Math.Min(1.0, staticEdge.Confidence + enrich.ConfidenceBoost);
                unionEdges.Add(staticEdge with { Confidence = boostedConfidence });
            }
            else
            {
                unionEdges.Add(staticEdge);
            }
        }
        // Runtime-only edges become high-confidence "runtime_observed" discoveries.
        foreach (var runtimeEdge in runtimeTrace.Edges)
        {
            var key = EdgeKey(runtimeEdge.From, runtimeEdge.To);
            if (!staticEdgeKeys.Contains(key))
            {
                unionEdges.Add(new RichGraphEdge(
                    From: runtimeEdge.From,
                    To: runtimeEdge.To,
                    Kind: "runtime_observed",
                    Purl: null,
                    SymbolDigest: null,
                    Evidence: new[] { "runtime_observation" },
                    Confidence: 0.95,
                    Candidates: null,
                    Gates: null,
                    GateMultiplierBps: 10000));
            }
        }
        // Ordinal sort: the previous culture-sensitive OrderBy could yield
        // different edge orders per host culture, breaking determinism.
        return unionEdges
            .OrderBy(e => e.From, StringComparer.Ordinal)
            .ThenBy(e => e.To, StringComparer.Ordinal)
            .ToList();
    }

    private static MergeStatistics ComputeStatistics(
        RichGraph staticGraph,
        NormalizedTrace runtimeTrace,
        IReadOnlyDictionary<string, EdgeEnrichment> enrichment)
    {
        var staticEdges = staticGraph.Edges.Count;
        var runtimeEdges = runtimeTrace.Edges.Count;
        var confirmedEdges = enrichment.Count(e => e.Value.Observed);
        var unobservedEdges = staticEdges - confirmedEdges;
        var staticEdgeKeys = staticGraph.Edges
            .Select(e => EdgeKey(e.From, e.To))
            .ToHashSet(StringComparer.Ordinal);
        var runtimeEdgeKeys = runtimeTrace.Edges
            .Select(e => EdgeKey(e.From, e.To))
            .ToHashSet(StringComparer.Ordinal);
        // New edges = distinct runtime keys not present in the static graph.
        var newEdges = runtimeEdgeKeys.Count(key => !staticEdgeKeys.Contains(key));
        var coverage = staticEdges > 0 ? (double)confirmedEdges / staticEdges * 100.0 : 0.0;
        return new MergeStatistics
        {
            StaticEdges = staticEdges,
            RuntimeEdges = runtimeEdges,
            ConfirmedEdges = confirmedEdges,
            NewEdges = newEdges,
            UnobservedEdges = unobservedEdges,
            CoveragePercent = coverage
        };
    }

    // "→" is unlikely in symbol names; keys are compared ordinally throughout.
    private static string EdgeKey(string from, string to) => $"{from}→{to}";
}

View File

@@ -0,0 +1,419 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Runtime.Retention;
/// <summary>
/// Configuration for trace retention policies.
/// </summary>
public sealed record TraceRetentionOptions
{
/// <summary>
/// Default retention period for trace data. Default: 30 days.
/// </summary>
public TimeSpan DefaultRetentionPeriod { get; init; } = TimeSpan.FromDays(30);
/// <summary>
/// Extended retention period for traces referenced by active slices. Default: 90 days.
/// </summary>
public TimeSpan ActiveSliceRetentionPeriod { get; init; } = TimeSpan.FromDays(90);
/// <summary>
/// Maximum storage quota in bytes. Default: 10 GB.
/// </summary>
public long MaxStorageQuotaBytes { get; init; } = 10L * 1024 * 1024 * 1024;
/// <summary>
/// Whether to aggregate old traces into summaries before deletion. Default: true.
/// </summary>
public bool EnableAggregation { get; init; } = true;
/// <summary>
/// Age threshold for trace aggregation. Default: 7 days.
/// </summary>
public TimeSpan AggregationThreshold { get; init; } = TimeSpan.FromDays(7);
/// <summary>
/// Batch size for pruning operations. Default: 1000.
/// </summary>
public int PruningBatchSize { get; init; } = 1000;
/// <summary>
/// Interval between automatic pruning runs. Default: 1 hour.
/// </summary>
public TimeSpan PruningInterval { get; init; } = TimeSpan.FromHours(1);
}
/// <summary>
/// Result of a pruning operation.
/// </summary>
public sealed record PruningResult
{
/// <summary>UTC time the pruning cycle finished.</summary>
public required DateTimeOffset CompletedAt { get; init; }
/// <summary>Number of traces deleted this cycle.</summary>
public required int TracesDeleted { get; init; }
/// <summary>Number of traces rolled into summaries before deletion.</summary>
public required int TracesAggregated { get; init; }
/// <summary>Storage bytes reclaimed.</summary>
public required long BytesFreed { get; init; }
/// <summary>Traces still retained after the cycle.</summary>
public required int TracesRetained { get; init; }
/// <summary>Wall-clock duration of the cycle.</summary>
public required TimeSpan Duration { get; init; }
/// <summary>Error message when the cycle failed partway; null on success.</summary>
public string? Error { get; init; }
}
/// <summary>
/// Aggregated trace summary for old traces.
/// </summary>
public sealed record TraceSummary
{
/// <summary>Scan whose traces were aggregated.</summary>
public required string ScanId { get; init; }
/// <summary>Start of the aggregated time window.</summary>
public required DateTimeOffset PeriodStart { get; init; }
/// <summary>End of the aggregated time window.</summary>
public required DateTimeOffset PeriodEnd { get; init; }
/// <summary>Total events across all aggregated traces.</summary>
public required int TotalEvents { get; init; }
/// <summary>Count of distinct call edges seen.</summary>
public required int UniqueEdges { get; init; }
/// <summary>Per-edge observation counts.
/// NOTE(review): a mutable Dictionary inside a record undermines value semantics —
/// consider IReadOnlyDictionary.</summary>
public required Dictionary<string, int> EdgeCounts { get; init; }
/// <summary>UTC time the summary was produced.</summary>
public required DateTimeOffset AggregatedAt { get; init; }
}
/// <summary>
/// Interface for trace storage operations needed by retention manager.
/// </summary>
public interface ITraceStorageProvider
{
/// <summary>Returns up to <paramref name="limit"/> traces created before <paramref name="threshold"/>.</summary>
Task<IReadOnlyList<TraceMetadata>> GetTracesOlderThanAsync(
DateTimeOffset threshold,
int limit,
CancellationToken cancellationToken = default);
/// <summary>Returns traces referenced by active slices (protected from normal pruning).</summary>
Task<IReadOnlyList<TraceMetadata>> GetTracesReferencedBySlicesAsync(
CancellationToken cancellationToken = default);
/// <summary>Returns total bytes currently used by trace storage.</summary>
Task<long> GetTotalStorageUsedAsync(CancellationToken cancellationToken = default);
/// <summary>Deletes the given traces.</summary>
Task DeleteTracesAsync(
IEnumerable<string> traceIds,
CancellationToken cancellationToken = default);
/// <summary>Persists an aggregated summary.</summary>
Task StoreSummaryAsync(
TraceSummary summary,
CancellationToken cancellationToken = default);
/// <summary>Loads the raw events of a stored trace.</summary>
Task<IReadOnlyList<RuntimeCallEvent>> GetTraceEventsAsync(
string traceId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Metadata for a stored trace.
/// NOTE(review): name collides with StellaOps.Scanner.Runtime.Ingestion.TraceMetadata
/// (different shape) — consider renaming to avoid ambiguous usings.
/// </summary>
public sealed record TraceMetadata
{
/// <summary>Trace identifier.</summary>
public required string TraceId { get; init; }
/// <summary>Scan the trace belongs to.</summary>
public required string ScanId { get; init; }
/// <summary>UTC creation time (basis for retention decisions).</summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>Stored size in bytes.</summary>
public required long SizeBytes { get; init; }
/// <summary>Number of raw events in the trace.</summary>
public required int EventCount { get; init; }
/// <summary>True when an active slice references this trace.</summary>
public bool IsReferencedBySlice { get; init; }
}
/// <summary>
/// Runtime call event (shared with RuntimeStaticMerger).
/// NOTE(review): this re-declares a subset of StellaOps.Scanner.Runtime.RuntimeCallEvent;
/// maintaining two copies risks drift — consider reusing the canonical type.
/// </summary>
public sealed record RuntimeCallEvent
{
/// <summary>Raw collector timestamp (see collector contract for units).</summary>
public required ulong Timestamp { get; init; }
/// <summary>Process ID.</summary>
public required uint Pid { get; init; }
/// <summary>Thread ID.</summary>
public required uint Tid { get; init; }
/// <summary>Resolved caller symbol name.</summary>
public required string CallerSymbol { get; init; }
/// <summary>Resolved callee symbol name.</summary>
public required string CalleeSymbol { get; init; }
/// <summary>Binary path containing the symbols.</summary>
public required string BinaryPath { get; init; }
/// <summary>Optional digest identifying the originating trace.</summary>
public string? TraceDigest { get; init; }
}
/// <summary>
/// Manages trace retention and pruning policies.
/// </summary>
public sealed class TraceRetentionManager
{
private readonly ITraceStorageProvider _storage;
private readonly TraceRetentionOptions _options;
private readonly ILogger<TraceRetentionManager> _logger;
private readonly TimeProvider _timeProvider;
public TraceRetentionManager(
ITraceStorageProvider storage,
TraceRetentionOptions? options = null,
ILogger<TraceRetentionManager>? logger = null,
TimeProvider? timeProvider = null)
{
_storage = storage ?? throw new ArgumentNullException(nameof(storage));
_options = options ?? new TraceRetentionOptions();
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<TraceRetentionManager>.Instance;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Run a pruning cycle to enforce retention policies.
/// </summary>
public async Task<PruningResult> PruneAsync(CancellationToken cancellationToken = default)
{
var startTime = _timeProvider.GetUtcNow();
var tracesDeleted = 0;
var tracesAggregated = 0;
long bytesFreed = 0;
string? error = null;
try
{
_logger.LogInformation("Starting trace pruning cycle");
// Get traces referenced by active slices (protected from deletion)
var protectedTraces = await _storage.GetTracesReferencedBySlicesAsync(cancellationToken)
.ConfigureAwait(false);
var protectedIds = protectedTraces.Select(t => t.TraceId).ToHashSet(StringComparer.Ordinal);
_logger.LogDebug("{Count} traces protected by slice references", protectedIds.Count);
// Check quota - if exceeded, delete oldest first regardless of age
var currentUsage = await _storage.GetTotalStorageUsedAsync(cancellationToken).ConfigureAwait(false);
if (currentUsage > _options.MaxStorageQuotaBytes)
{
var quotaResult = await EnforceQuotaAsync(protectedIds, currentUsage, cancellationToken)
.ConfigureAwait(false);
tracesDeleted += quotaResult.Deleted;
bytesFreed += quotaResult.BytesFreed;
}
// Delete traces older than retention period
var retentionThreshold = startTime - _options.DefaultRetentionPeriod;
var oldTraces = await _storage.GetTracesOlderThanAsync(
retentionThreshold,
_options.PruningBatchSize,
cancellationToken).ConfigureAwait(false);
var tracesToDelete = oldTraces
.Where(t => !protectedIds.Contains(t.TraceId))
.ToList();
// Aggregate before deletion if enabled
if (_options.EnableAggregation && tracesToDelete.Count > 0)
{
tracesAggregated = await AggregateTracesAsync(tracesToDelete, cancellationToken)
.ConfigureAwait(false);
}
// Delete old traces
if (tracesToDelete.Count > 0)
{
await _storage.DeleteTracesAsync(
tracesToDelete.Select(t => t.TraceId),
cancellationToken).ConfigureAwait(false);
bytesFreed += tracesToDelete.Sum(t => t.SizeBytes);
tracesDeleted += tracesToDelete.Count;
_logger.LogInformation(
"Deleted {Count} traces older than {Threshold:O}, freed {Bytes:N0} bytes",
tracesToDelete.Count,
retentionThreshold,
bytesFreed);
}
// Delete protected traces if they exceed extended retention
var extendedThreshold = startTime - _options.ActiveSliceRetentionPeriod;
var expiredProtected = protectedTraces
.Where(t => t.CreatedAt < extendedThreshold)
.ToList();
if (expiredProtected.Count > 0)
{
await _storage.DeleteTracesAsync(
expiredProtected.Select(t => t.TraceId),
cancellationToken).ConfigureAwait(false);
bytesFreed += expiredProtected.Sum(t => t.SizeBytes);
tracesDeleted += expiredProtected.Count;
_logger.LogInformation(
"Deleted {Count} protected traces exceeding extended retention ({Days} days)",
expiredProtected.Count,
_options.ActiveSliceRetentionPeriod.TotalDays);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Error during trace pruning");
error = ex.Message;
}
var duration = _timeProvider.GetUtcNow() - startTime;
var tracesRetained = await GetRetainedCountAsync(cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Pruning cycle completed in {Duration:N1}ms: {Deleted} deleted, {Aggregated} aggregated, {Retained} retained, {BytesFreed:N0} bytes freed",
duration.TotalMilliseconds,
tracesDeleted,
tracesAggregated,
tracesRetained,
bytesFreed);
return new PruningResult
{
CompletedAt = _timeProvider.GetUtcNow(),
TracesDeleted = tracesDeleted,
TracesAggregated = tracesAggregated,
BytesFreed = bytesFreed,
TracesRetained = tracesRetained,
Duration = duration,
Error = error
};
}
/// <summary>
/// Get current storage statistics.
/// </summary>
public async Task<StorageStatistics> GetStatisticsAsync(CancellationToken cancellationToken = default)
{
var usage = await _storage.GetTotalStorageUsedAsync(cancellationToken).ConfigureAwait(false);
var protectedTraces = await _storage.GetTracesReferencedBySlicesAsync(cancellationToken)
.ConfigureAwait(false);
return new StorageStatistics
{
TotalBytesUsed = usage,
QuotaBytesLimit = _options.MaxStorageQuotaBytes,
QuotaUsageRatio = (double)usage / _options.MaxStorageQuotaBytes,
ProtectedTraceCount = protectedTraces.Count,
ProtectedBytesUsed = protectedTraces.Sum(t => t.SizeBytes)
};
}
/// <summary>
/// Delete unprotected traces (oldest first) until usage drops to 90% of the
/// configured quota. Returns the number of traces deleted and bytes freed.
/// </summary>
private async Task<(int Deleted, long BytesFreed)> EnforceQuotaAsync(
    HashSet<string> protectedIds,
    long currentUsage,
    CancellationToken cancellationToken)
{
    var targetUsage = (long)(_options.MaxStorageQuotaBytes * 0.9); // Target 90% of quota
    var bytesToFree = currentUsage - targetUsage;
    if (bytesToFree <= 0) return (0, 0);
    _logger.LogWarning(
        "Storage quota exceeded ({Usage:N0}/{Quota:N0} bytes), freeing {ToFree:N0} bytes",
        currentUsage,
        _options.MaxStorageQuotaBytes,
        bytesToFree);
    var deleted = 0;
    long freed = 0;
    // Get oldest traces first
    var threshold = _timeProvider.GetUtcNow(); // Get all traces
    while (freed < bytesToFree)
    {
        var batch = await _storage.GetTracesOlderThanAsync(
            threshold,
            _options.PruningBatchSize,
            cancellationToken).ConfigureAwait(false);
        if (batch.Count == 0) break;
        // Skip slice-protected traces. The TakeWhile predicate accumulates
        // `freed` as a side effect and deliberately includes the trace that
        // crosses the bytesToFree boundary.
        var toDelete = batch
            .Where(t => !protectedIds.Contains(t.TraceId))
            .OrderBy(t => t.CreatedAt)
            .TakeWhile(t =>
            {
                if (freed >= bytesToFree) return false;
                freed += t.SizeBytes;
                return true;
            })
            .ToList();
        if (toDelete.Count == 0) break;
        await _storage.DeleteTracesAsync(
            toDelete.Select(t => t.TraceId),
            cancellationToken).ConfigureAwait(false);
        deleted += toDelete.Count;
        // Walk the threshold back to the oldest trace just deleted so the
        // next query targets strictly older data.
        // NOTE(review): this skips undeleted traces newer than the oldest
        // deleted one, so the loop can end before freeing the full
        // bytesToFree — confirm GetTracesOlderThanAsync ordering/paging
        // semantics support this walk.
        threshold = toDelete.Min(t => t.CreatedAt);
    }
    return (deleted, freed);
}
/// <summary>
/// Roll up the given traces into per-scan <c>TraceSummary</c> records (edge
/// frequency counts over the observation window) so raw traces can be pruned
/// without losing aggregate signal. Returns the number of traces aggregated.
/// </summary>
private async Task<int> AggregateTracesAsync(
    IReadOnlyList<TraceMetadata> traces,
    CancellationToken cancellationToken)
{
    var aggregated = 0;
    var grouped = traces.GroupBy(t => t.ScanId);
    foreach (var group in grouped)
    {
        var scanId = group.Key;
        // Edge keys use the "caller->callee" format with ordinal comparison.
        var edgeCounts = new Dictionary<string, int>(StringComparer.Ordinal);
        var totalEvents = 0;
        DateTimeOffset? periodStart = null;
        DateTimeOffset? periodEnd = null;
        foreach (var trace in group)
        {
            // Track min/max CreatedAt across the scan's traces to bound the
            // summary period.
            periodStart = periodStart == null
                ? trace.CreatedAt
                : (trace.CreatedAt < periodStart ? trace.CreatedAt : periodStart);
            periodEnd = periodEnd == null
                ? trace.CreatedAt
                : (trace.CreatedAt > periodEnd ? trace.CreatedAt : periodEnd);
            // Events are fetched sequentially, one trace at a time; counts
            // accumulate across all traces in the scan.
            var events = await _storage.GetTraceEventsAsync(trace.TraceId, cancellationToken)
                .ConfigureAwait(false);
            foreach (var evt in events)
            {
                totalEvents++;
                var edgeKey = $"{evt.CallerSymbol}->{evt.CalleeSymbol}";
                edgeCounts.TryGetValue(edgeKey, out var count);
                edgeCounts[edgeKey] = count + 1;
            }
        }
        // Only persist a summary when at least one event was observed.
        if (periodStart.HasValue && periodEnd.HasValue && totalEvents > 0)
        {
            var summary = new TraceSummary
            {
                ScanId = scanId,
                PeriodStart = periodStart.Value,
                PeriodEnd = periodEnd.Value,
                TotalEvents = totalEvents,
                UniqueEdges = edgeCounts.Count,
                EdgeCounts = edgeCounts,
                AggregatedAt = _timeProvider.GetUtcNow()
            };
            await _storage.StoreSummaryAsync(summary, cancellationToken).ConfigureAwait(false);
            aggregated += group.Count();
        }
    }
    return aggregated;
}
/// <summary>
/// Count all traces currently retained in storage.
/// </summary>
private async Task<int> GetRetainedCountAsync(CancellationToken cancellationToken)
{
    // NOTE(review): this loads every trace's metadata just to count it; the
    // far-future threshold plus int.MaxValue batch size acts as an
    // "everything" query. A dedicated count API on the storage layer would
    // avoid the transfer.
    var traces = await _storage.GetTracesOlderThanAsync(
        _timeProvider.GetUtcNow().AddYears(100), // Far future to get all
        int.MaxValue,
        cancellationToken).ConfigureAwait(false);
    return traces.Count;
}
}
/// <summary>
/// Storage statistics for traces.
/// </summary>
public sealed record StorageStatistics
{
    /// <summary>Total bytes currently consumed by stored traces.</summary>
    public long TotalBytesUsed { get; init; }

    /// <summary>Configured storage quota in bytes.</summary>
    public long QuotaBytesLimit { get; init; }

    /// <summary>Ratio of used bytes to the quota limit.</summary>
    public double QuotaUsageRatio { get; init; }

    /// <summary>Number of traces protected from pruning by slice references.</summary>
    public int ProtectedTraceCount { get; init; }

    /// <summary>Bytes consumed by slice-protected traces.</summary>
    public long ProtectedBytesUsed { get; init; }
}

View File

@@ -0,0 +1,95 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.Runtime.Merge;
namespace StellaOps.Scanner.Runtime.Slices;
/// <summary>
/// Generates reachability slices with runtime observation evidence.
/// </summary>
public sealed class ObservedSliceGenerator
{
    private readonly SliceExtractor _sliceExtractor;
    private readonly ILogger<ObservedSliceGenerator> _logger;

    public ObservedSliceGenerator(
        SliceExtractor sliceExtractor,
        ILogger<ObservedSliceGenerator> logger)
    {
        _sliceExtractor = sliceExtractor ?? throw new ArgumentNullException(nameof(sliceExtractor));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Generate slice from merged static+runtime graph.
    /// </summary>
    /// <param name="mergedGraph">Merged static/runtime graph whose union graph is sliced.</param>
    /// <param name="query">Slice query driving the extraction.</param>
    /// <param name="inputs">Slice input set passed through to the extractor.</param>
    /// <param name="manifest">Scan manifest recorded with the slice.</param>
    public ReachabilitySlice GenerateObservedSlice(
        MergedGraph mergedGraph,
        SliceQuery query,
        SliceInputs inputs,
        StellaOps.Scanner.Core.ScanManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(mergedGraph);
        ArgumentNullException.ThrowIfNull(query);
        ArgumentNullException.ThrowIfNull(inputs);
        ArgumentNullException.ThrowIfNull(manifest);

        _logger.LogInformation(
            "Generating observed slice with {Coverage:F1}% runtime coverage",
            mergedGraph.Statistics.CoveragePercent);

        var extractionRequest = new SliceExtractionRequest(
            mergedGraph.UnionGraph,
            inputs,
            query,
            manifest);
        var slice = _sliceExtractor.Extract(extractionRequest);

        _logger.LogInformation(
            "Generated observed slice: {Nodes} nodes, {Edges} edges, verdict={Verdict}",
            slice.Subgraph.Nodes.Length,
            slice.Subgraph.Edges.Length,
            slice.Verdict.Status);
        return slice;
    }

    /// <summary>
    /// Annotate slice edges with runtime observation metadata.
    /// </summary>
    /// <param name="slice">Slice whose edges are annotated; returned as a new instance.</param>
    /// <param name="enrichment">
    /// Enrichment keyed by "{from}→{to}" (U+2192 arrow).
    /// NOTE(review): trace aggregation elsewhere builds edge keys with "->";
    /// confirm callers of this method use the arrow form.
    /// </param>
    public ReachabilitySlice AnnotateWithRuntimeEvidence(
        ReachabilitySlice slice,
        IReadOnlyDictionary<string, EdgeEnrichment> enrichment)
    {
        ArgumentNullException.ThrowIfNull(slice);
        ArgumentNullException.ThrowIfNull(enrichment);

        // Single projection over the edges. The collection expression below
        // builds the immutable Edges array directly, avoiding the previous
        // intermediate array + ToImmutableArray() round-trip (and the
        // System.Collections.Immutable using that extension required).
        var annotatedEdges = slice.Subgraph.Edges
            .Select(edge =>
            {
                var key = $"{edge.From}→{edge.To}";
                if (!enrichment.TryGetValue(key, out var enrich) || !enrich.Observed)
                {
                    return edge;
                }

                // NOTE(review): DateTimeOffset.UtcNow fallback is
                // non-deterministic; consider injecting TimeProvider if
                // reproducible slices are required here.
                return edge with
                {
                    Observed = new ObservedEdgeMetadata
                    {
                        FirstObserved = enrich.FirstObserved ?? DateTimeOffset.UtcNow,
                        LastObserved = enrich.LastObserved ?? DateTimeOffset.UtcNow,
                        ObservationCount = (int)enrich.ObservationCount,
                        TraceDigest = null
                    }
                };
            });

        var annotatedSubgraph = slice.Subgraph with { Edges = [.. annotatedEdges] };
        return slice with { Subgraph = annotatedSubgraph };
    }
}

View File

@@ -0,0 +1,164 @@
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Scanner.SmartDiff.Detection;
namespace StellaOps.Scanner.SmartDiff.Attestation;
/// <summary>
/// Build request for delta verdict attestations.
/// </summary>
public sealed record DeltaVerdictBuildRequest
{
    /// <summary>Revision identifier of the "before" verdict.</summary>
    public required string BeforeRevisionId { get; init; }

    /// <summary>Revision identifier of the "after" verdict.</summary>
    public required string AfterRevisionId { get; init; }

    /// <summary>Digest of the "before" image (attestation subject).</summary>
    public required string BeforeImageDigest { get; init; }

    /// <summary>Digest of the "after" image (attestation subject).</summary>
    public required string AfterImageDigest { get; init; }

    /// <summary>Material risk change results compared between the revisions.</summary>
    public required IReadOnlyList<MaterialRiskChangeResult> Changes { get; init; }

    /// <summary>Comparison timestamp; the builder's clock is used when null.</summary>
    public DateTimeOffset? ComparedAt { get; init; }

    /// <summary>Optional digest of the "before" verdict document.</summary>
    public string? BeforeVerdictDigest { get; init; }

    /// <summary>Optional digest of the "after" verdict document.</summary>
    public string? AfterVerdictDigest { get; init; }

    /// <summary>Optional proof-spine attestation reference for the "before" side.</summary>
    public AttestationReference? BeforeProofSpine { get; init; }

    /// <summary>Optional proof-spine attestation reference for the "after" side.</summary>
    public AttestationReference? AfterProofSpine { get; init; }

    /// <summary>Optional graph revision id backing the "before" verdict.</summary>
    public string? BeforeGraphRevisionId { get; init; }

    /// <summary>Optional graph revision id backing the "after" verdict.</summary>
    public string? AfterGraphRevisionId { get; init; }

    /// <summary>Optional display name for the "before" image; digest is used when null.</summary>
    public string? BeforeImageName { get; init; }

    /// <summary>Optional display name for the "after" image; digest is used when null.</summary>
    public string? AfterImageName { get; init; }
}
/// <summary>
/// Builds delta verdict predicate and statement payloads.
/// </summary>
public sealed class DeltaVerdictBuilder
{
    private readonly TimeProvider _timeProvider;

    /// <summary>Creates a builder; defaults to the system clock when no provider is supplied.</summary>
    public DeltaVerdictBuilder(TimeProvider? timeProvider = null)
        => _timeProvider = timeProvider ?? TimeProvider.System;

    /// <summary>
    /// Build the predicate describing material risk deltas between two verdict revisions.
    /// </summary>
    public DeltaVerdictPredicate BuildPredicate(DeltaVerdictBuildRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.Changes);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BeforeRevisionId);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.AfterRevisionId);

        // Only material changes contribute to the aggregate priority score.
        var materialOnly = request.Changes.Where(c => c.HasMaterialChange);

        return new DeltaVerdictPredicate
        {
            BeforeRevisionId = request.BeforeRevisionId,
            AfterRevisionId = request.AfterRevisionId,
            HasMaterialChange = request.Changes.Any(c => c.HasMaterialChange),
            PriorityScore = materialOnly.Sum(c => c.PriorityScore),
            Changes = BuildChangeEntries(request.Changes),
            BeforeVerdictDigest = request.BeforeVerdictDigest,
            AfterVerdictDigest = request.AfterVerdictDigest,
            BeforeProofSpine = request.BeforeProofSpine,
            AfterProofSpine = request.AfterProofSpine,
            BeforeGraphRevisionId = request.BeforeGraphRevisionId,
            AfterGraphRevisionId = request.AfterGraphRevisionId,
            ComparedAt = request.ComparedAt ?? _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Build a statement wrapping the predicate, with the before/after images as subjects.
    /// </summary>
    public DeltaVerdictStatement BuildStatement(DeltaVerdictBuildRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BeforeImageDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.AfterImageDigest);

        var beforeSubject = BuildSubject(request.BeforeImageDigest, request.BeforeImageName);
        var afterSubject = BuildSubject(request.AfterImageDigest, request.AfterImageName);

        return new DeltaVerdictStatement
        {
            Subject = [beforeSubject, afterSubject],
            Predicate = BuildPredicate(request)
        };
    }

    // Flatten material change details into deterministically ordered entries.
    private static ImmutableArray<DeltaVerdictChange> BuildChangeEntries(IReadOnlyList<MaterialRiskChangeResult> changes)
    {
        if (changes.Count == 0)
        {
            return [];
        }

        var flattened = changes
            .Where(change => change.HasMaterialChange && !change.Changes.IsDefaultOrEmpty)
            .SelectMany(change => change.Changes.Select(detail => new DeltaVerdictChange
            {
                Rule = ToJsonEnum(detail.Rule),
                FindingKey = new DeltaFindingKey
                {
                    VulnId = change.FindingKey.VulnId,
                    Purl = change.FindingKey.ComponentPurl
                },
                Direction = ToJsonEnum(detail.Direction),
                ChangeType = ToJsonEnum(detail.ChangeType),
                Reason = detail.Reason,
                PreviousValue = detail.PreviousValue,
                CurrentValue = detail.CurrentValue,
                Weight = detail.Weight
            }));

        // Ordinal multi-key ordering keeps attestation output deterministic.
        return flattened
            .OrderBy(e => e.FindingKey.VulnId, StringComparer.Ordinal)
            .ThenBy(e => e.FindingKey.Purl, StringComparer.Ordinal)
            .ThenBy(e => e.Rule, StringComparer.Ordinal)
            .ThenBy(e => e.ChangeType, StringComparer.Ordinal)
            .ThenBy(e => e.Direction, StringComparer.Ordinal)
            .ThenBy(e => e.Reason, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    // Subject entry with an algorithm-keyed digest map; raw digest doubles as the
    // name when no display name is supplied.
    private static Subject BuildSubject(string digest, string? name)
    {
        var (algorithm, value) = SplitDigest(digest);
        return new Subject
        {
            Name = name ?? digest,
            Digest = new Dictionary<string, string> { [algorithm] = value }
        };
    }

    // Split "algo:value"; digests without a usable separator are treated as bare
    // sha256 values.
    private static (string Algorithm, string Value) SplitDigest(string digest)
    {
        var separator = digest.IndexOf(':');
        return separator <= 0 || separator == digest.Length - 1
            ? ("sha256", digest)
            : (digest[..separator], digest[(separator + 1)..]);
    }

    // Serialize an enum member through System.Text.Json and strip the quotes so
    // the entry matches the predicate's JSON wire form.
    private static string ToJsonEnum<TEnum>(TEnum value) where TEnum : struct, Enum
        => JsonSerializer.Serialize(value).Trim('"');
}

View File

@@ -0,0 +1,61 @@
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Scanner.Storage.Oci;
namespace StellaOps.Scanner.SmartDiff.Attestation;
/// <summary>
/// Inputs for publishing a delta verdict DSSE envelope as an OCI artifact.
/// </summary>
public sealed record DeltaVerdictOciPublishRequest
{
    /// <summary>Target registry reference to push under.</summary>
    public required string Reference { get; init; }

    /// <summary>Digest of the "before" image, recorded in annotations.</summary>
    public required string BeforeImageDigest { get; init; }

    /// <summary>Digest of the "after" image; used as the artifact subject.</summary>
    public required string AfterImageDigest { get; init; }

    /// <summary>Raw DSSE envelope bytes published as the artifact layer.</summary>
    public required byte[] DsseEnvelopeBytes { get; init; }

    /// <summary>Optional attestation digest added to the annotations when present.</summary>
    public string? AttestationDigest { get; init; }
}
/// <summary>
/// Publishes delta verdict DSSE envelopes as OCI artifacts referencing the
/// "after" image as subject.
/// </summary>
public sealed class DeltaVerdictOciPublisher
{
    private readonly OciArtifactPusher _pusher;

    public DeltaVerdictOciPublisher(OciArtifactPusher pusher)
        => _pusher = pusher ?? throw new ArgumentNullException(nameof(pusher));

    /// <summary>
    /// Push the DSSE envelope as a single-layer artifact annotated with the
    /// before/after digest pair.
    /// </summary>
    public Task<OciArtifactPushResult> PushAsync(
        DeltaVerdictOciPublishRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        // Ordinal-keyed annotations describing the delta pair.
        var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            [OciAnnotations.StellaPredicateType] = DeltaVerdictPredicate.PredicateType,
            [OciAnnotations.BaseDigest] = request.BeforeImageDigest,
            [OciAnnotations.StellaBeforeDigest] = request.BeforeImageDigest,
            [OciAnnotations.StellaAfterDigest] = request.AfterImageDigest
        };
        if (!string.IsNullOrWhiteSpace(request.AttestationDigest))
        {
            metadata[OciAnnotations.StellaAttestationDigest] = request.AttestationDigest!;
        }

        var envelopeLayer = new OciLayerContent
        {
            Content = request.DsseEnvelopeBytes,
            MediaType = OciMediaTypes.DsseEnvelope
        };

        return _pusher.PushAsync(
            new OciArtifactPushRequest
            {
                Reference = request.Reference,
                ArtifactType = OciMediaTypes.DeltaVerdictPredicate,
                SubjectDigest = request.AfterImageDigest,
                Layers = [envelopeLayer],
                Annotations = metadata
            },
            cancellationToken);
    }
}

View File

@@ -46,7 +46,8 @@ public sealed record SmartDiffSarifInput(
IReadOnlyList<HardeningRegression> HardeningRegressions,
IReadOnlyList<VexCandidate> VexCandidates,
IReadOnlyList<ReachabilityChange> ReachabilityChanges,
VcsInfo? VcsInfo = null);
VcsInfo? VcsInfo = null,
string? DeltaVerdictReference = null);
/// <summary>
/// VCS information for SARIF provenance.
@@ -244,7 +245,7 @@ public sealed class SarifOutputGenerator
// Material risk changes
foreach (var change in input.MaterialChanges)
{
results.Add(CreateMaterialChangeResult(change));
results.Add(CreateMaterialChangeResult(change, input.DeltaVerdictReference));
}
// Hardening regressions
@@ -277,7 +278,7 @@ public sealed class SarifOutputGenerator
return [.. results];
}
private static SarifResult CreateMaterialChangeResult(MaterialRiskChange change)
private static SarifResult CreateMaterialChangeResult(MaterialRiskChange change, string? deltaVerdictReference)
{
var level = change.Direction == RiskDirection.Increased ? SarifLevel.Warning : SarifLevel.Note;
var message = $"Material risk change for {change.VulnId} in {change.ComponentPurl}: {change.Reason}";
@@ -288,6 +289,13 @@ public sealed class SarifOutputGenerator
ArtifactLocation: new SarifArtifactLocation(Uri: change.FilePath))))
: (ImmutableArray<SarifLocation>?)null;
var properties = deltaVerdictReference is null
? null
: ImmutableSortedDictionary.CreateRange(StringComparer.Ordinal, new[]
{
KeyValuePair.Create("deltaVerdictRef", (object)deltaVerdictReference)
});
return new SarifResult(
RuleId: "SDIFF001",
Level: level,
@@ -297,7 +305,8 @@ public sealed class SarifOutputGenerator
{
KeyValuePair.Create("purl", change.ComponentPurl),
KeyValuePair.Create("vulnId", change.VulnId),
}));
}),
Properties: properties);
}
private static SarifResult CreateHardeningRegressionResult(HardeningRegression regression)

View File

@@ -5,4 +5,9 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\\..\\..\\Attestor\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.Storage.Oci\\StellaOps.Scanner.Storage.Oci.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,33 @@
# AGENTS - Scanner Storage.Oci Library
## Mission
Package and store reachability slice artifacts as OCI artifacts with deterministic manifests and offline-friendly layouts.
## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (unit/integration tests for manifest building and push flows).
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/reachability/binary-reachability-schema.md`
- `docs/24_OFFLINE_KIT.md`
## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Storage.Oci/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Storage.Oci.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.
## Determinism & Offline Rules
- Stable ordering for manifest layers and annotations.
- Support OCI layout for offline export without network calls.
## Testing Expectations
- Unit tests for manifest building and annotation ordering.
- Integration tests for registry push with mocked registry endpoints.
## Workflow
- Update sprint status on task transitions.
- Log notable decisions in the sprint Execution Log.

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Service for pushing OCI artifacts to registries.
/// Sprint: SPRINT_3850_0001_0001
/// </summary>
public interface IOciPushService
{
    /// <summary>
    /// Push an OCI artifact to a registry.
    /// </summary>
    /// <param name="request">Reference, artifact type, layers, and annotations to publish.</param>
    /// <returns>Push outcome, including the manifest digest on success.</returns>
    Task<OciArtifactPushResult> PushAsync(
        OciArtifactPushRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Push a slice artifact to a registry.
    /// </summary>
    /// <param name="input">Slice artifact content and target metadata.</param>
    /// <returns>Push outcome, including the manifest digest on success.</returns>
    Task<OciArtifactPushResult> PushSliceAsync(
        SliceArtifactInput input,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Annotation keys applied to OCI manifests: standard opencontainers keys plus
/// StellaOps-specific keys for attestation and delta metadata.
/// </summary>
public static class OciAnnotations
{
    // Standard OCI image-spec annotation keys.
    public const string Created = "org.opencontainers.image.created";
    public const string Title = "org.opencontainers.image.title";
    public const string Description = "org.opencontainers.image.description";
    public const string BaseDigest = "org.opencontainers.image.base.digest";
    public const string BaseName = "org.opencontainers.image.base.name";

    // StellaOps extension keys.
    public const string StellaPredicateType = "org.stellaops.predicate.type";
    public const string StellaAttestationDigest = "org.stellaops.attestation.digest";
    public const string StellaBeforeDigest = "org.stellaops.delta.before.digest";
    public const string StellaAfterDigest = "org.stellaops.delta.after.digest";
    public const string StellaSbomDigest = "org.stellaops.sbom.digest";
    public const string StellaVerdictDigest = "org.stellaops.verdict.digest";
}

View File

@@ -0,0 +1,291 @@
using System.Net;
using System.Net.Http.Headers;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Pushes OCI artifacts to a v2 registry over HTTP.
/// Flow: push an empty config blob, push each layer blob (HEAD dedupe first),
/// then PUT the generated artifact manifest.
/// </summary>
public sealed class OciArtifactPusher
{
    // Compact camelCase JSON for manifests; no indentation so manifest bytes
    // (and therefore the manifest digest) are stable for the same content.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    // Canonical empty JSON config blob used for artifact manifests.
    private static readonly byte[] EmptyConfigBlob = "{}"u8.ToArray();

    private readonly HttpClient _httpClient;
    private readonly ICryptoHash _cryptoHash;
    private readonly OciRegistryOptions _options;
    private readonly ILogger<OciArtifactPusher> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>Creates a pusher; <paramref name="timeProvider"/> defaults to the system clock.</summary>
    public OciArtifactPusher(
        HttpClient httpClient,
        ICryptoHash cryptoHash,
        OciRegistryOptions options,
        ILogger<OciArtifactPusher> logger,
        TimeProvider? timeProvider = null)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Push all layers of <paramref name="request"/> plus a generated artifact
    /// manifest. Registry and HTTP failures are returned as a failed result
    /// rather than thrown.
    /// </summary>
    public async Task<OciArtifactPushResult> PushAsync(
        OciArtifactPushRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.Reference);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.ArtifactType);
        if (request.Layers.Count == 0)
        {
            return OciArtifactPushResult.Failed("No layers supplied for OCI push.");
        }
        var reference = OciImageReference.Parse(request.Reference, _options.DefaultRegistry);
        if (reference is null)
        {
            return OciArtifactPushResult.Failed($"Invalid OCI reference: {request.Reference}");
        }
        var auth = OciRegistryAuthorization.FromOptions(reference.Registry, _options.Auth);
        try
        {
            // Config first, then each layer; blob pushes are deduplicated by
            // digest inside PushBlobAsync.
            var configDigest = await PushBlobAsync(reference, EmptyConfigBlob, OciMediaTypes.EmptyConfig, auth, cancellationToken)
                .ConfigureAwait(false);
            var layerDescriptors = new List<OciDescriptor>();
            var layerDigests = new List<string>();
            foreach (var layer in request.Layers)
            {
                var digest = await PushBlobAsync(reference, layer.Content, layer.MediaType, auth, cancellationToken)
                    .ConfigureAwait(false);
                layerDescriptors.Add(new OciDescriptor
                {
                    MediaType = layer.MediaType,
                    Digest = digest,
                    Size = layer.Content.Length,
                    Annotations = NormalizeAnnotations(layer.Annotations)
                });
                layerDigests.Add(digest);
            }
            var manifest = BuildManifest(request, configDigest, layerDescriptors);
            var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, SerializerOptions);
            var manifestDigest = ComputeDigest(manifestBytes);
            // When the reference carries no tag, publish under the digest hex
            // as a pseudo-tag.
            var tag = reference.Tag ?? manifestDigest.Replace("sha256:", string.Empty, StringComparison.Ordinal);
            await PushManifestAsync(reference, manifestBytes, tag, auth, cancellationToken).ConfigureAwait(false);
            var manifestReference = $"{reference.Registry}/{reference.Repository}@{manifestDigest}";
            _logger.LogInformation("Pushed OCI artifact {Reference}", manifestReference);
            return new OciArtifactPushResult
            {
                Success = true,
                ManifestDigest = manifestDigest,
                ManifestReference = manifestReference,
                LayerDigests = layerDigests
            };
        }
        catch (OciRegistryException ex)
        {
            _logger.LogError(ex, "OCI push failed: {Message}", ex.Message);
            return OciArtifactPushResult.Failed(ex.Message);
        }
        catch (HttpRequestException ex)
        {
            _logger.LogError(ex, "OCI push HTTP error: {Message}", ex.Message);
            return OciArtifactPushResult.Failed($"HTTP error: {ex.Message}");
        }
    }

    // Assemble the artifact manifest: empty config, supplied layers, optional
    // subject, and normalized annotations plus created/title stamps.
    private OciArtifactManifest BuildManifest(
        OciArtifactPushRequest request,
        string configDigest,
        IReadOnlyList<OciDescriptor> layers)
    {
        var annotations = NormalizeAnnotations(request.Annotations);
        if (annotations is null)
        {
            annotations = new SortedDictionary<string, string>(StringComparer.Ordinal);
        }
        // NOTE(review): stamping the current time makes the manifest bytes —
        // and so the manifest digest — vary between runs unless a fixed
        // TimeProvider is injected; confirm against the determinism rules.
        annotations["org.opencontainers.image.created"] = _timeProvider.GetUtcNow().ToString("O");
        annotations["org.opencontainers.image.title"] = request.ArtifactType;
        return new OciArtifactManifest
        {
            MediaType = OciMediaTypes.ArtifactManifest,
            ArtifactType = request.ArtifactType,
            Config = new OciDescriptor
            {
                MediaType = OciMediaTypes.EmptyConfig,
                Digest = configDigest,
                Size = EmptyConfigBlob.Length
            },
            Layers = layers,
            // NOTE(review): subject Size is 0 and the media type is assumed to
            // be an artifact manifest; the OCI spec expects the subject's real
            // descriptor — verify target registries accept this shape.
            Subject = string.IsNullOrWhiteSpace(request.SubjectDigest)
                ? null
                : new OciDescriptor
                {
                    MediaType = OciMediaTypes.ArtifactManifest,
                    Digest = request.SubjectDigest!,
                    Size = 0
                },
            Annotations = annotations
        };
    }

    // Push a single blob: HEAD to skip blobs the registry already has, then a
    // monolithic POST (start upload) + PUT (content with digest) sequence.
    private async Task<string> PushBlobAsync(
        OciImageReference reference,
        byte[] content,
        string mediaType,
        OciRegistryAuthorization auth,
        CancellationToken cancellationToken)
    {
        var digest = ComputeDigest(content);
        var blobUri = BuildRegistryUri(reference, $"blobs/{digest}");
        using (var head = new HttpRequestMessage(HttpMethod.Head, blobUri))
        {
            auth.ApplyTo(head);
            using var headResponse = await _httpClient.SendAsync(head, cancellationToken).ConfigureAwait(false);
            if (headResponse.IsSuccessStatusCode)
            {
                // Blob already exists; nothing to upload.
                return digest;
            }
            if (headResponse.StatusCode != HttpStatusCode.NotFound)
            {
                throw new OciRegistryException($"Blob HEAD failed with {headResponse.StatusCode}", "ERR_OCI_BLOB_HEAD");
            }
        }
        var startUploadUri = BuildRegistryUri(reference, "blobs/uploads/");
        using var postRequest = new HttpRequestMessage(HttpMethod.Post, startUploadUri);
        auth.ApplyTo(postRequest);
        using var postResponse = await _httpClient.SendAsync(postRequest, cancellationToken).ConfigureAwait(false);
        if (!postResponse.IsSuccessStatusCode)
        {
            throw new OciRegistryException($"Blob upload start failed with {postResponse.StatusCode}", "ERR_OCI_UPLOAD_START");
        }
        if (postResponse.Headers.Location is null)
        {
            throw new OciRegistryException("Blob upload start did not return a Location header.", "ERR_OCI_UPLOAD_LOCATION");
        }
        var uploadUri = ResolveUploadUri(reference, postResponse.Headers.Location);
        uploadUri = AppendDigest(uploadUri, digest);
        using var putRequest = new HttpRequestMessage(HttpMethod.Put, uploadUri)
        {
            Content = new ByteArrayContent(content)
        };
        putRequest.Content.Headers.ContentType = new MediaTypeHeaderValue(mediaType);
        auth.ApplyTo(putRequest);
        using var putResponse = await _httpClient.SendAsync(putRequest, cancellationToken).ConfigureAwait(false);
        if (!putResponse.IsSuccessStatusCode)
        {
            throw new OciRegistryException($"Blob upload failed with {putResponse.StatusCode}", "ERR_OCI_UPLOAD_PUT");
        }
        return digest;
    }

    // PUT the manifest bytes under the given tag (or digest pseudo-tag).
    private async Task PushManifestAsync(
        OciImageReference reference,
        byte[] manifestBytes,
        string tag,
        OciRegistryAuthorization auth,
        CancellationToken cancellationToken)
    {
        var manifestUri = BuildRegistryUri(reference, $"manifests/{tag}");
        using var request = new HttpRequestMessage(HttpMethod.Put, manifestUri)
        {
            Content = new ByteArrayContent(manifestBytes)
        };
        request.Content.Headers.ContentType = new MediaTypeHeaderValue(OciMediaTypes.ArtifactManifest);
        auth.ApplyTo(request);
        using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            throw new OciRegistryException($"Manifest upload failed with {response.StatusCode}", "ERR_OCI_MANIFEST");
        }
    }

    // Prefixed digest (e.g. "sha256:<hex>" — see the pseudo-tag strip above)
    // via the configured interop hash profile.
    private string ComputeDigest(ReadOnlySpan<byte> content)
    {
        return _cryptoHash.ComputePrefixedHashForPurpose(content, HashPurpose.Interop);
    }

    // Build a /v2/<repo>/<path> URI for the reference's registry.
    private Uri BuildRegistryUri(OciImageReference reference, string path)
    {
        var scheme = reference.Scheme;
        // NOTE(review): AllowInsecure forces plain HTTP for every registry,
        // including references that were explicitly https — confirm the flag
        // is meant to downgrade rather than merely permit http.
        if (_options.AllowInsecure)
        {
            scheme = "http";
        }
        return new Uri($"{scheme}://{reference.Registry}/v2/{reference.Repository}/{path}");
    }

    // Registries may return an absolute or registry-relative upload Location.
    private static Uri ResolveUploadUri(OciImageReference reference, Uri location)
    {
        if (location.IsAbsoluteUri)
        {
            return location;
        }
        return new Uri($"{reference.Scheme}://{reference.Registry}{location}");
    }

    // Add the digest query parameter required to finalize a monolithic upload,
    // unless the Location already carries one.
    private static Uri AppendDigest(Uri uploadUri, string digest)
    {
        if (uploadUri.Query.Contains("digest=", StringComparison.OrdinalIgnoreCase))
        {
            return uploadUri;
        }
        var delimiter = string.IsNullOrEmpty(uploadUri.Query) ? "?" : "&";
        var uri = new Uri($"{uploadUri}{delimiter}digest={Uri.EscapeDataString(digest)}");
        return uri;
    }

    // Trim keys/values, drop blank keys and null values, and ordinal-sort so
    // serialized annotation order is deterministic. Returns null when nothing
    // usable remains.
    private static SortedDictionary<string, string>? NormalizeAnnotations(IReadOnlyDictionary<string, string>? annotations)
    {
        if (annotations is null || annotations.Count == 0)
        {
            return null;
        }
        var normalized = new SortedDictionary<string, string>(StringComparer.Ordinal);
        foreach (var (key, value) in annotations)
        {
            if (string.IsNullOrWhiteSpace(key) || value is null)
            {
                continue;
            }
            normalized[key.Trim()] = value.Trim();
        }
        return normalized.Count == 0 ? null : normalized;
    }
}

View File

@@ -0,0 +1,121 @@
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Parsed OCI image reference: registry/repository with optional tag, digest,
/// and scheme override.
/// </summary>
public sealed partial record OciImageReference
{
    public required string Registry { get; init; }
    public required string Repository { get; init; }
    public string? Tag { get; init; }
    public string? Digest { get; init; }
    public string Scheme { get; init; } = "https";

    public bool HasDigest => !string.IsNullOrEmpty(Digest);
    public bool HasTag => !string.IsNullOrEmpty(Tag);

    /// <summary>Canonical form: digest wins over tag; untagged falls back to ":latest".</summary>
    public string Canonical =>
        HasDigest ? $"{Registry}/{Repository}@{Digest}"
        : HasTag ? $"{Registry}/{Repository}:{Tag}"
        : $"{Registry}/{Repository}:latest";

    public string RepositoryReference => $"{Registry}/{Repository}";

    /// <summary>
    /// Parse an image reference string; returns null for blank input.
    /// Accepts optional http(s):// prefix, "@digest", and ":tag" suffixes.
    /// </summary>
    public static OciImageReference? Parse(string reference, string defaultRegistry = "docker.io")
    {
        if (string.IsNullOrWhiteSpace(reference))
        {
            return null;
        }

        var remainder = reference.Trim();

        // Optional explicit scheme prefix.
        var scheme = "https";
        if (remainder.StartsWith("http://", StringComparison.OrdinalIgnoreCase))
        {
            scheme = "http";
            remainder = remainder[7..];
        }
        else if (remainder.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
        {
            remainder = remainder[8..];
        }

        // Digest portion; a leading '@' is not treated as a separator.
        string? digest = null;
        var atIndex = remainder.IndexOf('@');
        if (atIndex > 0)
        {
            digest = remainder[(atIndex + 1)..];
            remainder = remainder[..atIndex];
        }

        // Tag portion — only when no digest is present, and only when the text
        // after the last ':' is neither a path segment nor a port number.
        string? tag = null;
        if (digest is null)
        {
            var colonIndex = remainder.LastIndexOf(':');
            if (colonIndex > 0)
            {
                var candidate = remainder[(colonIndex + 1)..];
                if (!candidate.Contains('/') && !IsPortNumber(candidate))
                {
                    tag = candidate;
                    remainder = remainder[..colonIndex];
                }
            }
        }

        var (registry, repository) = SplitRegistryAndRepository(remainder, defaultRegistry);

        // Docker Hub short names implicitly live under "library/".
        if (registry == "docker.io" && !repository.Contains('/'))
        {
            repository = $"library/{repository}";
        }

        return new OciImageReference
        {
            Registry = registry,
            Repository = repository,
            Tag = tag,
            Digest = digest,
            Scheme = scheme
        };
    }

    // Decide whether the first path segment names a registry (dotted host,
    // host:port, or "localhost"); otherwise everything is the repository on
    // the default registry.
    private static (string Registry, string Repository) SplitRegistryAndRepository(string remainder, string defaultRegistry)
    {
        var slashIndex = remainder.IndexOf('/');
        if (slashIndex < 0)
        {
            // Bare name: dotted names pass through; plain names get "library/".
            return (defaultRegistry, remainder.Contains('.') ? remainder : $"library/{remainder}");
        }

        var head = remainder[..slashIndex];
        var looksLikeRegistry = head.Contains('.')
            || head.Contains(':')
            || head.Equals("localhost", StringComparison.OrdinalIgnoreCase);
        return looksLikeRegistry
            ? (head, remainder[(slashIndex + 1)..])
            : (defaultRegistry, remainder);
    }

    private static bool IsPortNumber(string value)
        => PortRegex().IsMatch(value);

    [GeneratedRegex("^\\d+$")]
    private static partial Regex PortRegex();
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Media type constants for OCI artifacts produced and consumed by the scanner.
/// </summary>
public static class OciMediaTypes
{
    public const string ArtifactManifest = "application/vnd.oci.artifact.manifest.v1+json";
    public const string EmptyConfig = "application/vnd.oci.empty.v1+json";
    public const string OctetStream = "application/octet-stream";
    public const string DsseEnvelope = "application/vnd.dsse.envelope.v1+json";
    public const string DeltaVerdictPredicate = "application/vnd.stellaops.delta-verdict.v1+json";
    public const string ReachabilitySubgraph = "application/vnd.stellaops.reachability-subgraph.v1+json";
    // Sprint: SPRINT_3850_0001_0001 - Slice storage
    public const string ReachabilitySlice = "application/vnd.stellaops.slice.v1+json";
    public const string SliceConfig = "application/vnd.stellaops.slice.config.v1+json";
    // NOTE(review): same value as ReachabilitySlice — consider consolidating
    // callers onto a single constant to avoid drift.
    public const string SliceArtifact = "application/vnd.stellaops.slice.v1+json";
}

View File

@@ -0,0 +1,103 @@
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// OCI content descriptor: media type, digest, size, and optional metadata.
/// </summary>
public sealed record OciDescriptor
{
    [JsonPropertyName("mediaType")]
    public required string MediaType { get; init; }

    [JsonPropertyName("digest")]
    public required string Digest { get; init; }

    [JsonPropertyName("size")]
    public required long Size { get; init; }

    [JsonPropertyName("annotations")]
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }

    [JsonPropertyName("artifactType")]
    public string? ArtifactType { get; init; }
}
/// <summary>
/// OCI artifact manifest document (schema version 2) with config, layers,
/// optional subject (referrer linkage), and annotations.
/// </summary>
public sealed record OciArtifactManifest
{
    [JsonPropertyName("schemaVersion")]
    public int SchemaVersion { get; init; } = 2;

    [JsonPropertyName("mediaType")]
    public string MediaType { get; init; } = OciMediaTypes.ArtifactManifest;

    [JsonPropertyName("artifactType")]
    public string? ArtifactType { get; init; }

    [JsonPropertyName("config")]
    public required OciDescriptor Config { get; init; }

    [JsonPropertyName("layers")]
    public IReadOnlyList<OciDescriptor> Layers { get; init; } = [];

    // When set, registries index this manifest as a referrer of the subject.
    [JsonPropertyName("subject")]
    public OciDescriptor? Subject { get; init; }

    [JsonPropertyName("annotations")]
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// In-memory layer payload to push as a blob, with its media type and
/// optional per-layer annotations.
/// </summary>
public sealed record OciLayerContent
{
    public required byte[] Content { get; init; }
    public required string MediaType { get; init; }
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// Inputs for an artifact push: target reference, artifact type, layer
/// payloads, optional subject digest, and manifest annotations.
/// </summary>
public sealed record OciArtifactPushRequest
{
    public required string Reference { get; init; }
    public required string ArtifactType { get; init; }
    public required IReadOnlyList<OciLayerContent> Layers { get; init; }
    public string? SubjectDigest { get; init; }
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// Outcome of an artifact push; digests are populated on success, and
/// <see cref="Error"/> on failure.
/// </summary>
public sealed record OciArtifactPushResult
{
    public required bool Success { get; init; }
    public string? ManifestDigest { get; init; }
    public string? ManifestReference { get; init; }
    public IReadOnlyList<string>? LayerDigests { get; init; }
    public string? Error { get; init; }

    /// <summary>Failed result carrying an error message and empty digests.</summary>
    public static OciArtifactPushResult Failed(string error)
        => new()
        {
            Success = false,
            Error = error,
            LayerDigests = Array.Empty<string>()
        };
}
/// <summary>
/// Registry client configuration: default registry, insecure-transport flag,
/// and credentials.
/// </summary>
public sealed class OciRegistryOptions
{
    public string DefaultRegistry { get; set; } = "docker.io";
    // When true, pushes use plain HTTP (see OciArtifactPusher.BuildRegistryUri).
    public bool AllowInsecure { get; set; }
    public OciRegistryAuthOptions Auth { get; set; } = new();
}
/// <summary>
/// Registry credentials. A bearer token takes precedence over basic
/// username/password when both are configured.
/// </summary>
public sealed class OciRegistryAuthOptions
{
    public string? Username { get; set; }
    public string? Password { get; set; }
    public string? Token { get; set; }
    public bool AllowAnonymousFallback { get; set; } = true;
}
/// <summary>
/// Registry protocol failure with a machine-readable error code
/// (e.g. "ERR_OCI_MANIFEST").
/// </summary>
public sealed class OciRegistryException : Exception
{
    public OciRegistryException(string message, string errorCode) : base(message)
    {
        ErrorCode = errorCode;
    }

    /// <summary>Stable code identifying which push step failed.</summary>
    public string ErrorCode { get; }
}

View File

@@ -0,0 +1,76 @@
using System.Net.Http.Headers;
using System.Text;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>How requests to an OCI registry are authenticated.</summary>
public enum OciRegistryAuthMode
{
    /// <summary>No credentials attached.</summary>
    Anonymous = 0,

    /// <summary>HTTP Basic authentication (username/password).</summary>
    Basic = 1,

    /// <summary>Bearer token authentication.</summary>
    BearerToken = 2
}
/// <summary>
/// Resolved authorization material for a single OCI registry, with helpers to
/// build it from configuration and apply it to outgoing HTTP requests.
/// </summary>
public sealed record OciRegistryAuthorization
{
    /// <summary>Registry host this authorization applies to.</summary>
    public required string Registry { get; init; }

    /// <summary>Authentication mode in effect.</summary>
    public required OciRegistryAuthMode Mode { get; init; }

    /// <summary>Basic-auth username (Basic mode).</summary>
    public string? Username { get; init; }

    /// <summary>Basic-auth password (Basic mode).</summary>
    public string? Password { get; init; }

    /// <summary>Bearer token (BearerToken mode).</summary>
    public string? Token { get; init; }

    /// <summary>Whether anonymous access may be attempted as a fallback.</summary>
    public bool AllowAnonymousFallback { get; init; }

    /// <summary>
    /// Builds an authorization from configured options. A non-blank token wins
    /// over username/password; with neither present the result is anonymous,
    /// and anonymous fallback is always allowed in that case.
    /// </summary>
    public static OciRegistryAuthorization FromOptions(string registry, OciRegistryAuthOptions options)
    {
        ArgumentNullException.ThrowIfNull(options);

        var hasToken = !string.IsNullOrWhiteSpace(options.Token);
        if (hasToken)
        {
            return new OciRegistryAuthorization
            {
                Registry = registry,
                Mode = OciRegistryAuthMode.BearerToken,
                Token = options.Token,
                AllowAnonymousFallback = options.AllowAnonymousFallback
            };
        }

        var hasBasicCredentials = !string.IsNullOrWhiteSpace(options.Username);
        if (hasBasicCredentials)
        {
            return new OciRegistryAuthorization
            {
                Registry = registry,
                Mode = OciRegistryAuthMode.Basic,
                Username = options.Username,
                Password = options.Password,
                AllowAnonymousFallback = options.AllowAnonymousFallback
            };
        }

        return new OciRegistryAuthorization
        {
            Registry = registry,
            Mode = OciRegistryAuthMode.Anonymous,
            AllowAnonymousFallback = true
        };
    }

    /// <summary>
    /// Sets the Authorization header on <paramref name="request"/> according to
    /// <see cref="Mode"/>. Anonymous mode — or Basic/Bearer with missing
    /// credentials — leaves the header untouched.
    /// </summary>
    public void ApplyTo(HttpRequestMessage request)
    {
        if (Mode == OciRegistryAuthMode.Basic && !string.IsNullOrEmpty(Username))
        {
            var rawCredentials = $"{Username}:{Password ?? string.Empty}";
            var encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(rawCredentials));
            request.Headers.Authorization = new AuthenticationHeaderValue("Basic", encoded);
        }
        else if (Mode == OciRegistryAuthMode.BearerToken && !string.IsNullOrEmpty(Token))
        {
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", Token);
        }
    }
}

View File

@@ -0,0 +1,577 @@
using System.Collections.Immutable;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Slices;
namespace StellaOps.Scanner.Storage.Oci.Offline;
/// <summary>
/// Options for offline bundle export and import operations.
/// </summary>
public sealed record OfflineBundleOptions
{
    /// <summary>
    /// Whether to include call graphs referenced by exported slices. Default: true.
    /// </summary>
    public bool IncludeGraphs { get; init; } = true;

    /// <summary>
    /// Whether to include SBOMs referenced by exported slices. Default: true.
    /// </summary>
    public bool IncludeSboms { get; init; } = true;

    /// <summary>
    /// Compression level for the bundle archive. Default: Optimal.
    /// </summary>
    public CompressionLevel CompressionLevel { get; init; } = CompressionLevel.Optimal;

    /// <summary>
    /// Whether to verify artifact digests on import. Default: true.
    /// </summary>
    public bool VerifyOnImport { get; init; } = true;
}
/// <summary>
/// Bundle manifest following OCI layout conventions (stored as index.json).
/// </summary>
public sealed record BundleManifest
{
    /// <summary>Manifest schema version.</summary>
    public required string SchemaVersion { get; init; }

    /// <summary>Creation timestamp (UTC).</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Scan the bundled slices belong to.</summary>
    public required string ScanId { get; init; }

    /// <summary>Content-addressed artifacts contained in the bundle.</summary>
    public required ImmutableArray<BundleArtifact> Artifacts { get; init; }

    /// <summary>Aggregate metrics over the bundle contents.</summary>
    public required BundleMetrics Metrics { get; init; }

    /// <summary>
    /// Digest of this manifest. Computed over the serialized manifest with this
    /// field empty, then patched in (see OfflineBundleService.ExportAsync).
    /// </summary>
    public required string ManifestDigest { get; init; }
}
/// <summary>
/// Artifact entry in a bundle manifest.
/// </summary>
public sealed record BundleArtifact
{
    /// <summary>Content digest in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }

    /// <summary>Media type used to route the artifact on import.</summary>
    public required string MediaType { get; init; }

    /// <summary>Artifact size in bytes.</summary>
    public required long Size { get; init; }

    /// <summary>Path of the blob inside the bundle (relative, forward slashes).</summary>
    public required string Path { get; init; }

    /// <summary>Optional annotations (e.g. CVE id and verdict for slices).</summary>
    public ImmutableDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// Metrics about bundle contents.
/// </summary>
public sealed record BundleMetrics
{
    /// <summary>Number of reachability slices in the bundle.</summary>
    public int SliceCount { get; init; }

    /// <summary>Number of call graphs in the bundle.</summary>
    public int GraphCount { get; init; }

    /// <summary>Number of SBOMs in the bundle.</summary>
    public int SbomCount { get; init; }

    /// <summary>Total size of all artifacts in bytes.</summary>
    public long TotalSize { get; init; }
}
/// <summary>
/// Result of a bundle export operation.
/// </summary>
public sealed record BundleExportResult
{
    /// <summary>Whether the export completed successfully.</summary>
    public required bool Success { get; init; }

    /// <summary>Path of the written bundle file, on success.</summary>
    public string? BundlePath { get; init; }

    /// <summary>Digest ("sha256:&lt;hex&gt;") of the bundle file, on success.</summary>
    public string? BundleDigest { get; init; }

    /// <summary>Content metrics, on success.</summary>
    public BundleMetrics? Metrics { get; init; }

    /// <summary>Error description, on failure.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Result of a bundle import operation.
/// </summary>
public sealed record BundleImportResult
{
    /// <summary>Whether the import completed successfully.</summary>
    public required bool Success { get; init; }

    /// <summary>Number of slices stored (0 on dry run).</summary>
    public int SlicesImported { get; init; }

    /// <summary>Number of call graphs stored (0 on dry run).</summary>
    public int GraphsImported { get; init; }

    /// <summary>Number of SBOMs stored (0 on dry run).</summary>
    public int SbomsImported { get; init; }

    /// <summary>Whether artifact digests were verified against the manifest.</summary>
    public bool IntegrityVerified { get; init; }

    /// <summary>Error description, on failure.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Provider interface for slice storage operations used by bundle export/import.
/// </summary>
public interface ISliceStorageProvider
{
    /// <summary>Returns all slices recorded for the given scan.</summary>
    Task<IReadOnlyList<ReachabilitySlice>> GetSlicesForScanAsync(string scanId, CancellationToken cancellationToken = default);

    /// <summary>Returns raw call-graph bytes by digest, or null when absent.</summary>
    Task<byte[]?> GetGraphAsync(string digest, CancellationToken cancellationToken = default);

    /// <summary>Returns raw SBOM bytes by digest, or null when absent.</summary>
    Task<byte[]?> GetSbomAsync(string digest, CancellationToken cancellationToken = default);

    /// <summary>Persists a reachability slice.</summary>
    Task StoreSliceAsync(ReachabilitySlice slice, CancellationToken cancellationToken = default);

    /// <summary>Persists raw call-graph bytes under the given digest.</summary>
    Task StoreGraphAsync(string digest, byte[] data, CancellationToken cancellationToken = default);

    /// <summary>Persists raw SBOM bytes under the given digest.</summary>
    Task StoreSbomAsync(string digest, byte[] data, CancellationToken cancellationToken = default);
}
/// <summary>
/// Service for offline bundle export and import operations. A bundle is a
/// gzip-compressed archive containing content-addressed blobs under
/// <c>blobs/sha256</c> plus an <c>index.json</c> manifest.
/// Sprint: SPRINT_3850_0001_0001
/// Task: T8
/// </summary>
public sealed class OfflineBundleService
{
    private const string SchemaVersion = "1.0.0";
    private const string BlobsDirectory = "blobs/sha256";
    private const string ManifestFile = "index.json";

    private readonly ISliceStorageProvider _storage;
    private readonly OfflineBundleOptions _options;
    private readonly ILogger<OfflineBundleService> _logger;
    private readonly TimeProvider _timeProvider;

    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    public OfflineBundleService(
        ISliceStorageProvider storage,
        OfflineBundleOptions? options = null,
        ILogger<OfflineBundleService>? logger = null,
        TimeProvider? timeProvider = null)
    {
        _storage = storage ?? throw new ArgumentNullException(nameof(storage));
        _options = options ?? new OfflineBundleOptions();
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<OfflineBundleService>.Instance;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Export all slices for <paramref name="scanId"/> (plus referenced graphs
    /// and SBOMs when enabled by options) into a bundle at <paramref name="outputPath"/>.
    /// </summary>
    public async Task<BundleExportResult> ExportAsync(
        string scanId,
        string outputPath,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);

        try
        {
            _logger.LogInformation("Exporting slices for scan {ScanId} to {OutputPath}", scanId, outputPath);

            var slices = await _storage.GetSlicesForScanAsync(scanId, cancellationToken).ConfigureAwait(false);
            if (slices.Count == 0)
            {
                return new BundleExportResult
                {
                    Success = false,
                    Error = $"No slices found for scan {scanId}"
                };
            }

            // Stage the OCI-layout-like structure in a temp directory before packing.
            var tempDir = Path.Combine(Path.GetTempPath(), $"stellaops-bundle-{Guid.NewGuid():N}");
            Directory.CreateDirectory(tempDir);
            var blobsDir = Path.Combine(tempDir, BlobsDirectory);
            Directory.CreateDirectory(blobsDir);

            try
            {
                var artifacts = new List<BundleArtifact>();
                var graphDigests = new HashSet<string>(StringComparer.Ordinal);
                var sbomDigests = new HashSet<string>(StringComparer.Ordinal);

                // Write each slice as a content-addressed JSON blob.
                foreach (var slice in slices)
                {
                    var sliceJson = JsonSerializer.Serialize(slice, JsonOptions);
                    var sliceBytes = Encoding.UTF8.GetBytes(sliceJson);
                    var sliceDigest = ComputeDigest(sliceBytes);
                    var slicePath = Path.Combine(blobsDir, sliceDigest);
                    await File.WriteAllBytesAsync(slicePath, sliceBytes, cancellationToken).ConfigureAwait(false);

                    artifacts.Add(new BundleArtifact
                    {
                        Digest = $"sha256:{sliceDigest}",
                        MediaType = OciMediaTypes.ReachabilitySlice,
                        Size = sliceBytes.Length,
                        Path = $"{BlobsDirectory}/{sliceDigest}",
                        Annotations = ImmutableDictionary<string, string>.Empty
                            .Add("stellaops.slice.cveId", slice.Query?.CveId ?? "unknown")
                            .Add("stellaops.slice.verdict", slice.Verdict?.Status.ToString() ?? "unknown")
                    });

                    // Collect referenced graphs and SBOMs for the optional sections below.
                    if (!string.IsNullOrEmpty(slice.GraphDigest))
                    {
                        graphDigests.Add(slice.GraphDigest);
                    }
                    if (!string.IsNullOrEmpty(slice.SbomDigest))
                    {
                        sbomDigests.Add(slice.SbomDigest);
                    }
                }

                if (_options.IncludeGraphs)
                {
                    foreach (var graphDigest in graphDigests)
                    {
                        var graphData = await _storage.GetGraphAsync(graphDigest, cancellationToken).ConfigureAwait(false);
                        if (graphData != null)
                        {
                            var digest = ComputeDigest(graphData);
                            var graphPath = Path.Combine(blobsDir, digest);
                            await File.WriteAllBytesAsync(graphPath, graphData, cancellationToken).ConfigureAwait(false);
                            artifacts.Add(new BundleArtifact
                            {
                                Digest = $"sha256:{digest}",
                                MediaType = OciMediaTypes.ReachabilitySubgraph,
                                Size = graphData.Length,
                                Path = $"{BlobsDirectory}/{digest}"
                            });
                        }
                    }
                }

                if (_options.IncludeSboms)
                {
                    foreach (var sbomDigest in sbomDigests)
                    {
                        var sbomData = await _storage.GetSbomAsync(sbomDigest, cancellationToken).ConfigureAwait(false);
                        if (sbomData != null)
                        {
                            var digest = ComputeDigest(sbomData);
                            var sbomPath = Path.Combine(blobsDir, digest);
                            await File.WriteAllBytesAsync(sbomPath, sbomData, cancellationToken).ConfigureAwait(false);
                            artifacts.Add(new BundleArtifact
                            {
                                Digest = $"sha256:{digest}",
                                MediaType = "application/spdx+json",
                                Size = sbomData.Length,
                                Path = $"{BlobsDirectory}/{digest}"
                            });
                        }
                    }
                }

                var metrics = new BundleMetrics
                {
                    SliceCount = slices.Count,
                    GraphCount = _options.IncludeGraphs ? graphDigests.Count : 0,
                    SbomCount = _options.IncludeSboms ? sbomDigests.Count : 0,
                    TotalSize = artifacts.Sum(a => a.Size)
                };

                // Serialize once with an empty digest, compute the digest over
                // that form, then patch it in and serialize again.
                var manifest = new BundleManifest
                {
                    SchemaVersion = SchemaVersion,
                    CreatedAt = _timeProvider.GetUtcNow(),
                    ScanId = scanId,
                    Artifacts = artifacts.ToImmutableArray(),
                    Metrics = metrics,
                    ManifestDigest = ""
                };
                var manifestJson = JsonSerializer.Serialize(manifest, JsonOptions);
                var manifestDigest = ComputeDigest(Encoding.UTF8.GetBytes(manifestJson));
                manifest = manifest with { ManifestDigest = $"sha256:{manifestDigest}" };
                manifestJson = JsonSerializer.Serialize(manifest, JsonOptions);
                await File.WriteAllTextAsync(
                    Path.Combine(tempDir, ManifestFile),
                    manifestJson,
                    cancellationToken).ConfigureAwait(false);

                // Pack the staged layout into the compressed bundle.
                using (var fs = File.Create(outputPath))
                using (var gzip = new GZipStream(fs, _options.CompressionLevel))
                {
                    await CreateTarAsync(tempDir, gzip, cancellationToken).ConfigureAwait(false);
                }

                var bundleDigest = ComputeFileDigest(outputPath);
                _logger.LogInformation(
                    "Bundle exported: {SliceCount} slices, {GraphCount} graphs, {SbomCount} SBOMs, {TotalSize:N0} bytes",
                    metrics.SliceCount, metrics.GraphCount, metrics.SbomCount, metrics.TotalSize);

                return new BundleExportResult
                {
                    Success = true,
                    BundlePath = outputPath,
                    BundleDigest = $"sha256:{bundleDigest}",
                    Metrics = metrics
                };
            }
            finally
            {
                try { Directory.Delete(tempDir, true); } catch { /* Ignore cleanup errors */ }
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to export bundle for scan {ScanId}", scanId);
            return new BundleExportResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Import slices (and referenced graphs/SBOMs) from a bundle file. With
    /// <paramref name="dryRun"/> the bundle is only extracted and (optionally)
    /// verified; nothing is stored.
    /// </summary>
    public async Task<BundleImportResult> ImportAsync(
        string bundlePath,
        bool dryRun = false,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath);

        if (!File.Exists(bundlePath))
        {
            return new BundleImportResult
            {
                Success = false,
                Error = $"Bundle not found: {bundlePath}"
            };
        }

        try
        {
            _logger.LogInformation("Importing bundle from {BundlePath} (dry run: {DryRun})", bundlePath, dryRun);

            var tempDir = Path.Combine(Path.GetTempPath(), $"stellaops-import-{Guid.NewGuid():N}");
            Directory.CreateDirectory(tempDir);

            try
            {
                await using (var fs = File.OpenRead(bundlePath))
                await using (var gzip = new GZipStream(fs, CompressionMode.Decompress))
                {
                    await ExtractTarAsync(gzip, tempDir, cancellationToken).ConfigureAwait(false);
                }

                var manifestPath = Path.Combine(tempDir, ManifestFile);
                if (!File.Exists(manifestPath))
                {
                    return new BundleImportResult
                    {
                        Success = false,
                        Error = "Bundle manifest not found"
                    };
                }

                var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken).ConfigureAwait(false);
                var manifest = JsonSerializer.Deserialize<BundleManifest>(manifestJson, JsonOptions);
                if (manifest == null)
                {
                    return new BundleImportResult
                    {
                        Success = false,
                        Error = "Failed to parse bundle manifest"
                    };
                }

                bool integrityVerified = false;
                if (_options.VerifyOnImport)
                {
                    integrityVerified = await VerifyBundleIntegrityAsync(tempDir, manifest, cancellationToken)
                        .ConfigureAwait(false);
                    if (!integrityVerified)
                    {
                        return new BundleImportResult
                        {
                            Success = false,
                            Error = "Bundle integrity verification failed"
                        };
                    }
                }

                if (dryRun)
                {
                    _logger.LogInformation(
                        "Dry run: would import {SliceCount} slices, {GraphCount} graphs, {SbomCount} SBOMs",
                        manifest.Metrics.SliceCount,
                        manifest.Metrics.GraphCount,
                        manifest.Metrics.SbomCount);
                    return new BundleImportResult
                    {
                        Success = true,
                        SlicesImported = 0,
                        GraphsImported = 0,
                        SbomsImported = 0,
                        IntegrityVerified = integrityVerified
                    };
                }

                int slicesImported = 0, graphsImported = 0, sbomsImported = 0;
                foreach (var artifact in manifest.Artifacts)
                {
                    var artifactPath = Path.Combine(tempDir, artifact.Path);
                    if (!File.Exists(artifactPath))
                    {
                        _logger.LogWarning("Artifact not found in bundle: {Path}", artifact.Path);
                        continue;
                    }

                    var data = await File.ReadAllBytesAsync(artifactPath, cancellationToken).ConfigureAwait(false);

                    // Route by media type: slices are parsed, graphs/SBOMs stored raw.
                    if (artifact.MediaType == OciMediaTypes.ReachabilitySlice)
                    {
                        var slice = JsonSerializer.Deserialize<ReachabilitySlice>(data, JsonOptions);
                        if (slice != null)
                        {
                            await _storage.StoreSliceAsync(slice, cancellationToken).ConfigureAwait(false);
                            slicesImported++;
                        }
                    }
                    else if (artifact.MediaType == OciMediaTypes.ReachabilitySubgraph)
                    {
                        await _storage.StoreGraphAsync(artifact.Digest, data, cancellationToken).ConfigureAwait(false);
                        graphsImported++;
                    }
                    else if (artifact.MediaType.Contains("spdx") || artifact.MediaType.Contains("cyclonedx"))
                    {
                        await _storage.StoreSbomAsync(artifact.Digest, data, cancellationToken).ConfigureAwait(false);
                        sbomsImported++;
                    }
                }

                _logger.LogInformation(
                    "Bundle imported: {SliceCount} slices, {GraphCount} graphs, {SbomCount} SBOMs",
                    slicesImported, graphsImported, sbomsImported);

                return new BundleImportResult
                {
                    Success = true,
                    SlicesImported = slicesImported,
                    GraphsImported = graphsImported,
                    SbomsImported = sbomsImported,
                    IntegrityVerified = integrityVerified
                };
            }
            finally
            {
                try { Directory.Delete(tempDir, true); } catch { /* Ignore cleanup errors */ }
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to import bundle from {BundlePath}", bundlePath);
            return new BundleImportResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    // Recomputes each artifact's digest and compares it against the manifest entry.
    private async Task<bool> VerifyBundleIntegrityAsync(
        string tempDir,
        BundleManifest manifest,
        CancellationToken cancellationToken)
    {
        foreach (var artifact in manifest.Artifacts)
        {
            var artifactPath = Path.Combine(tempDir, artifact.Path);
            if (!File.Exists(artifactPath))
            {
                _logger.LogWarning("Missing artifact: {Path}", artifact.Path);
                return false;
            }

            var data = await File.ReadAllBytesAsync(artifactPath, cancellationToken).ConfigureAwait(false);
            var actualDigest = $"sha256:{ComputeDigest(data)}";
            if (!string.Equals(actualDigest, artifact.Digest, StringComparison.OrdinalIgnoreCase))
            {
                _logger.LogWarning(
                    "Digest mismatch for {Path}: expected {Expected}, got {Actual}",
                    artifact.Path, artifact.Digest, actualDigest);
                return false;
            }
        }
        return true;
    }

    // Lowercase hex SHA-256 (no "sha256:" prefix).
    private static string ComputeDigest(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static string ComputeFileDigest(string path)
    {
        using var fs = File.OpenRead(path);
        var hash = SHA256.HashData(fs);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Simplified container format - in production, use a proper tar library.
    // Each entry is a UTF-8 header "FILE:{relativePath}:{size}\n" followed by
    // exactly {size} content bytes. Files are sorted for deterministic output.
    private static async Task CreateTarAsync(string sourceDir, Stream output, CancellationToken cancellationToken)
    {
        var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories);
        Array.Sort(files, StringComparer.Ordinal);
        foreach (var file in files)
        {
            var relativePath = Path.GetRelativePath(sourceDir, file).Replace('\\', '/');
            var content = await File.ReadAllBytesAsync(file, cancellationToken).ConfigureAwait(false);
            var header = Encoding.UTF8.GetBytes($"FILE:{relativePath}:{content.Length}\n");
            await output.WriteAsync(header, cancellationToken).ConfigureAwait(false);
            await output.WriteAsync(content, cancellationToken).ConfigureAwait(false);
        }
    }

    // Counterpart of CreateTarAsync. Headers are parsed byte-wise so header and
    // content offsets stay in sync (a buffered text reader would over-read the
    // stream and desynchronize the positions).
    private static async Task ExtractTarAsync(Stream input, string targetDir, CancellationToken cancellationToken)
    {
        using var buffer = new MemoryStream();
        await input.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        buffer.Position = 0;

        var rootPath = Path.GetFullPath(targetDir + Path.DirectorySeparatorChar);

        while (buffer.Position < buffer.Length)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var headerLine = ReadHeaderLine(buffer);
            if (headerLine is null || !headerLine.StartsWith("FILE:", StringComparison.Ordinal))
            {
                break;
            }

            // Split on the LAST colon so relative paths containing ':' survive.
            var payload = headerLine[5..];
            var separator = payload.LastIndexOf(':');
            if (separator <= 0 || !int.TryParse(payload[(separator + 1)..], out var size) || size < 0)
            {
                break;
            }

            var relativePath = payload[..separator];
            var fullPath = Path.GetFullPath(Path.Combine(targetDir, relativePath));
            if (!fullPath.StartsWith(rootPath, StringComparison.Ordinal))
            {
                // Zip-slip guard: never write outside the extraction root.
                throw new InvalidDataException($"Bundle entry escapes extraction root: {relativePath}");
            }

            Directory.CreateDirectory(Path.GetDirectoryName(fullPath)!);

            var content = new byte[size];
            buffer.ReadExactly(content, 0, size);
            await File.WriteAllBytesAsync(fullPath, content, cancellationToken).ConfigureAwait(false);
        }
    }

    // Reads UTF-8 bytes up to (and consuming) the next '\n'; null at end of stream.
    private static string? ReadHeaderLine(MemoryStream stream)
    {
        var bytes = new List<byte>();
        int b;
        while ((b = stream.ReadByte()) != -1)
        {
            if (b == '\n')
            {
                return Encoding.UTF8.GetString(bytes.ToArray());
            }
            bytes.Add((byte)b);
        }
        return bytes.Count == 0 ? null : Encoding.UTF8.GetString(bytes.ToArray());
    }
}

View File

@@ -0,0 +1,130 @@
using System.Text.Json;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Builds OCI manifests for reachability slices.
/// Sprint: SPRINT_3850_0001_0001
/// </summary>
public sealed class SliceOciManifestBuilder
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    /// <summary>
    /// Build an OCI push request for a slice artifact: a slice JSON layer, an
    /// optional DSSE envelope layer, and query-derived manifest annotations.
    /// </summary>
    public OciArtifactPushRequest BuildSlicePushRequest(SliceArtifactInput input)
    {
        ArgumentNullException.ThrowIfNull(input);
        ArgumentNullException.ThrowIfNull(input.Slice);
        ArgumentException.ThrowIfNullOrWhiteSpace(input.Reference);

        var layers = new List<OciLayerContent>
        {
            BuildSliceLayer(input.Slice, input.SliceQuery)
        };

        if (input.DsseEnvelope is not null)
        {
            layers.Add(BuildDsseLayer(input.DsseEnvelope));
        }

        var annotations = BuildAnnotations(input.SliceQuery, input.Slice);

        return new OciArtifactPushRequest
        {
            Reference = input.Reference,
            ArtifactType = OciMediaTypes.SliceArtifact,
            Layers = layers,
            SubjectDigest = input.SubjectImageDigest,
            Annotations = annotations
        };
    }

    // Serializes the slice as a JSON layer with query-derived layer annotations.
    // Stateless, hence static (CA1822).
    private static OciLayerContent BuildSliceLayer(object slice, SliceQueryMetadata? query)
    {
        var sliceJson = JsonSerializer.SerializeToUtf8Bytes(slice, SerializerOptions);
        var annotations = new Dictionary<string, string>();

        if (query is not null)
        {
            if (!string.IsNullOrWhiteSpace(query.CveId))
                annotations["org.stellaops.slice.cve"] = query.CveId;
            if (!string.IsNullOrWhiteSpace(query.Purl))
                annotations["org.stellaops.slice.purl"] = query.Purl;
            if (!string.IsNullOrWhiteSpace(query.Verdict))
                annotations["org.stellaops.slice.verdict"] = query.Verdict;
        }

        return new OciLayerContent
        {
            Content = sliceJson,
            MediaType = OciMediaTypes.ReachabilitySlice,
            Annotations = annotations
        };
    }

    // Wraps a pre-built DSSE envelope as an attestation layer.
    private static OciLayerContent BuildDsseLayer(byte[] dsseEnvelope)
    {
        return new OciLayerContent
        {
            Content = dsseEnvelope,
            MediaType = OciMediaTypes.DsseEnvelope,
            Annotations = new Dictionary<string, string>
            {
                ["org.stellaops.attestation.type"] = "in-toto/dsse"
            }
        };
    }

    // Manifest-level annotations; note these use the "slice.query.*" key prefix,
    // distinct from the layer-level "slice.*" keys in BuildSliceLayer.
    private static Dictionary<string, string> BuildAnnotations(SliceQueryMetadata? query, object slice)
    {
        var annotations = new Dictionary<string, string>
        {
            ["org.opencontainers.image.vendor"] = "StellaOps",
            ["org.stellaops.artifact.type"] = "reachability-slice"
        };

        if (query is not null)
        {
            if (!string.IsNullOrWhiteSpace(query.CveId))
                annotations["org.stellaops.slice.query.cve"] = query.CveId;
            if (!string.IsNullOrWhiteSpace(query.Purl))
                annotations["org.stellaops.slice.query.purl"] = query.Purl;
            if (!string.IsNullOrWhiteSpace(query.ScanId))
                annotations["org.stellaops.slice.scan-id"] = query.ScanId;
        }

        return annotations;
    }
}
/// <summary>
/// Input for building a slice OCI artifact.
/// </summary>
public sealed record SliceArtifactInput
{
    /// <summary>Target registry reference for the push.</summary>
    public required string Reference { get; init; }

    /// <summary>Slice payload; serialized to JSON as the primary layer.</summary>
    public required object Slice { get; init; }

    /// <summary>Optional pre-built DSSE envelope, added as an extra layer.</summary>
    public byte[]? DsseEnvelope { get; init; }

    /// <summary>Optional query metadata used for layer/manifest annotations.</summary>
    public SliceQueryMetadata? SliceQuery { get; init; }

    /// <summary>Optional digest of the subject image the artifact refers to.</summary>
    public string? SubjectImageDigest { get; init; }
}
/// <summary>
/// Query metadata for slice annotations.
/// </summary>
public sealed record SliceQueryMetadata
{
    /// <summary>CVE identifier the slice answers.</summary>
    public string? CveId { get; init; }

    /// <summary>Package URL the slice was computed for.</summary>
    public string? Purl { get; init; }

    /// <summary>Reachability verdict recorded as an annotation.</summary>
    public string? Verdict { get; init; }

    /// <summary>Originating scan identifier.</summary>
    public string? ScanId { get; init; }
}

View File

@@ -0,0 +1,474 @@
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Slices;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Options for slice pulling operations (see SlicePullService).
/// </summary>
public sealed record SlicePullOptions
{
    /// <summary>
    /// Whether to verify the DSSE signature on retrieval. Default: true.
    /// </summary>
    public bool VerifySignature { get; init; } = true;

    /// <summary>
    /// Whether to cache pulled slices in memory. Default: true.
    /// </summary>
    public bool EnableCache { get; init; } = true;

    /// <summary>
    /// Time-to-live for cached slices. Default: 1 hour.
    /// </summary>
    public TimeSpan CacheTtl { get; init; } = TimeSpan.FromHours(1);

    /// <summary>
    /// HTTP request timeout applied to the client. Default: 30 seconds.
    /// </summary>
    public TimeSpan RequestTimeout { get; init; } = TimeSpan.FromSeconds(30);
}
/// <summary>
/// Result of a slice pull operation.
/// </summary>
public sealed record SlicePullResult
{
    /// <summary>Whether the pull completed successfully.</summary>
    public required bool Success { get; init; }

    /// <summary>The retrieved slice, on success.</summary>
    public ReachabilitySlice? Slice { get; init; }

    /// <summary>Digest the slice was requested by.</summary>
    public string? SliceDigest { get; init; }

    /// <summary>Raw DSSE envelope bytes, when present in the manifest.</summary>
    public byte[]? DsseEnvelope { get; init; }

    /// <summary>Error description, on failure.</summary>
    public string? Error { get; init; }

    /// <summary>True when the slice was served from the in-memory cache.</summary>
    public bool FromCache { get; init; }

    /// <summary>True when the DSSE signature was verified.</summary>
    public bool SignatureVerified { get; init; }
}
/// <summary>
/// Service for pulling reachability slices from OCI registries.
/// Supports content-addressed retrieval and DSSE signature verification.
/// Sprint: SPRINT_3850_0001_0001
/// </summary>
public sealed class SlicePullService : IDisposable
{
private readonly HttpClient _httpClient;
private readonly OciRegistryAuthorization _authorization;
private readonly SlicePullOptions _options;
private readonly ILogger<SlicePullService> _logger;
private readonly Dictionary<string, CachedSlice> _cache = new(StringComparer.Ordinal);
private readonly Lock _cacheLock = new();
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
public SlicePullService(
HttpClient httpClient,
OciRegistryAuthorization authorization,
SlicePullOptions? options = null,
ILogger<SlicePullService>? logger = null)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_authorization = authorization ?? throw new ArgumentNullException(nameof(authorization));
_options = options ?? new SlicePullOptions();
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<SlicePullService>.Instance;
_httpClient.Timeout = _options.RequestTimeout;
}
/// <summary>
/// Pull a slice by its content-addressed digest.
/// </summary>
public async Task<SlicePullResult> PullByDigestAsync(
OciImageReference reference,
string digest,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(reference);
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
var cacheKey = $"{reference.Registry}/{reference.Repository}@{digest}";
// Check cache
if (_options.EnableCache && TryGetFromCache(cacheKey, out var cached))
{
_logger.LogDebug("Cache hit for slice {Digest}", digest);
return new SlicePullResult
{
Success = true,
Slice = cached!.Slice,
SliceDigest = digest,
DsseEnvelope = cached.DsseEnvelope,
FromCache = true,
SignatureVerified = cached.SignatureVerified
};
}
try
{
_logger.LogInformation("Pulling slice {Reference}@{Digest}", reference, digest);
// Get manifest first
var manifestUrl = $"https://{reference.Registry}/v2/{reference.Repository}/manifests/{digest}";
using var manifestRequest = new HttpRequestMessage(HttpMethod.Get, manifestUrl);
manifestRequest.Headers.Accept.ParseAdd(OciMediaTypes.ArtifactManifest);
await _authorization.AuthorizeRequestAsync(manifestRequest, reference, cancellationToken)
.ConfigureAwait(false);
using var manifestResponse = await _httpClient.SendAsync(manifestRequest, cancellationToken)
.ConfigureAwait(false);
if (!manifestResponse.IsSuccessStatusCode)
{
return new SlicePullResult
{
Success = false,
Error = $"Failed to fetch manifest: {manifestResponse.StatusCode}"
};
}
var manifest = await manifestResponse.Content.ReadFromJsonAsync<OciManifest>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
if (manifest == null)
{
return new SlicePullResult
{
Success = false,
Error = "Failed to parse manifest"
};
}
// Find slice layer
var sliceLayer = manifest.Layers?.FirstOrDefault(l =>
l.MediaType == OciMediaTypes.ReachabilitySlice ||
l.MediaType == OciMediaTypes.SliceArtifact);
if (sliceLayer == null)
{
return new SlicePullResult
{
Success = false,
Error = "No slice layer found in manifest"
};
}
// Fetch slice blob
var blobUrl = $"https://{reference.Registry}/v2/{reference.Repository}/blobs/{sliceLayer.Digest}";
using var blobRequest = new HttpRequestMessage(HttpMethod.Get, blobUrl);
await _authorization.AuthorizeRequestAsync(blobRequest, reference, cancellationToken)
.ConfigureAwait(false);
using var blobResponse = await _httpClient.SendAsync(blobRequest, cancellationToken)
.ConfigureAwait(false);
if (!blobResponse.IsSuccessStatusCode)
{
return new SlicePullResult
{
Success = false,
Error = $"Failed to fetch blob: {blobResponse.StatusCode}"
};
}
var sliceBytes = await blobResponse.Content.ReadAsByteArrayAsync(cancellationToken)
.ConfigureAwait(false);
// Verify digest
var computedDigest = ComputeDigest(sliceBytes);
if (!string.Equals(computedDigest, sliceLayer.Digest, StringComparison.OrdinalIgnoreCase))
{
return new SlicePullResult
{
Success = false,
Error = $"Digest mismatch: expected {sliceLayer.Digest}, got {computedDigest}"
};
}
// Parse slice
var slice = JsonSerializer.Deserialize<ReachabilitySlice>(sliceBytes, JsonOptions);
if (slice == null)
{
return new SlicePullResult
{
Success = false,
Error = "Failed to parse slice JSON"
};
}
// Check for DSSE envelope layer and verify if present
byte[]? dsseEnvelope = null;
bool signatureVerified = false;
var dsseLayer = manifest.Layers?.FirstOrDefault(l =>
l.MediaType == OciMediaTypes.DsseEnvelope);
if (dsseLayer != null && _options.VerifySignature)
{
var dsseResult = await FetchAndVerifyDsseAsync(reference, dsseLayer.Digest, sliceBytes, cancellationToken)
.ConfigureAwait(false);
dsseEnvelope = dsseResult.Envelope;
signatureVerified = dsseResult.Verified;
}
// Cache result
if (_options.EnableCache)
{
AddToCache(cacheKey, new CachedSlice
{
Slice = slice,
DsseEnvelope = dsseEnvelope,
SignatureVerified = signatureVerified,
ExpiresAt = DateTimeOffset.UtcNow.Add(_options.CacheTtl)
});
}
_logger.LogInformation(
"Successfully pulled slice {Digest} ({Size} bytes, signature verified: {Verified})",
digest, sliceBytes.Length, signatureVerified);
return new SlicePullResult
{
Success = true,
Slice = slice,
SliceDigest = digest,
DsseEnvelope = dsseEnvelope,
FromCache = false,
SignatureVerified = signatureVerified
};
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException)
{
_logger.LogError(ex, "Failed to pull slice {Reference}@{Digest}", reference, digest);
return new SlicePullResult
{
Success = false,
Error = ex.Message
};
}
}
/// <summary>
/// Pull a slice by tag.
/// </summary>
public async Task<SlicePullResult> PullByTagAsync(
OciImageReference reference,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(reference);
if (string.IsNullOrEmpty(reference.Tag))
{
return new SlicePullResult
{
Success = false,
Error = "Tag is required"
};
}
try
{
// Resolve tag to digest
var manifestUrl = $"https://{reference.Registry}/v2/{reference.Repository}/manifests/{reference.Tag}";
using var request = new HttpRequestMessage(HttpMethod.Head, manifestUrl);
request.Headers.Accept.ParseAdd(OciMediaTypes.ArtifactManifest);
await _authorization.AuthorizeRequestAsync(request, reference, cancellationToken)
.ConfigureAwait(false);
using var response = await _httpClient.SendAsync(request, cancellationToken)
.ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
return new SlicePullResult
{
Success = false,
Error = $"Failed to resolve tag: {response.StatusCode}"
};
}
var digest = response.Headers.GetValues("Docker-Content-Digest").FirstOrDefault();
if (string.IsNullOrEmpty(digest))
{
return new SlicePullResult
{
Success = false,
Error = "No digest in response headers"
};
}
return await PullByDigestAsync(reference, digest, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
_logger.LogError(ex, "Failed to pull slice by tag {Reference}", reference);
return new SlicePullResult
{
Success = false,
Error = ex.Message
};
}
}
/// <summary>
/// List referrers (related artifacts) for a given digest.
/// </summary>
public async Task<IReadOnlyList<OciReferrer>> ListReferrersAsync(
OciImageReference reference,
string digest,
string? artifactType = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(reference);
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
try
{
var referrersUrl = $"https://{reference.Registry}/v2/{reference.Repository}/referrers/{digest}";
if (!string.IsNullOrEmpty(artifactType))
{
referrersUrl += $"?artifactType={Uri.EscapeDataString(artifactType)}";
}
using var request = new HttpRequestMessage(HttpMethod.Get, referrersUrl);
await _authorization.AuthorizeRequestAsync(request, reference, cancellationToken)
.ConfigureAwait(false);
using var response = await _httpClient.SendAsync(request, cancellationToken)
.ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
_logger.LogWarning("Failed to list referrers for {Digest}: {Status}", digest, response.StatusCode);
return Array.Empty<OciReferrer>();
}
var index = await response.Content.ReadFromJsonAsync<OciReferrersIndex>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return index?.Manifests ?? Array.Empty<OciReferrer>();
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
_logger.LogError(ex, "Failed to list referrers for {Digest}", digest);
return Array.Empty<OciReferrer>();
}
}
public void Dispose()
{
// HttpClient typically managed externally
}
/// <summary>
/// Fetches the DSSE envelope blob for <paramref name="digest"/> from the registry.
/// Verification against trust roots is not yet implemented, so the
/// <c>Verified</c> flag is currently always false.
/// </summary>
/// <param name="reference">Image reference identifying registry and repository.</param>
/// <param name="digest">Digest of the DSSE envelope blob to fetch.</param>
/// <param name="payload">Slice payload the envelope signs; unused until verification is wired up.</param>
/// <param name="cancellationToken">Token for the HTTP round-trip.</param>
/// <returns>Envelope bytes (or null on failure) and whether the signature was verified.</returns>
private async Task<(byte[]? Envelope, bool Verified)> FetchAndVerifyDsseAsync(
    OciImageReference reference,
    string digest,
    byte[] payload,
    CancellationToken cancellationToken)
{
    try
    {
        var blobUrl = $"https://{reference.Registry}/v2/{reference.Repository}/blobs/{digest}";
        using var request = new HttpRequestMessage(HttpMethod.Get, blobUrl);
        await _authorization.AuthorizeRequestAsync(request, reference, cancellationToken)
            .ConfigureAwait(false);
        using var response = await _httpClient.SendAsync(request, cancellationToken)
            .ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            return (null, false);
        }
        var envelopeBytes = await response.Content.ReadAsByteArrayAsync(cancellationToken)
            .ConfigureAwait(false);
        // TODO: Actual DSSE verification using configured trust roots
        // For now, just return the envelope
        _logger.LogDebug("DSSE envelope fetched, verification pending trust root configuration");
        return (envelopeBytes, false);
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        // Fix: do not swallow cooperative cancellation as a silent (null, false);
        // callers must be able to observe shutdown. HTTP timeouts (TaskCanceledException
        // without a cancelled token) still fall through to the best-effort path below.
        throw;
    }
    catch (Exception ex)
    {
        _logger.LogWarning(ex, "Failed to fetch/verify DSSE envelope");
        return (null, false);
    }
}
/// <summary>
/// Attempts to read a non-expired entry from the slice cache.
/// Expired entries are evicted lazily at lookup time.
/// </summary>
private bool TryGetFromCache(string key, out CachedSlice? cached)
{
    lock (_cacheLock)
    {
        var found = _cache.TryGetValue(key, out var entry);
        if (found && entry!.ExpiresAt > DateTimeOffset.UtcNow)
        {
            cached = entry;
            return true;
        }
        if (found)
        {
            // Present but stale: drop it so the cache does not accumulate dead entries.
            _cache.Remove(key);
        }
        cached = null;
        return false;
    }
}
/// <summary>
/// Inserts or overwrites the cache entry for the given key.
/// </summary>
private void AddToCache(string key, CachedSlice cached)
{
    lock (_cacheLock)
    {
        // Indexer assignment intentionally replaces any existing entry.
        _cache[key] = cached;
    }
}
/// <summary>
/// Computes an OCI-style content digest ("sha256:" + lowercase hex) over the given bytes.
/// </summary>
private static string ComputeDigest(byte[] data)
{
    var hex = Convert.ToHexString(SHA256.HashData(data)).ToLowerInvariant();
    return string.Concat("sha256:", hex);
}
/// <summary>
/// Cache entry pairing a reachability slice with its optional DSSE envelope
/// and an absolute TTL-based expiry.
/// </summary>
private sealed record CachedSlice
{
    /// <summary>The cached reachability slice payload.</summary>
    public required ReachabilitySlice Slice { get; init; }
    /// <summary>Raw DSSE envelope bytes, when one was fetched alongside the slice.</summary>
    public byte[]? DsseEnvelope { get; init; }
    /// <summary>True only when the envelope signature was verified (currently never set — see FetchAndVerifyDsseAsync).</summary>
    public bool SignatureVerified { get; init; }
    /// <summary>Absolute expiry instant; TryGetFromCache evicts entries past this point.</summary>
    public required DateTimeOffset ExpiresAt { get; init; }
}
// Internal DTOs for OCI registry responses
/// <summary>
/// Minimal OCI image manifest shape used only for JSON deserialization.
/// </summary>
private sealed record OciManifest
{
    public int SchemaVersion { get; init; }
    public string? MediaType { get; init; }
    public string? ArtifactType { get; init; }
    public OciDescriptor? Config { get; init; }
    public List<OciDescriptor>? Layers { get; init; }
}
/// <summary>
/// OCI content descriptor (media type, digest, byte size) as returned by the registry.
/// </summary>
private sealed record OciDescriptor
{
    public string? MediaType { get; init; }
    public string? Digest { get; init; }
    public long Size { get; init; }
}
/// <summary>
/// Response shape of the OCI referrers API: an index whose manifests list
/// the artifacts referring to the queried digest.
/// </summary>
private sealed record OciReferrersIndex
{
    public int SchemaVersion { get; init; }
    public string? MediaType { get; init; }
    public List<OciReferrer>? Manifests { get; init; }
}
}
/// <summary>
/// OCI referrer descriptor returned by the referrers API.
/// </summary>
public sealed record OciReferrer
{
    /// <summary>Media type of the referring manifest.</summary>
    public string? MediaType { get; init; }
    /// <summary>Digest of the referring manifest.</summary>
    public string? Digest { get; init; }
    /// <summary>Size in bytes of the referring manifest.</summary>
    public long Size { get; init; }
    /// <summary>Artifact type declared by the referrer, if any.</summary>
    public string? ArtifactType { get; init; }
    /// <summary>Annotations attached to the referrer descriptor.</summary>
    public Dictionary<string, string>? Annotations { get; init; }
}

View File

@@ -0,0 +1,74 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Service for pushing reachability slices to OCI registries.
/// Supports Harbor, Zot, GHCR, and other OCI-compliant registries.
/// Sprint: SPRINT_3850_0001_0001
/// </summary>
public sealed class SlicePushService : IOciPushService
{
    private readonly OciArtifactPusher _pusher;
    private readonly SliceOciManifestBuilder _manifestBuilder;
    private readonly ILogger<SlicePushService> _logger;

    public SlicePushService(
        OciArtifactPusher pusher,
        SliceOciManifestBuilder manifestBuilder,
        ILogger<SlicePushService> logger)
    {
        ArgumentNullException.ThrowIfNull(pusher);
        ArgumentNullException.ThrowIfNull(manifestBuilder);
        ArgumentNullException.ThrowIfNull(logger);

        _pusher = pusher;
        _manifestBuilder = manifestBuilder;
        _logger = logger;
    }

    /// <inheritdoc/>
    public async Task<OciArtifactPushResult> PushAsync(
        OciArtifactPushRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        _logger.LogInformation(
            "Pushing OCI artifact {Reference} with type {ArtifactType}",
            request.Reference,
            request.ArtifactType);

        return await _pusher.PushAsync(request, cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc/>
    public async Task<OciArtifactPushResult> PushSliceAsync(
        SliceArtifactInput input,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(input);

        _logger.LogInformation(
            "Pushing slice artifact {Reference} for CVE {CveId} + {Purl}",
            input.Reference,
            input.SliceQuery?.CveId ?? "unknown",
            input.SliceQuery?.Purl ?? "unknown");

        // Translate the slice input into a generic OCI push request, then delegate.
        var pushRequest = _manifestBuilder.BuildSlicePushRequest(input);
        var outcome = await _pusher.PushAsync(pushRequest, cancellationToken).ConfigureAwait(false);

        if (!outcome.Success)
        {
            _logger.LogError(
                "Failed to push slice to {Reference}: {Error}",
                input.Reference,
                outcome.Error);
            return outcome;
        }

        _logger.LogInformation(
            "Successfully pushed slice to {Reference}",
            outcome.ManifestReference);
        return outcome;
    }
}

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<!-- Targets .NET 10 with C# preview features; nullable reference types and implicit usings enabled. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
<!-- Depends on the shared cryptography library for hashing/signature primitives. -->
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,39 @@
// -----------------------------------------------------------------------------
// BinaryIdentityRow.cs
// Sprint: SPRINT_4500_0001_0003_binary_evidence_db
// Description: Entity mapping for scanner.binary_identity table.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Recorded binary identity evidence per scan.
/// One row per observed binary (see migration 018_binary_evidence.sql).
/// </summary>
public sealed class BinaryIdentityRow
{
    /// <summary>Primary key.</summary>
    public Guid Id { get; set; }
    /// <summary>Owning scan (foreign key to scans; rows cascade on scan deletion).</summary>
    public Guid ScanId { get; set; }
    /// <summary>Path of the binary within the scanned artifact.</summary>
    public string FilePath { get; set; } = string.Empty;
    /// <summary>SHA-256 of the whole file, hex encoded.</summary>
    public string FileSha256 { get; set; } = string.Empty;
    /// <summary>SHA-256 of the text (code) section, when computed.</summary>
    public string? TextSha256 { get; set; }
    /// <summary>Build identifier embedded in the binary, when present.</summary>
    public string? BuildId { get; set; }
    /// <summary>Kind of build identifier (column build_id_type; exact vocabulary defined by the producer).</summary>
    public string? BuildIdType { get; set; }
    /// <summary>Target architecture of the binary.</summary>
    public string Architecture { get; set; } = string.Empty;
    /// <summary>Binary container format (column binary_format, e.g. values written by the scanner — confirm vocabulary at the producer).</summary>
    public string BinaryFormat { get; set; } = string.Empty;
    /// <summary>File size in bytes.</summary>
    public long FileSize { get; set; }
    /// <summary>True when symbol information has been stripped.</summary>
    public bool IsStripped { get; set; }
    /// <summary>True when debug information is present.</summary>
    public bool HasDebugInfo { get; set; }
    /// <summary>Row creation timestamp (UTC).</summary>
    public DateTimeOffset CreatedAtUtc { get; set; }
}

View File

@@ -0,0 +1,29 @@
// -----------------------------------------------------------------------------
// BinaryPackageMapRow.cs
// Sprint: SPRINT_4500_0001_0003_binary_evidence_db
// Description: Entity mapping for scanner.binary_package_map table.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Binary-to-package mapping evidence.
/// Unique per (binary identity, PURL) pair (see migration 018_binary_evidence.sql).
/// </summary>
public sealed class BinaryPackageMapRow
{
    /// <summary>Primary key.</summary>
    public Guid Id { get; set; }
    /// <summary>Foreign key to the binary identity row; cascades on delete.</summary>
    public Guid BinaryIdentityId { get; set; }
    /// <summary>Package URL the binary maps to.</summary>
    public string Purl { get; set; } = string.Empty;
    /// <summary>How the match was made (column match_type; vocabulary defined by the producer).</summary>
    public string MatchType { get; set; } = string.Empty;
    /// <summary>Match confidence (NUMERIC(3,2) in the schema; presumably 0.00-1.00 — confirm at the producer).</summary>
    public decimal Confidence { get; set; }
    /// <summary>Component that produced the match.</summary>
    public string MatchSource { get; set; } = string.Empty;
    /// <summary>Supporting evidence as a JSON document (JSONB column), when available.</summary>
    public string? EvidenceJson { get; set; }
    /// <summary>Row creation timestamp (UTC).</summary>
    public DateTimeOffset CreatedAtUtc { get; set; }
}

View File

@@ -0,0 +1,37 @@
// -----------------------------------------------------------------------------
// BinaryVulnAssertionRow.cs
// Sprint: SPRINT_4500_0001_0003_binary_evidence_db
// Description: Entity mapping for scanner.binary_vuln_assertion table.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Binary-level vulnerability assertion evidence with a validity window
/// (see migration 018_binary_evidence.sql).
/// </summary>
public sealed class BinaryVulnAssertionRow
{
    /// <summary>Primary key.</summary>
    public Guid Id { get; set; }
    /// <summary>Foreign key to the binary identity row; cascades on delete.</summary>
    public Guid BinaryIdentityId { get; set; }
    /// <summary>Vulnerability identifier (e.g. a CVE id — confirm vocabulary at the producer).</summary>
    public string VulnId { get; set; } = string.Empty;
    /// <summary>Assertion status (column status; vocabulary defined by the producer).</summary>
    public string Status { get; set; } = string.Empty;
    /// <summary>Source that made the assertion.</summary>
    public string Source { get; set; } = string.Empty;
    /// <summary>Kind of assertion (column assertion_type).</summary>
    public string AssertionType { get; set; } = string.Empty;
    /// <summary>Assertion confidence (NUMERIC(3,2) in the schema; presumably 0.00-1.00 — confirm at the producer).</summary>
    public decimal Confidence { get; set; }
    /// <summary>Supporting evidence as a JSON document (JSONB column), when available.</summary>
    public string? EvidenceJson { get; set; }
    /// <summary>Start of the assertion's validity window.</summary>
    public DateTimeOffset ValidFrom { get; set; }
    /// <summary>End of the validity window; null means open-ended.</summary>
    public DateTimeOffset? ValidUntil { get; set; }
    /// <summary>Reference to a signature backing the assertion, when signed.</summary>
    public string? SignatureRef { get; set; }
    /// <summary>Row creation timestamp (UTC).</summary>
    public DateTimeOffset CreatedAtUtc { get; set; }
}

View File

@@ -86,6 +86,10 @@ public static class ServiceCollectionExtensions
// Idempotency key storage (Sprint: SPRINT_3500_0002_0003)
services.AddScoped<IIdempotencyKeyRepository, PostgresIdempotencyKeyRepository>();
// Binary evidence persistence (Sprint: SPRINT_4500_0001_0003)
services.AddScoped<IBinaryEvidenceRepository, PostgresBinaryEvidenceRepository>();
services.AddScoped<IBinaryEvidenceService, BinaryEvidenceService>();
// EPSS ingestion services
services.AddSingleton<EpssCsvStreamParser>();
services.AddScoped<IEpssRepository, PostgresEpssRepository>();

View File

@@ -0,0 +1,80 @@
-- =============================================================================
-- Migration: 018_binary_evidence.sql
-- Sprint: SPRINT_4500_0001_0003_binary_evidence_db
-- Description: Persist binary identity evidence, package mappings, and vuln assertions.
--
-- Note: migrations are executed with the module schema as the active search_path.
-- Keep objects unqualified so integration tests can run in isolated schemas.
-- =============================================================================
-- =============================================================================
-- BINARY_IDENTITY: Recorded binary identity evidence per scan
-- =============================================================================
CREATE TABLE IF NOT EXISTS binary_identity (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- Rows are owned by a scan and disappear with it.
scan_id UUID NOT NULL REFERENCES scans(scan_id) ON DELETE CASCADE,
file_path VARCHAR(1024) NOT NULL,
-- SHA-256 digests stored as 64-char lowercase hex (no "sha256:" prefix).
file_sha256 VARCHAR(64) NOT NULL,
text_sha256 VARCHAR(64),
build_id VARCHAR(128),
build_id_type VARCHAR(32),
architecture VARCHAR(32) NOT NULL,
binary_format VARCHAR(16) NOT NULL,
file_size BIGINT NOT NULL,
is_stripped BOOLEAN NOT NULL DEFAULT FALSE,
has_debug_info BOOLEAN NOT NULL DEFAULT FALSE,
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Lookup paths: by build-id, by file/text hash, and by owning scan.
CREATE INDEX IF NOT EXISTS idx_binary_identity_build_id ON binary_identity(build_id);
CREATE INDEX IF NOT EXISTS idx_binary_identity_file_sha256 ON binary_identity(file_sha256);
CREATE INDEX IF NOT EXISTS idx_binary_identity_text_sha256 ON binary_identity(text_sha256);
CREATE INDEX IF NOT EXISTS idx_binary_identity_scan_id ON binary_identity(scan_id);
COMMENT ON TABLE binary_identity IS 'Recorded binary identity evidence per scan (build-id, hashes, format).';
-- =============================================================================
-- BINARY_PACKAGE_MAP: Map binaries to package PURLs with confidence/evidence
-- =============================================================================
CREATE TABLE IF NOT EXISTS binary_package_map (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
binary_identity_id UUID NOT NULL REFERENCES binary_identity(id) ON DELETE CASCADE,
purl VARCHAR(512) NOT NULL,
match_type VARCHAR(32) NOT NULL,
-- NOTE(review): NUMERIC(3,2) admits up to 9.99; confidence is presumably 0.00-1.00.
-- Consider a CHECK (confidence BETWEEN 0 AND 1) in a follow-up migration.
confidence NUMERIC(3,2) NOT NULL,
match_source VARCHAR(64) NOT NULL,
evidence_json JSONB,
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- One mapping per (binary, package) pair.
CONSTRAINT uq_binary_package_map UNIQUE (binary_identity_id, purl)
);
CREATE INDEX IF NOT EXISTS idx_binary_package_map_purl ON binary_package_map(purl);
CREATE INDEX IF NOT EXISTS idx_binary_package_map_binary_id ON binary_package_map(binary_identity_id);
COMMENT ON TABLE binary_package_map IS 'Binary to package (PURL) mappings with evidence and confidence.';
-- =============================================================================
-- BINARY_VULN_ASSERTION: Binary-level vulnerability assertions
-- =============================================================================
CREATE TABLE IF NOT EXISTS binary_vuln_assertion (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
binary_identity_id UUID NOT NULL REFERENCES binary_identity(id) ON DELETE CASCADE,
vuln_id VARCHAR(64) NOT NULL,
status VARCHAR(32) NOT NULL,
source VARCHAR(64) NOT NULL,
assertion_type VARCHAR(32) NOT NULL,
-- NOTE(review): same 0.00-1.00 range assumption as binary_package_map.confidence.
confidence NUMERIC(3,2) NOT NULL,
evidence_json JSONB,
-- Validity window: valid_until NULL means the assertion is open-ended.
valid_from TIMESTAMPTZ NOT NULL,
valid_until TIMESTAMPTZ,
signature_ref VARCHAR(256),
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS idx_binary_vuln_assertion_vuln_id ON binary_vuln_assertion(vuln_id);
CREATE INDEX IF NOT EXISTS idx_binary_vuln_assertion_binary_id ON binary_vuln_assertion(binary_identity_id);
CREATE INDEX IF NOT EXISTS idx_binary_vuln_assertion_status ON binary_vuln_assertion(status);
COMMENT ON TABLE binary_vuln_assertion IS 'Binary-level vulnerability assertions with evidence and validity.';

View File

@@ -16,5 +16,10 @@ internal static class MigrationIds
public const string EpssSignalLayer = "012_epss_signal_layer.sql";
public const string WitnessStorage = "013_witness_storage.sql";
public const string EpssTriageColumns = "014_epss_triage_columns.sql";
public const string VulnSurfaces = "014_vuln_surfaces.sql";
public const string VulnSurfaceTriggersUpdate = "015_vuln_surface_triggers_update.sql";
public const string ReachCache = "016_reach_cache.sql";
public const string IdempotencyKeys = "017_idempotency_keys.sql";
public const string BinaryEvidence = "018_binary_evidence.sql";
}

Some files were not shown because too many files have changed in this diff.