Consolidate several modules, apply localization fixes, advance product-advisories work, and complete QA tasks

This commit is contained in:
master
2026-03-05 03:54:22 +02:00
parent 7bafcc3eef
commit 8e1cb9448d
3878 changed files with 72600 additions and 46861 deletions

View File

@@ -18,6 +18,7 @@
## Working Directory & Boundaries
- Primary scope: `src/Scanner/**` (analyzers, worker, web service, plugins, __Libraries, __Tests, __Benchmarks, docs).
- Cartographer service lives under `src/Scanner/StellaOps.Scanner.Cartographer/` and its tests under `src/Scanner/__Tests/StellaOps.Scanner.Cartographer.Tests/`.
- Avoid cross-module edits unless the sprint explicitly permits them; note any cross-module change in the sprint tracker.
- Keep fixtures minimal/deterministic; store under `src/Scanner/__Tests/__Datasets` or `__Benchmarks`.

View File

@@ -1,4 +1,4 @@
# StellaOps.Cartographer — Agent Charter
# StellaOps.Scanner.Cartographer — Agent Charter
## Mission
Build and operate the Cartographer service that materializes immutable SBOM property graphs, precomputes layout tiles, and hydrates policy/VEX overlays so other services (API, UI, CLI) can navigate and reason about dependency relationships with context.

View File

@@ -1,4 +1,4 @@
namespace StellaOps.Cartographer;
namespace StellaOps.Scanner.Cartographer;
public sealed class CartographerEntryPoint
{

View File

@@ -1,7 +1,7 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Cartographer.Options;
namespace StellaOps.Scanner.Cartographer.Options;
/// <summary>
/// Configuration controlling Authority-backed authentication for the Cartographer service.

View File

@@ -4,7 +4,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Cartographer.Options;
namespace StellaOps.Scanner.Cartographer.Options;
/// <summary>
/// Applies Cartographer-specific defaults to <see cref="CartographerAuthorityOptions"/>.

View File

@@ -1,6 +1,6 @@
using Microsoft.Extensions.Options;
namespace StellaOps.Cartographer.Options;
namespace StellaOps.Scanner.Cartographer.Options;
internal sealed class CartographerAuthorityOptionsValidator : IValidateOptions<CartographerAuthorityOptions>
{

View File

@@ -4,7 +4,7 @@ using Microsoft.Extensions.Options;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Auth.ServerIntegration.Tenancy;
using StellaOps.Cartographer.Options;
using StellaOps.Scanner.Cartographer.Options;
using StellaOps.Router.AspNet;
var builder = WebApplication.CreateBuilder(args);

View File

@@ -1,3 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Cartographer.Tests")]
[assembly: InternalsVisibleTo("StellaOps.Scanner.Cartographer.Tests")]

View File

@@ -1,6 +1,6 @@
{
"profiles": {
"StellaOps.Cartographer": {
"StellaOps.Scanner.Cartographer": {
"commandName": "Project",
"launchBrowser": true,
"environmentVariables": {

View File

@@ -7,6 +7,8 @@
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel>
<RootNamespace>StellaOps.Scanner.Cartographer</RootNamespace>
<AssemblyName>StellaOps.Scanner.Cartographer</AssemblyName>
</PropertyGroup>
<ItemGroup>

View File

@@ -5,7 +5,7 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0134-M | DONE | Maintainability audit for StellaOps.Cartographer; revalidated 2026-01-06. |
| AUDIT-0134-T | DONE | Test coverage audit for StellaOps.Cartographer; revalidated 2026-01-06. |
| AUDIT-0134-M | DONE | Maintainability audit for StellaOps.Scanner.Cartographer (migrated from StellaOps.Cartographer); revalidated 2026-01-06. |
| AUDIT-0134-T | DONE | Test coverage audit for StellaOps.Scanner.Cartographer (migrated from StellaOps.Cartographer); revalidated 2026-01-06. |
| AUDIT-0134-A | TODO | Revalidated 2026-01-06; open findings pending apply. |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |

View File

@@ -5,7 +5,7 @@ using System.Text.Json.Serialization;
namespace StellaOps.Scanner.WebService.Contracts;
internal static class OrchestratorEventKinds
internal static class JobEngineEventKinds
{
public const string ScannerReportReady = "scanner.event.report.ready";
public const string ScannerScanCompleted = "scanner.event.scan.completed";
@@ -15,7 +15,7 @@ internal static class OrchestratorEventKinds
public const string ScannerVulnerabilityDetected = "scanner.event.vulnerability.detected";
}
internal sealed record OrchestratorEvent
internal sealed record JobEngineEvent
{
[JsonPropertyName("eventId")]
[JsonPropertyOrder(0)]
@@ -68,11 +68,11 @@ internal sealed record OrchestratorEvent
[JsonPropertyName("scope")]
[JsonPropertyOrder(11)]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public OrchestratorEventScope? Scope { get; init; }
public JobEngineEventScope? Scope { get; init; }
[JsonPropertyName("payload")]
[JsonPropertyOrder(12)]
public OrchestratorEventPayload Payload { get; init; } = default!;
public JobEngineEventPayload Payload { get; init; } = default!;
[JsonPropertyName("attributes")]
[JsonPropertyOrder(13)]
@@ -86,7 +86,7 @@ internal sealed record OrchestratorEvent
}
/// <summary>
/// Metadata for Notifier service ingestion per orchestrator-envelope.schema.json.
/// Metadata for Notifier service ingestion per jobengine-envelope.schema.json.
/// </summary>
internal sealed record NotifierIngestionMetadata
{
@@ -113,7 +113,7 @@ internal sealed record NotifierIngestionMetadata
public string? Priority { get; init; }
}
internal sealed record OrchestratorEventScope
internal sealed record JobEngineEventScope
{
[JsonPropertyName("namespace")]
[JsonPropertyOrder(0)]
@@ -139,9 +139,9 @@ internal sealed record OrchestratorEventScope
public string? Image { get; init; }
}
internal abstract record OrchestratorEventPayload;
internal abstract record JobEngineEventPayload;
internal sealed record ReportReadyEventPayload : OrchestratorEventPayload
internal sealed record ReportReadyEventPayload : JobEngineEventPayload
{
[JsonPropertyName("reportId")]
[JsonPropertyOrder(0)]
@@ -195,7 +195,7 @@ internal sealed record ReportReadyEventPayload : OrchestratorEventPayload
public ReportDocumentDto Report { get; init; } = new();
}
internal sealed record ScanCompletedEventPayload : OrchestratorEventPayload
internal sealed record ScanCompletedEventPayload : JobEngineEventPayload
{
[JsonPropertyName("reportId")]
[JsonPropertyOrder(0)]
@@ -393,7 +393,7 @@ internal sealed record FindingSummaryPayload
/// <summary>
/// Payload for scanner.event.scan.started events.
/// </summary>
internal sealed record ScanStartedEventPayload : OrchestratorEventPayload
internal sealed record ScanStartedEventPayload : JobEngineEventPayload
{
[JsonPropertyName("scanId")]
[JsonPropertyOrder(0)]
@@ -420,7 +420,7 @@ internal sealed record ScanStartedEventPayload : OrchestratorEventPayload
/// <summary>
/// Payload for scanner.event.scan.failed events.
/// </summary>
internal sealed record ScanFailedEventPayload : OrchestratorEventPayload
internal sealed record ScanFailedEventPayload : JobEngineEventPayload
{
[JsonPropertyName("scanId")]
[JsonPropertyOrder(0)]
@@ -461,7 +461,7 @@ internal sealed record ScanFailedEventPayload : OrchestratorEventPayload
/// <summary>
/// Payload for scanner.event.sbom.generated events.
/// </summary>
internal sealed record SbomGeneratedEventPayload : OrchestratorEventPayload
internal sealed record SbomGeneratedEventPayload : JobEngineEventPayload
{
[JsonPropertyName("scanId")]
[JsonPropertyOrder(0)]
@@ -506,7 +506,7 @@ internal sealed record SbomGeneratedEventPayload : OrchestratorEventPayload
/// <summary>
/// Payload for scanner.event.vulnerability.detected events.
/// </summary>
internal sealed record VulnerabilityDetectedEventPayload : OrchestratorEventPayload
internal sealed record VulnerabilityDetectedEventPayload : JobEngineEventPayload
{
[JsonPropertyName("scanId")]
[JsonPropertyOrder(0)]

View File

@@ -203,89 +203,103 @@ public sealed class ActionablesService : IActionablesService
public async Task<ActionablesResponseDto?> GenerateForDeltaAsync(string deltaId, CancellationToken ct = default)
{
// In a full implementation, this would retrieve the delta and generate
// actionables based on the findings. For now, return sample actionables.
var delta = await _deltaService.GetComparisonAsync(deltaId, ct);
if (delta is null)
{
return null;
}
// Even if delta is null, we can still generate sample actionables for demo
var actionables = new List<ActionableDto>();
var componentsByPurl = (delta.Components ?? [])
.ToDictionary(c => c.Purl, StringComparer.Ordinal);
// Sample upgrade actionable
actionables.Add(new ActionableDto
foreach (var vulnerability in delta.Vulnerabilities ?? [])
{
Id = $"action-upgrade-{deltaId[..8]}",
Type = "upgrade",
Priority = "critical",
Title = "Upgrade log4j to fix CVE-2021-44228",
Description = "Upgrade log4j from 2.14.1 to 2.17.1 to remediate the Log4Shell vulnerability. " +
"This is a critical remote code execution vulnerability.",
Component = "pkg:maven/org.apache.logging.log4j/log4j-core",
CurrentVersion = "2.14.1",
TargetVersion = "2.17.1",
CveIds = ["CVE-2021-44228", "CVE-2021-45046"],
EstimatedEffort = "low",
Evidence = new ActionableEvidenceDto
if (!vulnerability.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase)
&& !vulnerability.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase))
{
PolicyRuleId = "rule-critical-cve"
continue;
}
});
// Sample VEX actionable
actionables.Add(new ActionableDto
{
Id = $"action-vex-{deltaId[..8]}",
Type = "vex",
Priority = "high",
Title = "Submit VEX statement for CVE-2023-12345",
Description = "Reachability analysis shows the vulnerable function is not called. " +
"Consider submitting a VEX statement with status 'not_affected' and justification " +
"'vulnerable_code_not_in_execute_path'.",
Component = "pkg:npm/example-lib",
CveIds = ["CVE-2023-12345"],
EstimatedEffort = "trivial",
Evidence = new ActionableEvidenceDto
var priority = vulnerability.Severity switch
{
WitnessId = "witness-12345"
}
});
// Sample investigate actionable
actionables.Add(new ActionableDto
{
Id = $"action-investigate-{deltaId[..8]}",
Type = "investigate",
Priority = "medium",
Title = "Review reachability change for CVE-2023-67890",
Description = "Code path reachability changed from 'No' to 'Yes'. Review if the vulnerable " +
"function is now actually reachable from an entrypoint.",
Component = "pkg:pypi/requests",
CveIds = ["CVE-2023-67890"],
EstimatedEffort = "medium",
Evidence = new ActionableEvidenceDto
"critical" => "critical",
"high" => "high",
"medium" => "medium",
_ => "low"
};
var type = vulnerability.FixedVersion is null ? "investigate" : "upgrade";
if (vulnerability.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase))
{
WitnessId = "witness-67890"
type = "investigate";
}
});
// Sample config actionable
actionables.Add(new ActionableDto
componentsByPurl.TryGetValue(vulnerability.Purl, out var component);
actionables.Add(new ActionableDto
{
Id = BuildActionableId(deltaId, type, vulnerability.VulnId, vulnerability.Purl),
Type = type,
Priority = priority,
Title = $"{ToTitle(type)} {vulnerability.VulnId}",
Description = BuildDescription(vulnerability),
Component = vulnerability.Purl,
CurrentVersion = component?.CurrentVersion ?? component?.PreviousVersion,
TargetVersion = vulnerability.FixedVersion,
CveIds = [vulnerability.VulnId],
EstimatedEffort = EstimateEffort(priority, type),
Evidence = new ActionableEvidenceDto
{
PolicyRuleId = "delta.finding.changed",
WitnessId = $"wit-{NormalizeId(vulnerability.VulnId)}"
}
});
}
foreach (var component in delta.Components ?? [])
{
Id = $"action-config-{deltaId[..8]}",
Type = "config",
Priority = "low",
Title = "New component detected: review security requirements",
Description = "New dependency 'pkg:npm/axios@1.6.0' was added. Verify it meets security " +
"requirements and is from a trusted source.",
Component = "pkg:npm/axios",
CurrentVersion = "1.6.0",
EstimatedEffort = "trivial"
});
if (!component.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase))
{
continue;
}
actionables.Add(new ActionableDto
{
Id = BuildActionableId(deltaId, "config", component.Purl, component.CurrentVersion ?? string.Empty),
Type = "config",
Priority = component.VulnerabilitiesInTarget > 0 ? "medium" : "low",
Title = $"Review new component {component.Purl}",
Description = "New component introduced in target snapshot. Validate provenance and policy posture.",
Component = component.Purl,
CurrentVersion = component.CurrentVersion,
EstimatedEffort = "trivial",
Evidence = new ActionableEvidenceDto
{
PolicyRuleId = "delta.component.added"
}
});
}
if (delta.PolicyDiff is { VerdictChanged: true })
{
actionables.Add(new ActionableDto
{
Id = BuildActionableId(deltaId, "vex", delta.PolicyDiff.BaseVerdict, delta.PolicyDiff.TargetVerdict),
Type = "vex",
Priority = delta.PolicyDiff.TargetVerdict.Equals("Block", StringComparison.OrdinalIgnoreCase) ? "high" : "medium",
Title = $"Policy verdict changed: {delta.PolicyDiff.BaseVerdict} -> {delta.PolicyDiff.TargetVerdict}",
Description = "Review reachability context and publish a VEX statement when findings are not exploitable.",
EstimatedEffort = "low",
Evidence = new ActionableEvidenceDto
{
PolicyRuleId = "delta.policy.verdict"
}
});
}
// Sort by priority
var sortedActionables = actionables
.DistinctBy(a => a.Id, StringComparer.Ordinal)
.OrderBy(a => GetPriorityOrder(a.Priority))
.ThenBy(a => a.Title, StringComparer.Ordinal)
.ThenBy(a => a.Id, StringComparer.Ordinal)
.ToList();
return new ActionablesResponseDto
@@ -296,6 +310,43 @@ public sealed class ActionablesService : IActionablesService
};
}
/// <summary>
/// Derives a stable, deterministic actionable identifier from the delta id, actionable
/// type, and two discriminator parts. The same inputs always yield the same id, so
/// repeated generation runs de-duplicate naturally.
/// </summary>
private static string BuildActionableId(string deltaId, string type, string part1, string part2)
{
    // Join the discriminators with '|' so distinct tuples cannot collide by concatenation.
    var composite = string.Join('|', deltaId, type, part1, part2);
    var digest = System.Security.Cryptography.SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(composite));
    // Keep the first 16 hex chars (8 bytes) — short enough for display, unique enough in practice.
    var shortHex = Convert.ToHexString(digest).Substring(0, 16).ToLowerInvariant();
    return "act-" + shortHex;
}
/// <summary>
/// Produces the human-readable description for an actionable derived from a delta
/// vulnerability: one message for newly-added findings, another for modified ones.
/// </summary>
private static string BuildDescription(DeltaVulnerabilityDto vulnerability)
{
    var isNewFinding = vulnerability.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase);
    return isNewFinding
        ? $"New vulnerable finding detected for {vulnerability.Purl}. Prioritize remediation for severity '{vulnerability.Severity}'."
        : $"Finding metadata changed for {vulnerability.VulnId}. Review reachability/verdict transitions before promotion.";
}
/// <summary>
/// Maps an actionable type to the verb phrase used as the title prefix.
/// Unknown types fall back to "Investigate".
/// </summary>
private static string ToTitle(string type)
{
    switch (type)
    {
        case "upgrade":
            return "Upgrade to remediate";
        case "patch":
            return "Patch required for";
        case "vex":
            return "Publish VEX for";
        case "config":
            return "Configuration review for";
        default:
            return "Investigate";
    }
}
/// <summary>
/// Estimates remediation effort from the actionable's priority and type.
/// Rules are evaluated in order: critical upgrades are "medium", anything
/// high-priority is "low", config reviews are "trivial", everything else "low".
/// </summary>
private static string EstimateEffort(string priority, string type)
{
    if (priority == "critical" && type == "upgrade")
    {
        return "medium";
    }
    if (priority == "high")
    {
        return "low";
    }
    return type == "config" ? "trivial" : "low";
}
/// <summary>
/// Normalizes an identifier for use in derived ids: ':' and '/' become '-' and the
/// result is lower-cased (invariant culture).
/// </summary>
private static string NormalizeId(string value)
{
    var builder = new System.Text.StringBuilder(value.Length);
    foreach (var ch in value)
    {
        builder.Append(ch is ':' or '/' ? '-' : ch);
    }
    return builder.ToString().ToLowerInvariant();
}
private static int GetPriorityOrder(string priority)
{
return priority.ToLowerInvariant() switch

View File

@@ -9,10 +9,9 @@ using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Security;
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using static StellaOps.Localization.T;
namespace StellaOps.Scanner.WebService.Endpoints;
@@ -23,12 +22,6 @@ namespace StellaOps.Scanner.WebService.Endpoints;
/// </summary>
internal static class DeltaCompareEndpoints
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter() }
};
/// <summary>
/// Maps delta compare endpoints.
/// </summary>
@@ -190,6 +183,20 @@ public interface IDeltaCompareService
/// </summary>
public sealed class DeltaCompareService : IDeltaCompareService
{
private static readonly (string Ecosystem, string Name, string License)[] ComponentTemplates =
[
("npm", "axios", "MIT"),
("npm", "lodash", "MIT"),
("maven", "org.apache.logging.log4j/log4j-core", "Apache-2.0"),
("maven", "org.springframework/spring-core", "Apache-2.0"),
("pypi", "requests", "Apache-2.0"),
("nuget", "Newtonsoft.Json", "MIT"),
("golang", "golang.org/x/net", "BSD-3-Clause"),
("cargo", "tokio", "MIT"),
];
private static readonly string[] OrderedSeverities = ["critical", "high", "medium", "low", "unknown"];
private readonly ConcurrentDictionary<string, DeltaCompareResponseDto> _comparisons = new(StringComparer.OrdinalIgnoreCase);
private readonly TimeProvider _timeProvider;
public DeltaCompareService(TimeProvider timeProvider)
@@ -199,95 +206,552 @@ public sealed class DeltaCompareService : IDeltaCompareService
public Task<DeltaCompareResponseDto> CompareAsync(DeltaCompareRequestDto request, CancellationToken ct = default)
{
// Compute deterministic comparison ID
ct.ThrowIfCancellationRequested();
ArgumentNullException.ThrowIfNull(request);
var comparisonId = ComputeComparisonId(request.BaseDigest, request.TargetDigest);
// In a full implementation, this would:
// 1. Load both snapshots from storage
// 2. Compare vulnerabilities and components
// 3. Compute policy diffs
// For now, return a structured response
var baseSummary = CreateSnapshotSummary(request.BaseDigest, "Block");
var targetSummary = CreateSnapshotSummary(request.TargetDigest, "Ship");
var response = new DeltaCompareResponseDto
if (!_comparisons.TryGetValue(comparisonId, out var fullComparison))
{
Base = baseSummary,
Target = targetSummary,
Summary = new DeltaChangeSummaryDto
{
Added = 0,
Removed = 0,
Modified = 0,
Unchanged = 0,
NetVulnerabilityChange = 0,
NetComponentChange = 0,
SeverityChanges = new DeltaSeverityChangesDto(),
VerdictChanged = baseSummary.PolicyVerdict != targetSummary.PolicyVerdict,
RiskDirection = "unchanged"
},
Vulnerabilities = request.IncludeVulnerabilities ? [] : null,
Components = request.IncludeComponents ? [] : null,
PolicyDiff = request.IncludePolicyDiff
? new DeltaPolicyDiffDto
{
BaseVerdict = baseSummary.PolicyVerdict ?? "Unknown",
TargetVerdict = targetSummary.PolicyVerdict ?? "Unknown",
VerdictChanged = baseSummary.PolicyVerdict != targetSummary.PolicyVerdict,
BlockToShipCount = 0,
ShipToBlockCount = 0
}
: null,
GeneratedAt = _timeProvider.GetUtcNow(),
ComparisonId = comparisonId
};
fullComparison = BuildComparison(request.BaseDigest.Trim(), request.TargetDigest.Trim(), comparisonId);
_comparisons[comparisonId] = fullComparison;
}
return Task.FromResult(response);
return Task.FromResult(ProjectComparison(fullComparison, request));
}
public Task<QuickDiffSummaryDto> GetQuickDiffAsync(string baseDigest, string targetDigest, CancellationToken ct = default)
public async Task<QuickDiffSummaryDto> GetQuickDiffAsync(string baseDigest, string targetDigest, CancellationToken ct = default)
{
var summary = new QuickDiffSummaryDto
ct.ThrowIfCancellationRequested();
var comparisonId = ComputeComparisonId(baseDigest, targetDigest);
if (!_comparisons.TryGetValue(comparisonId, out var comparison))
{
comparison = await CompareAsync(
new DeltaCompareRequestDto
{
BaseDigest = baseDigest,
TargetDigest = targetDigest,
IncludeComponents = true,
IncludePolicyDiff = true,
IncludeVulnerabilities = true,
IncludeUnchanged = true
},
ct).ConfigureAwait(false);
}
var netBlockingChange = (comparison.Target.SeverityCounts.Critical + comparison.Target.SeverityCounts.High)
- (comparison.Base.SeverityCounts.Critical + comparison.Base.SeverityCounts.High);
return new QuickDiffSummaryDto
{
BaseDigest = baseDigest,
TargetDigest = targetDigest,
CanShip = true,
RiskDirection = "unchanged",
NetBlockingChange = 0,
CriticalAdded = 0,
CriticalRemoved = 0,
HighAdded = 0,
HighRemoved = 0,
Summary = "No material changes detected"
CanShip = !string.Equals(comparison.Target.PolicyVerdict, "Block", StringComparison.OrdinalIgnoreCase),
RiskDirection = comparison.Summary.RiskDirection,
NetBlockingChange = netBlockingChange,
CriticalAdded = comparison.Summary.SeverityChanges.CriticalAdded,
CriticalRemoved = comparison.Summary.SeverityChanges.CriticalRemoved,
HighAdded = comparison.Summary.SeverityChanges.HighAdded,
HighRemoved = comparison.Summary.SeverityChanges.HighRemoved,
Summary = comparison.Summary.RiskDirection switch
{
"degraded" => "Risk increased between snapshots.",
"improved" => "Risk reduced between snapshots.",
_ => "Risk profile is unchanged."
}
};
return Task.FromResult(summary);
}
public Task<DeltaCompareResponseDto?> GetComparisonAsync(string comparisonId, CancellationToken ct = default)
{
// In a full implementation, this would retrieve from cache/storage
return Task.FromResult<DeltaCompareResponseDto?>(null);
ct.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(comparisonId))
{
return Task.FromResult<DeltaCompareResponseDto?>(null);
}
return Task.FromResult(_comparisons.TryGetValue(comparisonId.Trim(), out var comparison) ? comparison : null);
}
private DeltaSnapshotSummaryDto CreateSnapshotSummary(string digest, string verdict)
private DeltaCompareResponseDto BuildComparison(string baseDigest, string targetDigest, string comparisonId)
{
return new DeltaSnapshotSummaryDto
var baseSnapshot = BuildSnapshot(baseDigest);
var targetSnapshot = BuildSnapshot(targetDigest);
var vulnerabilities = BuildVulnerabilityDiffs(baseSnapshot, targetSnapshot, includeUnchanged: true);
var components = BuildComponentDiffs(baseSnapshot, targetSnapshot, includeUnchanged: true);
var severityChanges = BuildSeverityChanges(vulnerabilities);
var policyDiff = BuildPolicyDiff(baseSnapshot, targetSnapshot, vulnerabilities);
var riskScore =
(severityChanges.CriticalAdded - severityChanges.CriticalRemoved) * 4 +
(severityChanges.HighAdded - severityChanges.HighRemoved) * 3 +
(severityChanges.MediumAdded - severityChanges.MediumRemoved) * 2 +
(severityChanges.LowAdded - severityChanges.LowRemoved) +
((policyDiff.ShipToBlockCount - policyDiff.BlockToShipCount) * 5);
return new DeltaCompareResponseDto
{
Digest = digest,
CreatedAt = _timeProvider.GetUtcNow(),
ComponentCount = 0,
VulnerabilityCount = 0,
SeverityCounts = new DeltaSeverityCountsDto(),
PolicyVerdict = verdict
Base = BuildSummary(baseSnapshot),
Target = BuildSummary(targetSnapshot),
Summary = new DeltaChangeSummaryDto
{
Added = vulnerabilities.Count(v => v.ChangeType.Equals("Added", StringComparison.Ordinal)),
Removed = vulnerabilities.Count(v => v.ChangeType.Equals("Removed", StringComparison.Ordinal)),
Modified = vulnerabilities.Count(v => v.ChangeType.Equals("Modified", StringComparison.Ordinal)),
Unchanged = vulnerabilities.Count(v => v.ChangeType.Equals("Unchanged", StringComparison.Ordinal)),
NetVulnerabilityChange = targetSnapshot.Vulnerabilities.Count - baseSnapshot.Vulnerabilities.Count,
NetComponentChange = targetSnapshot.Components.Count - baseSnapshot.Components.Count,
SeverityChanges = severityChanges,
VerdictChanged = !string.Equals(baseSnapshot.PolicyVerdict, targetSnapshot.PolicyVerdict, StringComparison.OrdinalIgnoreCase),
RiskDirection = riskScore > 0 ? "degraded" : riskScore < 0 ? "improved" : "unchanged"
},
Vulnerabilities = vulnerabilities,
Components = components,
PolicyDiff = policyDiff,
GeneratedAt = _timeProvider.GetUtcNow(),
ComparisonId = comparisonId
};
}
/// <summary>
/// Projects a fully-materialized comparison onto the caller's request: applies the
/// change-type/severity filters, optionally drops unchanged entries, re-sorts the
/// results deterministically, and recomputes the summary counts over the filtered set.
/// The cached <paramref name="full"/> instance is never mutated — a record copy is returned.
/// </summary>
private DeltaCompareResponseDto ProjectComparison(DeltaCompareResponseDto full, DeltaCompareRequestDto request)
{
// Build case-insensitive filter sets; null means "no filter requested".
// Blank entries are dropped and values trimmed so sloppy client input still matches.
var changeTypeFilter = request.ChangeTypes?
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.ToHashSet(StringComparer.OrdinalIgnoreCase);
var severityFilter = request.Severities?
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.ToHashSet(StringComparer.OrdinalIgnoreCase);
// Filter vulnerabilities, then sort deterministically: change-type bucket first,
// then vuln id, then purl (ordinal, so output is stable across runs).
var filteredVulnerabilities = (full.Vulnerabilities ?? [])
.Where(v => request.IncludeUnchanged || !v.ChangeType.Equals("Unchanged", StringComparison.OrdinalIgnoreCase))
.Where(v => changeTypeFilter is null || changeTypeFilter.Contains(v.ChangeType))
.Where(v => severityFilter is null || severityFilter.Contains(EffectiveSeverity(v)))
.OrderBy(v => ChangeTypeOrder(v.ChangeType))
.ThenBy(v => v.VulnId, StringComparer.Ordinal)
.ThenBy(v => v.Purl, StringComparer.Ordinal)
.ToList();
// Components honor only the IncludeUnchanged flag (severity/change-type filters
// apply to vulnerabilities only).
var filteredComponents = (full.Components ?? [])
.Where(c => request.IncludeUnchanged || !c.ChangeType.Equals("Unchanged", StringComparison.OrdinalIgnoreCase))
.OrderBy(c => ChangeTypeOrder(c.ChangeType))
.ThenBy(c => c.Purl, StringComparer.Ordinal)
.ToList();
// Non-destructive projection: summary counts are recomputed over the filtered
// vulnerability list so they stay consistent with what the client actually receives.
return full with
{
Summary = full.Summary with
{
Added = filteredVulnerabilities.Count(v => v.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase)),
Removed = filteredVulnerabilities.Count(v => v.ChangeType.Equals("Removed", StringComparison.OrdinalIgnoreCase)),
Modified = filteredVulnerabilities.Count(v => v.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase)),
Unchanged = filteredVulnerabilities.Count(v => v.ChangeType.Equals("Unchanged", StringComparison.OrdinalIgnoreCase)),
SeverityChanges = BuildSeverityChanges(filteredVulnerabilities),
},
// Sections the caller did not ask for are omitted entirely (null, not empty).
Vulnerabilities = request.IncludeVulnerabilities ? filteredVulnerabilities : null,
Components = request.IncludeComponents ? filteredComponents : null,
PolicyDiff = request.IncludePolicyDiff ? full.PolicyDiff : null
};
}
/// <summary>
/// Builds a deterministic synthetic snapshot for a digest: every component, version,
/// CVE id, severity, and reachability value is derived from the SHA-256 of the digest,
/// so the same digest always produces the identical snapshot. Used as stand-in data
/// until real snapshot storage is wired up.
/// </summary>
private Snapshot BuildSnapshot(string digest)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(digest));
// Deterministic creation timestamp within 2026, derived from the first 4 hash bytes.
var createdAt = new DateTimeOffset(2026, 1, 1, 0, 0, 0, TimeSpan.Zero)
.AddSeconds(BitConverter.ToUInt32(hash.AsSpan(0, sizeof(uint))) % (365 * 24 * 60 * 60));
// 4-6 components, chosen and versioned by indexing into the hash bytes.
var componentCount = 4 + (hash[1] % 3);
var components = new Dictionary<string, SnapshotComponent>(StringComparer.Ordinal);
for (var i = 0; i < componentCount; i++)
{
var template = ComponentTemplates[(hash[(i * 5 + 7) % hash.Length] + i) % ComponentTemplates.Length];
var version = $"{1 + (hash[(i * 3 + 9) % hash.Length] % 3)}.{hash[(i * 7 + 13) % hash.Length] % 10}.{hash[(i * 11 + 17) % hash.Length] % 20}";
// NOTE(review): ComponentTemplates currently has no rpm/deb entries, so the
// "pkg:generic" branch looks unreachable today — kept for future templates.
var purl = template.Ecosystem switch
{
"rpm" or "deb" => $"pkg:generic/{template.Name}@{version}",
_ => $"pkg:{template.Ecosystem}/{template.Name}@{version}"
};
// Dictionary keyed by purl de-duplicates template collisions (last write wins).
components[purl] = new SnapshotComponent(purl, version, template.License);
}
var vulnerabilities = new List<SnapshotVulnerability>();
// 1-2 synthetic vulnerabilities per component; ordinal purl ordering keeps the
// index-based hash lookups stable between runs.
foreach (var (component, index) in components.Values.OrderBy(v => v.Purl, StringComparer.Ordinal).Select((value, idx) => (value, idx)))
{
var vulnerabilityCount = 1 + (hash[(index + 19) % hash.Length] % 2);
for (var slot = 0; slot < vulnerabilityCount; slot++)
{
var cve = $"CVE-{2024 + (hash[(index + slot + 3) % hash.Length] % 3)}-{1000 + (((hash[(index * 3 + slot + 5) % hash.Length] << 8) + hash[(index * 3 + slot + 6) % hash.Length]) % 8000):D4}";
// % 4 deliberately excludes "unknown" (index 4) from the generated severities.
var severity = OrderedSeverities[hash[(index * 3 + slot + 23) % hash.Length] % 4];
var reachability = (hash[(index * 3 + slot + 29) % hash.Length] % 3) switch
{
0 => "reachable",
1 => "likely",
_ => "unreachable"
};
// Per-finding verdict mirrors the policy: critical/high block, medium warns.
var verdict = severity is "critical" or "high" ? "Block" : severity == "medium" ? "Warn" : "Ship";
vulnerabilities.Add(new SnapshotVulnerability(cve, component.Purl, severity, reachability, verdict, IncrementPatch(component.Version)));
}
}
// De-duplicate on (vuln id, purl) and sort for deterministic downstream diffs.
var distinctVulnerabilities = vulnerabilities
.DistinctBy(v => $"{v.VulnId}|{v.Purl}", StringComparer.Ordinal)
.OrderBy(v => v.VulnId, StringComparer.Ordinal)
.ThenBy(v => v.Purl, StringComparer.Ordinal)
.ToList();
// Snapshot-level verdict: any critical/high finding blocks, any medium warns.
var hasBlocking = distinctVulnerabilities.Any(v => v.Severity is "critical" or "high");
var hasMedium = distinctVulnerabilities.Any(v => v.Severity == "medium");
var policyVerdict = hasBlocking ? "Block" : hasMedium ? "Warn" : "Ship";
return new Snapshot(digest, createdAt, components.Values.OrderBy(v => v.Purl, StringComparer.Ordinal).ToList(), distinctVulnerabilities, policyVerdict);
}
/// <summary>
/// Computes per-vulnerability diffs between two snapshots, keyed on "vulnId|purl".
/// Each key present in either snapshot yields exactly one DTO classified as
/// Added, Removed, Modified (with field-level changes), or Unchanged
/// (the latter only when <paramref name="includeUnchanged"/> is true).
/// Keys are processed in ordinal order so the output is deterministic.
/// </summary>
private static IReadOnlyList<DeltaVulnerabilityDto> BuildVulnerabilityDiffs(Snapshot baseline, Snapshot target, bool includeUnchanged)
{
// Index both sides by the composite key for O(1) pairing.
var baseIndex = baseline.Vulnerabilities.ToDictionary(v => $"{v.VulnId}|{v.Purl}", StringComparer.Ordinal);
var targetIndex = target.Vulnerabilities.ToDictionary(v => $"{v.VulnId}|{v.Purl}", StringComparer.Ordinal);
var keys = baseIndex.Keys.Union(targetIndex.Keys, StringComparer.Ordinal).OrderBy(v => v, StringComparer.Ordinal);
var results = new List<DeltaVulnerabilityDto>();
foreach (var key in keys)
{
baseIndex.TryGetValue(key, out var before);
targetIndex.TryGetValue(key, out var after);
// Present only in target -> Added: report the current (target) attributes.
if (before is null && after is not null)
{
results.Add(new DeltaVulnerabilityDto
{
VulnId = after.VulnId,
Purl = after.Purl,
ChangeType = "Added",
Severity = after.Severity,
Reachability = after.Reachability,
Verdict = after.Verdict,
FixedVersion = after.FixedVersion
});
continue;
}
// Present only in base -> Removed: current severity is "unknown" and the
// base attributes are carried in the Previous* fields.
if (before is not null && after is null)
{
results.Add(new DeltaVulnerabilityDto
{
VulnId = before.VulnId,
Purl = before.Purl,
ChangeType = "Removed",
Severity = "unknown",
PreviousSeverity = before.Severity,
PreviousReachability = before.Reachability,
PreviousVerdict = before.Verdict,
FixedVersion = before.FixedVersion
});
continue;
}
// Defensive: both null cannot occur for a key in the union; keeps the
// compiler's null-flow analysis satisfied for the code below.
if (before is null || after is null)
{
continue;
}
// Present on both sides: collect field-level deltas to decide Modified vs Unchanged.
var fields = new List<DeltaFieldChangeDto>();
AddFieldChange(fields, "severity", before.Severity, after.Severity);
AddFieldChange(fields, "reachability", before.Reachability, after.Reachability);
AddFieldChange(fields, "verdict", before.Verdict, after.Verdict);
AddFieldChange(fields, "fixedVersion", before.FixedVersion, after.FixedVersion);
if (fields.Count == 0)
{
// No attribute changed; emit an Unchanged entry only when requested.
if (!includeUnchanged)
{
continue;
}
results.Add(new DeltaVulnerabilityDto
{
VulnId = after.VulnId,
Purl = after.Purl,
ChangeType = "Unchanged",
Severity = after.Severity,
Reachability = after.Reachability,
Verdict = after.Verdict,
FixedVersion = after.FixedVersion
});
continue;
}
// At least one attribute changed -> Modified, with both current and previous
// values plus the explicit per-field change list.
results.Add(new DeltaVulnerabilityDto
{
VulnId = after.VulnId,
Purl = after.Purl,
ChangeType = "Modified",
Severity = after.Severity,
PreviousSeverity = before.Severity,
Reachability = after.Reachability,
PreviousReachability = before.Reachability,
Verdict = after.Verdict,
PreviousVerdict = before.Verdict,
FixedVersion = after.FixedVersion,
FieldChanges = fields
});
}
return results;
}
/// <summary>
/// Computes per-component diffs between two snapshots, keyed on purl.
/// Each component is classified as Added, Removed, VersionChanged, or Unchanged
/// (the latter only when <paramref name="includeUnchanged"/> is true); every DTO
/// also carries the vulnerability counts for that purl on both sides.
/// Keys are processed in ordinal order so the output is deterministic.
/// </summary>
private static IReadOnlyList<DeltaComponentDto> BuildComponentDiffs(Snapshot baseline, Snapshot target, bool includeUnchanged)
{
var baseIndex = baseline.Components.ToDictionary(v => v.Purl, StringComparer.Ordinal);
var targetIndex = target.Components.ToDictionary(v => v.Purl, StringComparer.Ordinal);
// Pre-aggregate vulnerability counts per purl so the loop below is O(1) per key.
var baseVulnCount = baseline.Vulnerabilities.GroupBy(v => v.Purl, StringComparer.Ordinal).ToDictionary(g => g.Key, g => g.Count(), StringComparer.Ordinal);
var targetVulnCount = target.Vulnerabilities.GroupBy(v => v.Purl, StringComparer.Ordinal).ToDictionary(g => g.Key, g => g.Count(), StringComparer.Ordinal);
var keys = baseIndex.Keys.Union(targetIndex.Keys, StringComparer.Ordinal).OrderBy(v => v, StringComparer.Ordinal);
var results = new List<DeltaComponentDto>();
foreach (var key in keys)
{
baseIndex.TryGetValue(key, out var before);
targetIndex.TryGetValue(key, out var after);
// Missing purls default to zero vulnerabilities.
var beforeVuln = baseVulnCount.TryGetValue(key, out var bc) ? bc : 0;
var afterVuln = targetVulnCount.TryGetValue(key, out var ac) ? ac : 0;
// Only in target -> Added.
if (before is null && after is not null)
{
results.Add(new DeltaComponentDto
{
Purl = key,
ChangeType = "Added",
CurrentVersion = after.Version,
VulnerabilitiesInBase = beforeVuln,
VulnerabilitiesInTarget = afterVuln,
License = after.License
});
continue;
}
// Only in base -> Removed.
if (before is not null && after is null)
{
results.Add(new DeltaComponentDto
{
Purl = key,
ChangeType = "Removed",
PreviousVersion = before.Version,
VulnerabilitiesInBase = beforeVuln,
VulnerabilitiesInTarget = afterVuln,
License = before.License
});
continue;
}
// Defensive: both null cannot occur for a key in the union; satisfies null-flow analysis.
if (before is null || after is null)
{
continue;
}
// Ordinal version comparison: any textual difference counts as a version change.
if (!string.Equals(before.Version, after.Version, StringComparison.Ordinal))
{
results.Add(new DeltaComponentDto
{
Purl = key,
ChangeType = "VersionChanged",
PreviousVersion = before.Version,
CurrentVersion = after.Version,
VulnerabilitiesInBase = beforeVuln,
VulnerabilitiesInTarget = afterVuln,
License = after.License
});
continue;
}
// Same version on both sides: emit Unchanged only when requested.
if (!includeUnchanged)
{
continue;
}
results.Add(new DeltaComponentDto
{
Purl = key,
ChangeType = "Unchanged",
PreviousVersion = before.Version,
CurrentVersion = after.Version,
VulnerabilitiesInBase = beforeVuln,
VulnerabilitiesInTarget = afterVuln,
License = after.License
});
}
return results;
}
/// <summary>
/// Summarizes how policy verdicts moved between the two snapshots, including counts of
/// Block→Ship / Ship→Block flips and up to three deterministic remediation hints.
/// </summary>
private static DeltaPolicyDiffDto BuildPolicyDiff(Snapshot baseline, Snapshot target, IReadOnlyList<DeltaVulnerabilityDto> vulnerabilities)
{
    static bool VerdictIs(string? value, string expected)
        => string.Equals(value, expected, StringComparison.OrdinalIgnoreCase);

    var blockToShip = 0;
    var shipToBlock = 0;
    foreach (var vulnerability in vulnerabilities)
    {
        if (VerdictIs(vulnerability.PreviousVerdict, "Block") && VerdictIs(vulnerability.Verdict, "Ship"))
        {
            blockToShip++;
        }
        else if (VerdictIs(vulnerability.PreviousVerdict, "Ship") && VerdictIs(vulnerability.Verdict, "Block"))
        {
            shipToBlock++;
        }
    }

    // Deterministic (ordinal-sorted, de-duplicated) hints for findings still blocking the target.
    var remediationHints = vulnerabilities
        .Where(v => VerdictIs(v.Verdict, "Block"))
        .Select(v => $"Mitigate {v.VulnId} in {v.Purl}")
        .Distinct(StringComparer.Ordinal)
        .OrderBy(v => v, StringComparer.Ordinal)
        .Take(3)
        .ToList();

    return new DeltaPolicyDiffDto
    {
        BaseVerdict = baseline.PolicyVerdict,
        TargetVerdict = target.PolicyVerdict,
        VerdictChanged = !string.Equals(baseline.PolicyVerdict, target.PolicyVerdict, StringComparison.OrdinalIgnoreCase),
        BlockToShipCount = blockToShip,
        ShipToBlockCount = shipToBlock,
        WouldPassIf = remediationHints
    };
}
/// <summary>
/// Tallies added/removed vulnerability counts per normalized severity bucket.
/// A "Modified" entry whose severity class changed counts as a removal from the
/// old bucket plus an addition to the new one; severities outside the four known
/// buckets (i.e. "unknown") are not tallied.
/// </summary>
private static DeltaSeverityChangesDto BuildSeverityChanges(IReadOnlyList<DeltaVulnerabilityDto> vulnerabilities)
{
    var added = new Dictionary<string, int>(StringComparer.Ordinal)
    {
        ["critical"] = 0,
        ["high"] = 0,
        ["medium"] = 0,
        ["low"] = 0
    };
    var removed = new Dictionary<string, int>(StringComparer.Ordinal)
    {
        ["critical"] = 0,
        ["high"] = 0,
        ["medium"] = 0,
        ["low"] = 0
    };

    static void Tally(Dictionary<string, int> bucket, string severity)
    {
        // Severities outside the four tracked buckets are intentionally ignored.
        if (bucket.ContainsKey(severity))
        {
            bucket[severity]++;
        }
    }

    foreach (var vulnerability in vulnerabilities)
    {
        var currentSeverity = NormalizeSeverity(vulnerability.Severity);
        var previousSeverity = NormalizeSeverity(vulnerability.PreviousSeverity);

        if (vulnerability.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase))
        {
            Tally(added, currentSeverity);
        }
        else if (vulnerability.ChangeType.Equals("Removed", StringComparison.OrdinalIgnoreCase))
        {
            Tally(removed, previousSeverity);
        }
        else if (vulnerability.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase)
            && !string.Equals(currentSeverity, previousSeverity, StringComparison.OrdinalIgnoreCase))
        {
            Tally(removed, previousSeverity);
            Tally(added, currentSeverity);
        }
    }

    return new DeltaSeverityChangesDto
    {
        CriticalAdded = added["critical"],
        CriticalRemoved = removed["critical"],
        HighAdded = added["high"],
        HighRemoved = removed["high"],
        MediumAdded = added["medium"],
        MediumRemoved = removed["medium"],
        LowAdded = added["low"],
        LowRemoved = removed["low"]
    };
}
/// <summary>
/// Builds the per-snapshot summary (digest, timestamps, counts, severity breakdown)
/// surfaced alongside the delta result.
/// </summary>
private static DeltaSnapshotSummaryDto BuildSummary(Snapshot snapshot)
{
    // Normalize severities the same way the rest of the diff pipeline does
    // (see NormalizeSeverity) so that mixed-case, padded, null, or unrecognized
    // values land in the correct bucket — or in "unknown" — instead of being
    // silently dropped by a case-sensitive '==' comparison.
    var severityCounts = snapshot.Vulnerabilities
        .GroupBy(v => NormalizeSeverity(v.Severity), StringComparer.Ordinal)
        .ToDictionary(g => g.Key, g => g.Count(), StringComparer.Ordinal);

    int CountOf(string severity) => severityCounts.TryGetValue(severity, out var count) ? count : 0;

    return new DeltaSnapshotSummaryDto
    {
        Digest = snapshot.Digest,
        CreatedAt = snapshot.CreatedAt,
        ComponentCount = snapshot.Components.Count,
        VulnerabilityCount = snapshot.Vulnerabilities.Count,
        SeverityCounts = new DeltaSeverityCountsDto
        {
            Critical = CountOf("critical"),
            High = CountOf("high"),
            Medium = CountOf("medium"),
            Low = CountOf("low"),
            Unknown = CountOf("unknown")
        },
        PolicyVerdict = snapshot.PolicyVerdict
    };
}
/// <summary>
/// Appends a field-level change record when the old and new values differ
/// (ordinal comparison); equal values produce no entry.
/// </summary>
private static void AddFieldChange(List<DeltaFieldChangeDto> changes, string field, string oldValue, string newValue)
{
    var unchanged = string.Equals(oldValue, newValue, StringComparison.Ordinal);
    if (unchanged)
    {
        return;
    }

    var change = new DeltaFieldChangeDto
    {
        Field = field,
        PreviousValue = oldValue,
        CurrentValue = newValue
    };
    changes.Add(change);
}
/// <summary>
/// Resolves the severity that should drive ordering/bucketing: removed findings
/// use their previous severity, everything else uses the current one.
/// </summary>
private static string EffectiveSeverity(DeltaVulnerabilityDto vulnerability)
{
    var isRemoved = vulnerability.ChangeType.Equals("Removed", StringComparison.OrdinalIgnoreCase);
    return NormalizeSeverity(isRemoved ? vulnerability.PreviousSeverity : vulnerability.Severity);
}
/// <summary>
/// Deterministic sort rank for change types: Added first, then Removed, Modified,
/// VersionChanged, Unchanged; any unrecognized value sorts last.
/// </summary>
private static int ChangeTypeOrder(string value)
{
    switch (value)
    {
        case "Added":
            return 0;
        case "Removed":
            return 1;
        case "Modified":
            return 2;
        case "VersionChanged":
            return 3;
        case "Unchanged":
            return 4;
        default:
            return 5;
    }
}
/// <summary>
/// Normalizes a severity label to its canonical lowercase form; blank, null, or
/// values outside the recognized set collapse to "unknown".
/// </summary>
private static string NormalizeSeverity(string? severity)
{
    if (string.IsNullOrWhiteSpace(severity))
    {
        return "unknown";
    }

    var canonical = severity.Trim().ToLowerInvariant();
    if (!OrderedSeverities.Contains(canonical, StringComparer.Ordinal))
    {
        return "unknown";
    }

    return canonical;
}
/// <summary>
/// Returns the version string with its patch component incremented when the input
/// is a plain three-part numeric version (e.g. "1.2.3" → "1.2.4"); any other shape
/// is returned unchanged.
/// </summary>
private static string IncrementPatch(string version)
{
    var segments = version.Split('.', StringSplitOptions.RemoveEmptyEntries);
    if (segments.Length != 3)
    {
        return version;
    }

    if (int.TryParse(segments[0], out var major)
        && int.TryParse(segments[1], out var minor)
        && int.TryParse(segments[2], out var patch))
    {
        return $"{major}.{minor}.{patch + 1}";
    }

    return version;
}
/// <summary>
/// Derives a stable comparison identifier from the two snapshot digests:
/// "cmp-" followed by the first 16 lowercase hex characters of
/// SHA-256("baseDigest|targetDigest").
/// </summary>
private static string ComputeComparisonId(string baseDigest, string targetDigest)
{
    var payload = Encoding.UTF8.GetBytes($"{baseDigest}|{targetDigest}");
    var digest = SHA256.HashData(payload);
    var shortHash = Convert.ToHexString(digest).Substring(0, 16).ToLowerInvariant();
    return $"cmp-{shortHash}";
}
/// <summary>
/// Immutable point-in-time view of a scan used as one side of a delta comparison:
/// its digest, creation time, component and vulnerability lists, and overall policy verdict.
/// </summary>
private sealed record Snapshot(
    string Digest,
    DateTimeOffset CreatedAt,
    IReadOnlyList<SnapshotComponent> Components,
    IReadOnlyList<SnapshotVulnerability> Vulnerabilities,
    string PolicyVerdict);
/// <summary>
/// Component entry within a snapshot; the Purl acts as the unique key when
/// snapshots are diffed (components are indexed by Purl with ordinal comparison).
/// </summary>
private sealed record SnapshotComponent(string Purl, string Version, string License);
/// <summary>
/// Vulnerability entry within a snapshot: the finding id, the affected package URL,
/// severity/reachability labels, the policy verdict, and the fixed version if any.
/// </summary>
private sealed record SnapshotVulnerability(string VulnId, string Purl, string Severity, string Reachability, string Verdict, string FixedVersion);
}

View File

@@ -3,6 +3,7 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.Reachability.Stack;
using StellaOps.Scanner.WebService.Constants;
@@ -55,7 +56,7 @@ internal static class ReachabilityStackEndpoints
private static async Task<IResult> HandleGetStackAsync(
string findingId,
IReachabilityStackRepository? stackRepository,
[FromServices] IReachabilityStackRepository? stackRepository,
HttpContext context,
CancellationToken cancellationToken)
{
@@ -100,7 +101,7 @@ internal static class ReachabilityStackEndpoints
private static async Task<IResult> HandleGetLayerAsync(
string findingId,
int layerNumber,
IReachabilityStackRepository? stackRepository,
[FromServices] IReachabilityStackRepository? stackRepository,
HttpContext context,
CancellationToken cancellationToken)
{

View File

@@ -78,8 +78,8 @@ internal static class SbomHotLookupEndpoints
string? purl,
string? name,
string? minVersion,
int limit,
int offset,
int? limit,
int? offset,
ISbomHotLookupService hotLookupService,
HttpContext context,
CancellationToken cancellationToken)
@@ -109,7 +109,10 @@ internal static class SbomHotLookupEndpoints
detail: "Use either 'purl' or 'name', not both.");
}
if (!SbomHotLookupService.IsLimitValid(limit))
var requestedLimit = limit ?? 0;
var requestedOffset = offset ?? 0;
if (!SbomHotLookupService.IsLimitValid(requestedLimit))
{
return ProblemResultFactory.Create(
context,
@@ -119,7 +122,7 @@ internal static class SbomHotLookupEndpoints
detail: "limit must be between 1 and 200.");
}
if (!SbomHotLookupService.IsOffsetValid(offset))
if (!SbomHotLookupService.IsOffsetValid(requestedOffset))
{
return ProblemResultFactory.Create(
context,
@@ -130,22 +133,31 @@ internal static class SbomHotLookupEndpoints
}
var result = await hotLookupService
.SearchComponentsAsync(purl, name, minVersion, limit, offset, cancellationToken)
.SearchComponentsAsync(
purl,
name,
minVersion,
requestedLimit,
requestedOffset,
cancellationToken)
.ConfigureAwait(false);
return Results.Ok(result);
}
private static async Task<IResult> HandleSearchPendingTriageAsync(
int limit,
int offset,
int? limit,
int? offset,
ISbomHotLookupService hotLookupService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(hotLookupService);
if (!SbomHotLookupService.IsLimitValid(limit))
var requestedLimit = limit ?? 0;
var requestedOffset = offset ?? 0;
if (!SbomHotLookupService.IsLimitValid(requestedLimit))
{
return ProblemResultFactory.Create(
context,
@@ -155,7 +167,7 @@ internal static class SbomHotLookupEndpoints
detail: "limit must be between 1 and 200.");
}
if (!SbomHotLookupService.IsOffsetValid(offset))
if (!SbomHotLookupService.IsOffsetValid(requestedOffset))
{
return ProblemResultFactory.Create(
context,
@@ -166,7 +178,10 @@ internal static class SbomHotLookupEndpoints
}
var result = await hotLookupService
.SearchPendingTriageAsync(limit, offset, cancellationToken)
.SearchPendingTriageAsync(
requestedLimit,
requestedOffset,
cancellationToken)
.ConfigureAwait(false);
return Results.Ok(result);

View File

@@ -20,10 +20,42 @@ internal static class ScoreReplayEndpoints
{
public static void MapScoreReplayEndpoints(this RouteGroupBuilder apiGroup)
{
var score = apiGroup.MapGroup("/score")
var legacy = apiGroup.MapGroup("/score")
.RequireAuthorization(ScannerPolicies.ScansRead);
var scans = apiGroup.MapGroup("/scans/{scanId}/score")
.RequireAuthorization(ScannerPolicies.ScansRead);
score.MapPost("/{scanId}/replay", HandleReplayAsync)
scans.MapPost("/replay", HandleReplayAsync)
.WithName("scanner.scans.score.replay")
.Produces<ScoreReplayResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
.Produces<ProblemDetails>(StatusCodes.Status422UnprocessableEntity)
.WithDescription(_t("scanner.score_replay.replay_description"))
.RequireAuthorization(ScannerPolicies.ScansWrite);
scans.MapGet("/bundle", HandleGetBundleAsync)
.WithName("scanner.scans.score.bundle")
.Produces<ScoreBundleResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
.WithDescription(_t("scanner.score_replay.bundle_description"));
scans.MapPost("/verify", HandleVerifyAsync)
.WithName("scanner.scans.score.verify")
.Produces<ScoreVerifyResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
.Produces<ProblemDetails>(StatusCodes.Status422UnprocessableEntity)
.WithDescription(_t("scanner.score_replay.verify_description"))
.RequireAuthorization(ScannerPolicies.ScansWrite);
scans.MapGet("/history", HandleGetHistoryAsync)
.WithName("scanner.scans.score.history")
.Produces<IReadOnlyList<ScoreHistoryResponseItem>>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
.WithDescription(_t("scanner.score_replay.history_description"));
// Backward-compatible aliases (/score/{scanId}/...) retained while clients migrate.
legacy.MapPost("/{scanId}/replay", HandleReplayAsync)
.WithName("scanner.score.replay")
.Produces<ScoreReplayResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
@@ -32,19 +64,25 @@ internal static class ScoreReplayEndpoints
.WithDescription(_t("scanner.score_replay.replay_description"))
.RequireAuthorization(ScannerPolicies.ScansWrite);
score.MapGet("/{scanId}/bundle", HandleGetBundleAsync)
legacy.MapGet("/{scanId}/bundle", HandleGetBundleAsync)
.WithName("scanner.score.bundle")
.Produces<ScoreBundleResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
.WithDescription(_t("scanner.score_replay.bundle_description"));
score.MapPost("/{scanId}/verify", HandleVerifyAsync)
legacy.MapPost("/{scanId}/verify", HandleVerifyAsync)
.WithName("scanner.score.verify")
.Produces<ScoreVerifyResponse>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status404NotFound)
.Produces<ProblemDetails>(StatusCodes.Status422UnprocessableEntity)
.WithDescription(_t("scanner.score_replay.verify_description"))
.RequireAuthorization(ScannerPolicies.ScansWrite);
legacy.MapGet("/{scanId}/history", HandleGetHistoryAsync)
.WithName("scanner.score.history")
.Produces<IReadOnlyList<ScoreHistoryResponseItem>>(StatusCodes.Status200OK)
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
.WithDescription(_t("scanner.score_replay.history_description"));
}
/// <summary>
@@ -91,6 +129,17 @@ internal static class ScoreReplayEndpoints
RootHash: result.RootHash,
BundleUri: result.BundleUri,
ManifestHash: result.ManifestHash,
ManifestDigest: result.ManifestDigest,
CanonicalInputHash: result.CanonicalInputHash,
CanonicalInputPayload: result.CanonicalInputPayload,
SeedHex: result.SeedHex,
Factors: result.Factors.Select(f => new ScoreReplayFactor(
f.Name,
f.Weight,
f.Raw,
f.Weighted,
f.Source)).ToArray(),
VerificationStatus: result.VerificationStatus,
ReplayedAt: result.ReplayedAt,
Deterministic: result.Deterministic));
}
@@ -200,6 +249,8 @@ internal static class ScoreReplayEndpoints
scanId,
request.ExpectedRootHash,
request.BundleUri,
request.ExpectedCanonicalInputHash,
request.CanonicalInputPayload,
cancellationToken);
return Results.Ok(new ScoreVerifyResponse(
@@ -208,6 +259,9 @@ internal static class ScoreReplayEndpoints
ExpectedRootHash: request.ExpectedRootHash,
ManifestValid: result.ManifestValid,
LedgerValid: result.LedgerValid,
CanonicalInputHashValid: result.CanonicalInputHashValid,
ExpectedCanonicalInputHash: result.ExpectedCanonicalInputHash,
CanonicalInputHash: result.CanonicalInputHash,
VerifiedAtUtc: result.VerifiedAt,
ErrorMessage: result.ErrorMessage));
}
@@ -221,6 +275,41 @@ internal static class ScoreReplayEndpoints
});
}
}
/// <summary>
/// GET /scans/{scanId}/score/history
/// Returns deterministic replay history for explainability timelines.
/// </summary>
private static async Task<IResult> HandleGetHistoryAsync(
string scanId,
IScoreReplayService replayService,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(scanId))
{
return Results.BadRequest(new ProblemDetails
{
Title = _t("scanner.scan.invalid_identifier"),
Detail = _t("scanner.scan.identifier_required"),
Status = StatusCodes.Status400BadRequest
});
}
var history = await replayService.GetScoreHistoryAsync(scanId, cancellationToken).ConfigureAwait(false);
var response = history
.Select(entry => new ScoreHistoryResponseItem(
RootHash: entry.RootHash,
ReplayedAt: entry.ReplayedAt,
Score: entry.Score,
CanonicalInputHash: entry.CanonicalInputHash,
ManifestDigest: entry.ManifestDigest,
Factors: entry.Factors
.Select(f => new ScoreReplayFactor(f.Name, f.Weight, f.Raw, f.Weighted, f.Source))
.ToArray()))
.ToArray();
return Results.Ok(response);
}
}
/// <summary>
@@ -239,6 +328,12 @@ public sealed record ScoreReplayRequest(
/// <param name="RootHash">Root hash of the proof ledger.</param>
/// <param name="BundleUri">URI to the proof bundle.</param>
/// <param name="ManifestHash">Hash of the manifest used.</param>
/// <param name="ManifestDigest">Digest of canonical manifest input.</param>
/// <param name="CanonicalInputHash">Digest of canonical score replay input payload.</param>
/// <param name="CanonicalInputPayload">Canonical score replay input payload JSON.</param>
/// <param name="SeedHex">Replay seed as hexadecimal.</param>
/// <param name="Factors">Factorized score vectors.</param>
/// <param name="VerificationStatus">Verification status for replay artifacts.</param>
/// <param name="ReplayedAt">When the replay was performed.</param>
/// <param name="Deterministic">Whether the replay was deterministic.</param>
public sealed record ScoreReplayResponse(
@@ -246,9 +341,25 @@ public sealed record ScoreReplayResponse(
string RootHash,
string BundleUri,
string ManifestHash,
string ManifestDigest,
string CanonicalInputHash,
string CanonicalInputPayload,
string SeedHex,
IReadOnlyList<ScoreReplayFactor> Factors,
string VerificationStatus,
DateTimeOffset ReplayedAt,
bool Deterministic);
/// <summary>
/// Deterministic score factor returned by replay and history APIs.
/// </summary>
public sealed record ScoreReplayFactor(
string Name,
double Weight,
double Raw,
double Weighted,
string Source);
/// <summary>
/// Response for bundle retrieval.
/// </summary>
@@ -266,7 +377,9 @@ public sealed record ScoreBundleResponse(
/// <param name="BundleUri">Optional: specific bundle URI to verify.</param>
public sealed record ScoreVerifyRequest(
string ExpectedRootHash,
string? BundleUri = null);
string? BundleUri = null,
string? ExpectedCanonicalInputHash = null,
string? CanonicalInputPayload = null);
/// <summary>
/// Response from bundle verification.
@@ -276,6 +389,9 @@ public sealed record ScoreVerifyRequest(
/// <param name="ExpectedRootHash">The expected root hash.</param>
/// <param name="ManifestValid">Whether the manifest signature is valid.</param>
/// <param name="LedgerValid">Whether the ledger integrity is valid.</param>
/// <param name="CanonicalInputHashValid">Whether canonical input hash checks passed.</param>
/// <param name="ExpectedCanonicalInputHash">Expected canonical hash when provided.</param>
/// <param name="CanonicalInputHash">Resolved canonical hash used in verification.</param>
/// <param name="VerifiedAtUtc">When verification was performed.</param>
/// <param name="ErrorMessage">Error message if verification failed.</param>
public sealed record ScoreVerifyResponse(
@@ -284,5 +400,19 @@ public sealed record ScoreVerifyResponse(
string ExpectedRootHash,
bool ManifestValid,
bool LedgerValid,
bool CanonicalInputHashValid,
string? ExpectedCanonicalInputHash,
string? CanonicalInputHash,
DateTimeOffset VerifiedAtUtc,
string? ErrorMessage = null);
/// <summary>
/// Score replay history response item.
/// </summary>
public sealed record ScoreHistoryResponseItem(
string RootHash,
DateTimeOffset ReplayedAt,
double Score,
string CanonicalInputHash,
string ManifestDigest,
IReadOnlyList<ScoreReplayFactor> Factors);

View File

@@ -33,11 +33,13 @@ using StellaOps.Scanner.Core.TrustAnchors;
using StellaOps.Scanner.Emit.Composition;
using StellaOps.Scanner.Gate;
using StellaOps.Scanner.ReachabilityDrift.DependencyInjection;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.SmartDiff.Detection;
using StellaOps.Scanner.Sources.DependencyInjection;
using StellaOps.Scanner.Sources.Persistence;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Extensions;
using StellaOps.Scanner.Storage.Oci;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
@@ -135,6 +137,14 @@ else
}
builder.Services.AddDeterminismDefaults();
builder.Services.AddScannerCache(builder.Configuration);
builder.Services.AddOptions<SliceCacheOptions>()
.Bind(builder.Configuration.GetSection("scanner:slices:cache"));
builder.Services.AddOptions<SliceQueryServiceOptions>()
.Bind(builder.Configuration.GetSection("scanner:slices:query"));
builder.Services.AddOptions<ReplayCommandServiceOptions>()
.Bind(builder.Configuration.GetSection("scanner:replayCommands"));
builder.Services.AddOptions<ReachabilityStackRepositoryOptions>()
.Bind(builder.Configuration.GetSection("scanner:reachabilityStack"));
builder.Services.AddSingleton<ServiceStatus>();
builder.Services.AddHttpContextAccessor();
builder.Services.AddSingleton<ScanProgressStream>();
@@ -191,6 +201,24 @@ builder.Services.TryAddSingleton<IVexReachabilityDecisionFilter, VexReachability
builder.Services.TryAddSingleton<IMaterialRiskChangeRepository, PostgresMaterialRiskChangeRepository>();
builder.Services.TryAddSingleton<IVexCandidateStore, PostgresVexCandidateStore>();
builder.Services.TryAddSingleton<IScanMetadataRepository, InMemoryScanMetadataRepository>();
builder.Services.TryAddSingleton<ISliceCache, SliceCache>();
builder.Services.TryAddSingleton<VerdictComputer>();
builder.Services.TryAddSingleton<SliceExtractor>();
builder.Services.TryAddSingleton<SliceHasher>();
builder.Services.TryAddSingleton<StellaOps.Scanner.Reachability.Slices.Replay.SliceDiffComputer>();
builder.Services.TryAddSingleton<SliceDsseSigner>();
builder.Services.TryAddSingleton<SliceCasStorage>();
builder.Services.TryAddScoped<ISliceQueryService, SliceQueryService>();
builder.Services.TryAddScoped<IReplayCommandService, ReplayCommandService>();
var reachabilityStackRepositoryOptions = builder.Configuration
.GetSection("scanner:reachabilityStack")
.Get<ReachabilityStackRepositoryOptions>() ?? new ReachabilityStackRepositoryOptions();
if (reachabilityStackRepositoryOptions.Enabled)
{
builder.Services.TryAddSingleton<IReachabilityStackRepository, FileBackedReachabilityStackRepository>();
}
// Secret Detection Settings (Sprint: SPRINT_20260104_006_BE)
builder.Services.AddScoped<ISecretDetectionSettingsService, SecretDetectionSettingsService>();
@@ -270,6 +298,68 @@ else
builder.Services.AddSingleton<IPlatformEventPublisher, NullPlatformEventPublisher>();
}
builder.Services.AddSingleton<IReportEventDispatcher, ReportEventDispatcher>();
builder.Services.AddHttpClient("ScannerOciAttestationPublisher")
.ConfigurePrimaryHttpMessageHandler(() =>
{
if (!bootstrapOptions.ArtifactStore.AllowInsecureTls)
{
return new HttpClientHandler();
}
return new HttpClientHandler
{
ServerCertificateCustomValidationCallback =
HttpClientHandler.DangerousAcceptAnyServerCertificateValidator
};
});
builder.Services.TryAddSingleton(sp =>
{
var options = sp.GetRequiredService<IOptions<ScannerWebServiceOptions>>().Value;
var defaultRegistry = string.IsNullOrWhiteSpace(options.Registry.DefaultRegistry)
? "docker.io"
: options.Registry.DefaultRegistry!.Trim();
var authOptions = new OciRegistryAuthOptions();
var credential = options.Registry.Credentials
.FirstOrDefault(c => string.Equals(c.Registry?.Trim(), defaultRegistry, StringComparison.OrdinalIgnoreCase))
?? options.Registry.Credentials.FirstOrDefault();
if (credential is not null)
{
authOptions.Username = credential.Username;
authOptions.Password = credential.Password;
authOptions.Token = credential.RegistryToken ?? credential.IdentityToken;
authOptions.AllowAnonymousFallback = string.IsNullOrWhiteSpace(authOptions.Username)
&& string.IsNullOrWhiteSpace(authOptions.Token);
}
var registryOptions = new OciRegistryOptions
{
DefaultRegistry = defaultRegistry,
AllowInsecure = bootstrapOptions.ArtifactStore.AllowInsecureTls,
Auth = authOptions
};
var httpClient = sp.GetRequiredService<IHttpClientFactory>().CreateClient("ScannerOciAttestationPublisher");
httpClient.Timeout = TimeSpan.FromSeconds(Math.Max(1, options.AttestationAttachment.RegistryTimeoutSeconds));
return new OciArtifactPusher(
httpClient,
sp.GetRequiredService<StellaOps.Cryptography.ICryptoHash>(),
registryOptions,
sp.GetRequiredService<ILogger<OciArtifactPusher>>(),
sp.GetService<TimeProvider>());
});
builder.Services.TryAddSingleton<IOciAttestationPublisher>(sp =>
{
var options = sp.GetRequiredService<IOptions<ScannerWebServiceOptions>>().Value;
if (!options.AttestationAttachment.AutoAttach)
{
return NullOciAttestationPublisher.Instance;
}
return ActivatorUtilities.CreateInstance<OciAttestationPublisher>(sp);
});
builder.Services.AddScannerStorage(storageOptions =>
{
storageOptions.Postgres.ConnectionString = bootstrapOptions.Storage.Dsn;
@@ -718,6 +808,7 @@ if (resolvedOptions.Features.EnablePolicyPreview)
apiGroup.MapReportEndpoints(resolvedOptions.Api.ReportsSegment);
apiGroup.MapRuntimeEndpoints(resolvedOptions.Api.RuntimeSegment);
apiGroup.MapReachabilityStackEndpoints();
app.MapControllers();
app.MapOpenApiIfAvailable();

View File

@@ -12,7 +12,7 @@ using System.Text.Json.Serialization.Metadata;
namespace StellaOps.Scanner.WebService.Serialization;
internal static class OrchestratorEventSerializer
internal static class JobEngineEventSerializer
{
private static readonly JsonSerializerOptions CanonicalOptions = CreateOptions();
private static readonly JsonSerializerOptions PrettyOptions = new()
@@ -21,10 +21,10 @@ internal static class OrchestratorEventSerializer
Encoder = JavaScriptEncoder.Default
};
public static string Serialize(OrchestratorEvent @event)
public static string Serialize(JobEngineEvent @event)
=> Encoding.UTF8.GetString(CanonJson.Canonicalize(@event, CanonicalOptions));
public static string SerializeIndented(OrchestratorEvent @event)
public static string SerializeIndented(JobEngineEvent @event)
{
var canonicalBytes = CanonJson.Canonicalize(@event, CanonicalOptions);
using var document = JsonDocument.Parse(canonicalBytes);
@@ -48,7 +48,7 @@ internal static class OrchestratorEventSerializer
{
private static readonly ImmutableDictionary<Type, string[]> PropertyOrder = new Dictionary<Type, string[]>
{
[typeof(OrchestratorEvent)] = new[]
[typeof(JobEngineEvent)] = new[]
{
"eventId",
"kind",
@@ -65,7 +65,7 @@ internal static class OrchestratorEventSerializer
"payload",
"attributes"
},
[typeof(OrchestratorEventScope)] = new[]
[typeof(JobEngineEventScope)] = new[]
{
"namespace",
"repo",
@@ -212,7 +212,7 @@ internal static class OrchestratorEventSerializer
private static void ConfigurePolymorphism(JsonTypeInfo info)
{
if (info.Type != typeof(OrchestratorEventPayload))
if (info.Type != typeof(JobEngineEventPayload))
{
return;
}

View File

@@ -4,12 +4,18 @@ using System.Buffers.Binary;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.WebService.Services;
public sealed class DeterministicScoringService : IScoringService
{
public Task<double> ReplayScoreAsync(
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
};
public Task<DeterministicScoreResult> ReplayScoreAsync(
string scanId,
string concelierSnapshotHash,
string excititorSnapshotHash,
@@ -24,18 +30,32 @@ public sealed class DeterministicScoringService : IScoringService
ArgumentNullException.ThrowIfNull(ledger);
cancellationToken.ThrowIfCancellationRequested();
var input = string.Join(
"|",
scanId.Trim(),
concelierSnapshotHash?.Trim() ?? string.Empty,
excititorSnapshotHash?.Trim() ?? string.Empty,
latticePolicyHash?.Trim() ?? string.Empty,
freezeTimestamp.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture),
Convert.ToHexStringLower(seed));
var normalizedScanId = scanId.Trim();
var normalizedConcelier = (concelierSnapshotHash ?? string.Empty).Trim();
var normalizedExcititor = (excititorSnapshotHash ?? string.Empty).Trim();
var normalizedPolicy = (latticePolicyHash ?? string.Empty).Trim();
var seedHex = Convert.ToHexStringLower(seed);
var freezeTimestampIso = freezeTimestamp.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
var digest = SHA256.HashData(Encoding.UTF8.GetBytes(input));
var value = BinaryPrimitives.ReadUInt64BigEndian(digest.AsSpan(0, sizeof(ulong)));
var score = value / (double)ulong.MaxValue;
var canonicalInput = new CanonicalScoreInput(
normalizedScanId,
normalizedConcelier,
normalizedExcititor,
normalizedPolicy,
freezeTimestampIso,
seedHex);
var canonicalPayload = JsonSerializer.Serialize(canonicalInput, JsonOptions);
var canonicalInputHash = $"sha256:{Convert.ToHexStringLower(SHA256.HashData(Encoding.UTF8.GetBytes(canonicalPayload)))}";
var factors = new List<DeterministicScoreFactor>
{
BuildFactor("cvss", 0.35, normalizedConcelier, 0, "concelier"),
BuildFactor("epss", 0.20, normalizedExcititor, 4, "excititor"),
BuildFactor("reachability", 0.25, $"{normalizedScanId}|{normalizedPolicy}", 8, "policy"),
BuildFactor("provenance", 0.20, $"{normalizedPolicy}|{seedHex}", 12, "manifest")
};
var score = Math.Round(factors.Sum(f => f.Weighted), 6, MidpointRounding.ToEven);
score = Math.Clamp(score, 0.0, 1.0);
var actor = "scanner.webservice.score";
@@ -43,13 +63,14 @@ public sealed class DeterministicScoringService : IScoringService
{
concelierSnapshotHash,
excititorSnapshotHash,
latticePolicyHash
latticePolicyHash,
canonicalInputHash
}.Where(v => !string.IsNullOrWhiteSpace(v)).Select(v => v!).ToArray();
var inputNodeId = $"input:{scanId}";
var inputNodeId = $"input:{normalizedScanId}";
ledger.Append(ProofNode.CreateInput(
id: inputNodeId,
ruleId: "deterministic",
ruleId: "deterministic-v2",
actor: actor,
tsUtc: freezeTimestamp,
seed: seed,
@@ -57,15 +78,44 @@ public sealed class DeterministicScoringService : IScoringService
evidenceRefs: evidenceRefs));
ledger.Append(ProofNode.CreateScore(
id: $"score:{scanId}",
ruleId: "deterministic",
id: $"score:{normalizedScanId}",
ruleId: "deterministic-v2",
actor: actor,
tsUtc: freezeTimestamp,
seed: seed,
finalScore: score,
parentIds: new[] { inputNodeId }));
return Task.FromResult(score);
return Task.FromResult(new DeterministicScoreResult(
Score: score,
CanonicalInputHash: canonicalInputHash,
CanonicalInputPayload: canonicalPayload,
SeedHex: seedHex,
Factors: factors,
FormulaVersion: "v2.factorized"));
}
}
private static DeterministicScoreFactor BuildFactor(string name, double weight, string source, int offset, string sourceLabel)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(source));
var safeOffset = Math.Min(Math.Max(0, offset), hash.Length - sizeof(uint));
var raw = BinaryPrimitives.ReadUInt32BigEndian(hash.AsSpan(safeOffset, sizeof(uint))) / (double)uint.MaxValue;
raw = Math.Round(raw, 6, MidpointRounding.ToEven);
var weighted = Math.Round(raw * weight, 6, MidpointRounding.ToEven);
return new DeterministicScoreFactor(
Name: name,
Weight: weight,
Raw: raw,
Weighted: weighted,
Source: sourceLabel);
}
private sealed record CanonicalScoreInput(
string scanId,
string concelierSnapshotHash,
string excititorSnapshotHash,
string latticePolicyHash,
string freezeTimestamp,
string seedHex);
}

View File

@@ -0,0 +1,121 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Reachability.Stack;
using StellaOps.Scanner.WebService.Endpoints;
using System.Collections.Concurrent;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.WebService.Services;
public sealed class ReachabilityStackRepositoryOptions
{
public bool Enabled { get; set; }
public string PersistenceFilePath { get; set; } = string.Empty;
}
internal sealed class FileBackedReachabilityStackRepository : IReachabilityStackRepository
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false,
Converters = { new JsonStringEnumConverter() }
};
private readonly ReachabilityStackRepositoryOptions _options;
private readonly ILogger<FileBackedReachabilityStackRepository> _logger;
private readonly ConcurrentDictionary<string, ReachabilityStack> _stacks = new(StringComparer.Ordinal);
private readonly SemaphoreSlim _ioGate = new(1, 1);
public FileBackedReachabilityStackRepository(
IOptions<ReachabilityStackRepositoryOptions> options,
ILogger<FileBackedReachabilityStackRepository> logger)
{
_options = options?.Value ?? new ReachabilityStackRepositoryOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
LoadFromDiskIfConfigured();
}
public Task<ReachabilityStack?> TryGetByFindingIdAsync(string findingId, CancellationToken ct)
{
ct.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(findingId))
{
return Task.FromResult<ReachabilityStack?>(null);
}
_stacks.TryGetValue(findingId.Trim(), out var stack);
return Task.FromResult(stack);
}
public async Task StoreAsync(ReachabilityStack stack, CancellationToken ct)
{
ArgumentNullException.ThrowIfNull(stack);
ct.ThrowIfCancellationRequested();
_stacks[stack.FindingId] = stack;
await PersistToDiskIfConfiguredAsync(ct).ConfigureAwait(false);
}
private void LoadFromDiskIfConfigured()
{
var path = _options.PersistenceFilePath?.Trim();
if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
{
return;
}
try
{
var json = File.ReadAllBytes(path);
var stacks = JsonSerializer.Deserialize<IReadOnlyList<ReachabilityStack>>(json, SerializerOptions)
?? Array.Empty<ReachabilityStack>();
foreach (var stack in stacks.Where(static s => !string.IsNullOrWhiteSpace(s.FindingId)))
{
_stacks[stack.FindingId] = stack;
}
_logger.LogInformation(
"Loaded {Count} reachability stack records from {Path}.",
_stacks.Count,
path);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load reachability stack persistence file {Path}.", path);
}
}
/// <summary>
/// Serializes the full set of stacks (ordered by finding id for deterministic output)
/// and atomically replaces the configured persistence file. No-op when no path is set.
/// Writes are serialized through <c>_ioGate</c> so concurrent stores do not interleave.
/// </summary>
/// <param name="ct">Cancellation token honored while acquiring the gate and writing.</param>
private async Task PersistToDiskIfConfiguredAsync(CancellationToken ct)
{
    var path = _options.PersistenceFilePath?.Trim();
    if (string.IsNullOrWhiteSpace(path))
    {
        return;
    }

    await _ioGate.WaitAsync(ct).ConfigureAwait(false);
    try
    {
        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrWhiteSpace(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Ordinal ordering keeps the persisted file byte-stable across runs.
        var ordered = _stacks.Values
            .OrderBy(stack => stack.FindingId, StringComparer.Ordinal)
            .ToArray();
        var bytes = JsonSerializer.SerializeToUtf8Bytes(ordered, SerializerOptions);

        // Write to a sibling temp file, then swap it in: a crash or cancellation
        // mid-write previously truncated the live file, which LoadFromDiskIfConfigured
        // would then fail to parse, silently dropping every persisted stack.
        var tempPath = path + ".tmp";
        await File.WriteAllBytesAsync(tempPath, bytes, ct).ConfigureAwait(false);
        File.Move(tempPath, path, overwrite: true);
    }
    finally
    {
        _ioGate.Release();
    }
}
}

View File

@@ -6,12 +6,12 @@ using System.Threading.Tasks;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Publishes orchestrator events to the internal bus consumed by downstream services.
/// Publishes job engine events to the internal bus consumed by downstream services.
/// </summary>
internal interface IPlatformEventPublisher
{
/// <summary>
/// Publishes the supplied event envelope.
/// </summary>
Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default);
Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default);
}

View File

@@ -40,21 +40,53 @@ public interface IScoreReplayService
string? rootHash = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets deterministic score replay history for a scan.
/// </summary>
Task<IReadOnlyList<ScoreHistoryEntry>> GetScoreHistoryAsync(
string scanId,
CancellationToken cancellationToken = default);
/// <summary>
/// Verify a proof bundle against expected root hash.
/// </summary>
/// <param name="scanId">The scan ID.</param>
/// <param name="expectedRootHash">The expected root hash.</param>
/// <param name="bundleUri">Optional specific bundle URI to verify.</param>
/// <param name="expectedCanonicalInputHash">Optional canonical input hash to verify.</param>
/// <param name="canonicalInputPayload">Optional canonical payload to hash and verify.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result.</returns>
Task<BundleVerifyResult> VerifyBundleAsync(
string scanId,
string expectedRootHash,
string? bundleUri = null,
string? expectedCanonicalInputHash = null,
string? canonicalInputPayload = null,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Deterministic score factor used for explainability and replay.
/// </summary>
public sealed record DeterministicScoreFactor(
string Name,
double Weight,
double Raw,
double Weighted,
string Source);
/// <summary>
/// Score history item for a scan replay.
/// </summary>
public sealed record ScoreHistoryEntry(
string RootHash,
DateTimeOffset ReplayedAt,
double Score,
string CanonicalInputHash,
string ManifestDigest,
IReadOnlyList<DeterministicScoreFactor> Factors);
/// <summary>
/// Result of a score replay operation.
/// </summary>
@@ -62,6 +94,12 @@ public interface IScoreReplayService
/// <param name="RootHash">Root hash of the proof ledger.</param>
/// <param name="BundleUri">URI to the proof bundle.</param>
/// <param name="ManifestHash">Hash of the manifest used.</param>
/// <param name="ManifestDigest">Digest of canonical manifest payload.</param>
/// <param name="CanonicalInputHash">Digest of canonical score inputs.</param>
/// <param name="CanonicalInputPayload">Canonical payload used for hashing.</param>
/// <param name="SeedHex">Replay seed in hexadecimal.</param>
/// <param name="Factors">Factorized score vectors.</param>
/// <param name="VerificationStatus">Verification status text.</param>
/// <param name="ReplayedAt">When the replay was performed.</param>
/// <param name="Deterministic">Whether the replay was deterministic.</param>
public sealed record ScoreReplayResult(
@@ -69,6 +107,12 @@ public sealed record ScoreReplayResult(
string RootHash,
string BundleUri,
string ManifestHash,
string ManifestDigest,
string CanonicalInputHash,
string CanonicalInputPayload,
string SeedHex,
IReadOnlyList<DeterministicScoreFactor> Factors,
string VerificationStatus,
DateTimeOffset ReplayedAt,
bool Deterministic);
@@ -79,19 +123,25 @@ public sealed record ScoreReplayResult(
/// <param name="ComputedRootHash">The computed root hash.</param>
/// <param name="ManifestValid">Whether the manifest signature is valid.</param>
/// <param name="LedgerValid">Whether the ledger integrity is valid.</param>
/// <param name="CanonicalInputHashValid">Whether canonical hash verification passed.</param>
/// <param name="VerifiedAt">When verification was performed.</param>
/// <param name="ExpectedCanonicalInputHash">Expected canonical hash when provided.</param>
/// <param name="CanonicalInputHash">Computed or stored canonical hash.</param>
/// <param name="ErrorMessage">Error message if verification failed.</param>
public sealed record BundleVerifyResult(
bool Valid,
string ComputedRootHash,
bool ManifestValid,
bool LedgerValid,
bool CanonicalInputHashValid,
DateTimeOffset VerifiedAt,
string? ExpectedCanonicalInputHash = null,
string? CanonicalInputHash = null,
string? ErrorMessage = null)
{
public static BundleVerifyResult Success(string computedRootHash, TimeProvider? timeProvider = null) =>
new(true, computedRootHash, true, true, (timeProvider ?? TimeProvider.System).GetUtcNow());
new(true, computedRootHash, true, true, true, (timeProvider ?? TimeProvider.System).GetUtcNow());
public static BundleVerifyResult Failure(string error, string computedRootHash = "", TimeProvider? timeProvider = null) =>
new(false, computedRootHash, false, false, (timeProvider ?? TimeProvider.System).GetUtcNow(), error);
new(false, computedRootHash, false, false, false, (timeProvider ?? TimeProvider.System).GetUtcNow(), null, null, error);
}

View File

@@ -15,7 +15,7 @@ namespace StellaOps.Scanner.WebService.Services;
/// </summary>
internal sealed class MessagingPlatformEventPublisher : IPlatformEventPublisher
{
private readonly IEventStream<OrchestratorEvent> _eventStream;
private readonly IEventStream<JobEngineEvent> _eventStream;
private readonly ILogger<MessagingPlatformEventPublisher> _logger;
private readonly TimeSpan _publishTimeout;
private readonly long? _maxStreamLength;
@@ -38,7 +38,7 @@ internal sealed class MessagingPlatformEventPublisher : IPlatformEventPublisher
_maxStreamLength = eventsOptions.MaxStreamLength > 0 ? eventsOptions.MaxStreamLength : null;
_publishTimeout = TimeSpan.FromSeconds(eventsOptions.PublishTimeoutSeconds <= 0 ? 5 : eventsOptions.PublishTimeoutSeconds);
_eventStream = eventStreamFactory.Create<OrchestratorEvent>(new EventStreamOptions
_eventStream = eventStreamFactory.Create<JobEngineEvent>(new EventStreamOptions
{
StreamName = streamName,
MaxLength = _maxStreamLength,
@@ -50,7 +50,7 @@ internal sealed class MessagingPlatformEventPublisher : IPlatformEventPublisher
_logger.LogInformation("Initialized messaging platform event publisher for stream {Stream}.", streamName);
}
public async Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default)
public async Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(@event);
cancellationToken.ThrowIfCancellationRequested();

View File

@@ -18,7 +18,7 @@ internal sealed class NullPlatformEventPublisher : IPlatformEventPublisher
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default)
public Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default)
{
if (@event is null)
{
@@ -27,7 +27,7 @@ internal sealed class NullPlatformEventPublisher : IPlatformEventPublisher
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug("Suppressing publish for orchestrator event {EventKind} (tenant {Tenant}).", @event.Kind, @event.Tenant);
_logger.LogDebug("Suppressing publish for job engine event {EventKind} (tenant {Tenant}).", @event.Kind, @event.Tenant);
}
return Task.CompletedTask;

View File

@@ -4,12 +4,14 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Storage.Oci;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Options;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
@@ -29,16 +31,20 @@ internal sealed class OciAttestationPublisher : IOciAttestationPublisher
private static readonly ActivitySource ActivitySource = new("StellaOps.Scanner.WebService.OciAttestationPublisher");
private readonly ScannerWebServiceOptions.AttestationAttachmentOptions _options;
private readonly OciArtifactPusher _artifactPusher;
private readonly ILogger<OciAttestationPublisher> _logger;
public OciAttestationPublisher(
IOptions<ScannerWebServiceOptions> options,
OciArtifactPusher artifactPusher,
ILogger<OciAttestationPublisher> logger)
{
ArgumentNullException.ThrowIfNull(options);
ArgumentNullException.ThrowIfNull(artifactPusher);
ArgumentNullException.ThrowIfNull(logger);
_options = options.Value.AttestationAttachment ?? new ScannerWebServiceOptions.AttestationAttachmentOptions();
_artifactPusher = artifactPusher;
_logger = logger;
}
@@ -159,29 +165,59 @@ internal sealed class OciAttestationPublisher : IOciAttestationPublisher
activity?.SetTag("repository", repository);
activity?.SetTag("predicateType", predicateType);
if (string.IsNullOrWhiteSpace(digest))
{
_logger.LogWarning("Cannot attach {PredicateType}: image digest is missing.", predicateType);
return null;
}
_logger.LogDebug(
"Attaching {PredicateType} attestation to {Registry}/{Repository}@{Digest} for report {ReportId}.",
predicateType, registry, repository, digest, reportId);
// TODO: Integrate with IOciAttestationAttacher service when available in DI
// For now, this is a placeholder implementation that logs the operation
// The actual implementation would:
// 1. Build OciReference from registry/repository/digest
// 2. Convert DsseEnvelopeDto to DsseEnvelope
// 3. Configure AttachmentOptions based on _options
// 4. Call IOciAttestationAttacher.AttachAsync()
// 5. Return the attestation digest
var envelopeBytes = SerializeEnvelope(envelope);
var reference = $"{registry}/{repository}@{digest}";
var tag = BuildAttestationTag(predicateType, reportId);
var pushRequest = new OciArtifactPushRequest
{
Reference = reference,
ArtifactType = predicateType,
SubjectDigest = digest,
Tag = tag,
SkipIfTagExists = !_options.ReplaceExisting,
Layers =
[
new OciLayerContent
{
Content = envelopeBytes,
MediaType = OciMediaTypes.DsseEnvelope
}
],
Annotations = new Dictionary<string, string>(StringComparer.Ordinal)
{
[OciAnnotations.StellaPredicateType] = predicateType,
[OciAnnotations.StellaIdempotencyKey] = $"{reportId}:{predicateType}"
}
};
await Task.Delay(1, cancellationToken); // Placeholder async operation
var result = await _artifactPusher.PushAsync(pushRequest, cancellationToken).ConfigureAwait(false);
if (!result.Success || string.IsNullOrWhiteSpace(result.ManifestDigest))
{
_logger.LogWarning(
"Attestation push failed for {Reference} ({PredicateType}): {Error}",
reference,
predicateType,
result.Error ?? "unknown");
return null;
}
_logger.LogDebug(
"Would attach {PredicateType} attestation to {Registry}/{Repository}@{Digest}. " +
"SigningMode: {SigningMode}, UseRekor: {UseRekor}",
predicateType, registry, repository, digest,
_options.SigningMode, _options.UseRekor);
_logger.LogInformation(
"Attached {PredicateType} attestation to {Reference} as {ManifestDigest}.",
predicateType,
reference,
result.ManifestDigest);
// Return placeholder digest - actual implementation would return real digest
return $"sha256:placeholder_{predicateType.Replace('/', '_').Replace('@', '_')}_{reportId}";
return result.ManifestDigest;
}
private static bool TryParseImageReference(
@@ -268,4 +304,62 @@ internal sealed class OciAttestationPublisher : IOciAttestationPublisher
return !string.IsNullOrWhiteSpace(registry) && !string.IsNullOrWhiteSpace(repository);
}
private static byte[] SerializeEnvelope(DsseEnvelopeDto envelope)
{
var signatures = envelope.Signatures
.Where(static signature => !string.IsNullOrWhiteSpace(signature.Sig))
.Select(static signature => new SerializedDsseSignature
{
KeyId = signature.KeyId,
Sig = signature.Sig
})
.ToArray();
var serialized = new SerializedDsseEnvelope
{
PayloadType = envelope.PayloadType,
Payload = envelope.Payload,
Signatures = signatures
};
return JsonSerializer.SerializeToUtf8Bytes(serialized);
}
private static string BuildAttestationTag(string predicateType, string reportId)
{
var normalizedPredicate = predicateType
.ToLowerInvariant()
.Replace(":", "-", StringComparison.Ordinal)
.Replace("/", "-", StringComparison.Ordinal)
.Replace("@", "-", StringComparison.Ordinal);
return $"att-{reportId.ToLowerInvariant()}-{normalizedPredicate}";
}
private sealed record SerializedDsseEnvelope
{
[JsonPropertyName("payloadType")]
[JsonPropertyOrder(0)]
public string PayloadType { get; init; } = string.Empty;
[JsonPropertyName("payload")]
[JsonPropertyOrder(1)]
public string Payload { get; init; } = string.Empty;
[JsonPropertyName("signatures")]
[JsonPropertyOrder(2)]
public IReadOnlyList<SerializedDsseSignature> Signatures { get; init; } = Array.Empty<SerializedDsseSignature>();
}
private sealed record SerializedDsseSignature
{
[JsonPropertyName("keyid")]
[JsonPropertyOrder(0)]
public string KeyId { get; init; } = string.Empty;
[JsonPropertyName("sig")]
[JsonPropertyOrder(1)]
public string Sig { get; init; } = string.Empty;
}
}

View File

@@ -51,13 +51,13 @@ internal sealed class RedisPlatformEventPublisher : IPlatformEventPublisher, IAs
_maxStreamLength = _options.MaxStreamLength > 0 ? _options.MaxStreamLength : null;
}
public async Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default)
public async Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(@event);
cancellationToken.ThrowIfCancellationRequested();
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
var payload = OrchestratorEventSerializer.Serialize(@event);
var payload = JobEngineEventSerializer.Serialize(@event);
var entries = new NameValueEntry[]
{

View File

@@ -5,6 +5,7 @@
// -----------------------------------------------------------------------------
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Triage;
using StellaOps.Scanner.Triage.Entities;
using StellaOps.Scanner.WebService.Contracts;
@@ -18,22 +19,29 @@ namespace StellaOps.Scanner.WebService.Services;
/// </summary>
public sealed class ReplayCommandService : IReplayCommandService
{
private const string DefaultBinary = "stellaops";
private const string DefaultShell = "bash";
private readonly TriageDbContext _dbContext;
private readonly ILogger<ReplayCommandService> _logger;
private readonly TimeProvider _timeProvider;
// Configuration (would come from IOptions in real implementation)
private const string DefaultBinary = "stellaops";
private const string ApiBaseUrl = "https://api.stellaops.local";
private readonly string _binary;
private readonly string _apiBaseUrl;
public ReplayCommandService(
TriageDbContext dbContext,
ILogger<ReplayCommandService> logger,
TimeProvider? timeProvider = null)
TimeProvider? timeProvider = null,
IOptions<ReplayCommandServiceOptions>? options = null)
{
_dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
var resolvedOptions = options?.Value ?? new ReplayCommandServiceOptions();
_binary = string.IsNullOrWhiteSpace(resolvedOptions.Binary)
? DefaultBinary
: resolvedOptions.Binary.Trim();
_apiBaseUrl = NormalizeApiBaseUrl(resolvedOptions.ApiBaseUrl);
}
/// <inheritdoc />
@@ -69,18 +77,20 @@ public sealed class ReplayCommandService : IReplayCommandService
var scan = finding.Scan;
var verdictHash = ComputeVerdictHash(finding);
var snapshotId = scan?.KnowledgeSnapshotId ?? finding.KnowledgeSnapshotId;
var shell = ResolveShell(request.Shells);
var binary = ResolveBinaryForShell(shell);
// Generate full command
var fullCommand = BuildFullCommand(finding, scan);
var fullCommand = BuildFullCommand(finding, scan, shell, binary);
// Generate short command if snapshot available
var shortCommand = snapshotId is not null
? BuildShortCommand(finding, snapshotId)
? BuildShortCommand(finding, snapshotId, shell, binary)
: null;
// Generate offline command if requested
var offlineCommand = request.IncludeOffline
? BuildOfflineCommand(finding, scan)
? BuildOfflineCommand(finding, scan, shell, binary)
: null;
// Build snapshot info
@@ -136,12 +146,14 @@ public sealed class ReplayCommandService : IReplayCommandService
return null;
}
var fullCommand = BuildScanFullCommand(scan);
var shell = ResolveShell(request.Shells);
var binary = ResolveBinaryForShell(shell);
var fullCommand = BuildScanFullCommand(scan, shell, binary);
var shortCommand = scan.KnowledgeSnapshotId is not null
? BuildScanShortCommand(scan)
? BuildScanShortCommand(scan, shell, binary)
: null;
var offlineCommand = request.IncludeOffline
? BuildScanOfflineCommand(scan)
? BuildScanOfflineCommand(scan, shell, binary)
: null;
var snapshotInfo = scan.KnowledgeSnapshotId is not null
? BuildSnapshotInfo(scan.KnowledgeSnapshotId, scan)
@@ -163,14 +175,15 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildFullCommand(TriageFinding finding, TriageScan? scan)
private ReplayCommandDto BuildFullCommand(TriageFinding finding, TriageScan? scan, string shell, string binary)
{
var target = finding.ComponentPurl ?? finding.ArtifactDigest ?? finding.Id.ToString();
var feedSnapshot = scan?.FeedSnapshotHash ?? "latest";
var policyHash = scan?.PolicyHash ?? "default";
var feedSnapshot = ResolveFeedSnapshotHash(scan);
var policyHash = ResolvePolicyHash(scan);
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} replay " +
$"--target \"{target}\" " +
var command = $"{binary} replay " +
$"--target {quotedTarget} " +
$"--cve {finding.CveId} " +
$"--feed-snapshot {feedSnapshot} " +
$"--policy-hash {policyHash} " +
@@ -180,11 +193,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "full",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = true,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -203,12 +216,13 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildShortCommand(TriageFinding finding, string snapshotId)
private ReplayCommandDto BuildShortCommand(TriageFinding finding, string snapshotId, string shell, string binary)
{
var target = finding.ComponentPurl ?? finding.ArtifactDigest ?? finding.Id.ToString();
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} replay " +
$"--target \"{target}\" " +
var command = $"{binary} replay " +
$"--target {quotedTarget} " +
$"--cve {finding.CveId} " +
$"--snapshot {snapshotId} " +
$"--verify";
@@ -217,11 +231,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "short",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = true,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -239,13 +253,14 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildOfflineCommand(TriageFinding finding, TriageScan? scan)
private ReplayCommandDto BuildOfflineCommand(TriageFinding finding, TriageScan? scan, string shell, string binary)
{
var target = finding.ComponentPurl ?? finding.ArtifactDigest ?? finding.Id.ToString();
var bundleId = $"{finding.ScanId}-{finding.Id}";
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} replay " +
$"--target \"{target}\" " +
var command = $"{binary} replay " +
$"--target {quotedTarget} " +
$"--cve {finding.CveId} " +
$"--bundle ./evidence-{bundleId}.tar.gz " +
$"--offline " +
@@ -255,11 +270,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "offline",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = false,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -277,14 +292,15 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildScanFullCommand(TriageScan scan)
private ReplayCommandDto BuildScanFullCommand(TriageScan scan, string shell, string binary)
{
var target = scan.TargetDigest ?? scan.TargetReference ?? scan.Id.ToString();
var feedSnapshot = scan.FeedSnapshotHash ?? "latest";
var policyHash = scan.PolicyHash ?? "default";
var feedSnapshot = ResolveFeedSnapshotHash(scan);
var policyHash = ResolvePolicyHash(scan);
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} scan replay " +
$"--target \"{target}\" " +
var command = $"{binary} scan replay " +
$"--target {quotedTarget} " +
$"--feed-snapshot {feedSnapshot} " +
$"--policy-hash {policyHash} " +
$"--verify";
@@ -293,11 +309,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "full",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = true,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "scan replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -310,12 +326,13 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildScanShortCommand(TriageScan scan)
private ReplayCommandDto BuildScanShortCommand(TriageScan scan, string shell, string binary)
{
var target = scan.TargetDigest ?? scan.TargetReference ?? scan.Id.ToString();
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} scan replay " +
$"--target \"{target}\" " +
var command = $"{binary} scan replay " +
$"--target {quotedTarget} " +
$"--snapshot {scan.KnowledgeSnapshotId} " +
$"--verify";
@@ -323,11 +340,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "short",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = true,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "scan replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -339,13 +356,14 @@ public sealed class ReplayCommandService : IReplayCommandService
};
}
private ReplayCommandDto BuildScanOfflineCommand(TriageScan scan)
private ReplayCommandDto BuildScanOfflineCommand(TriageScan scan, string shell, string binary)
{
var target = scan.TargetDigest ?? scan.TargetReference ?? scan.Id.ToString();
var bundleId = scan.Id.ToString();
var quotedTarget = QuoteValue(target, shell);
var command = $"{DefaultBinary} scan replay " +
$"--target \"{target}\" " +
var command = $"{binary} scan replay " +
$"--target {quotedTarget} " +
$"--bundle ./scan-{bundleId}.tar.gz " +
$"--offline " +
$"--verify";
@@ -354,11 +372,11 @@ public sealed class ReplayCommandService : IReplayCommandService
{
Type = "offline",
Command = command,
Shell = "bash",
Shell = shell,
RequiresNetwork = false,
Parts = new ReplayCommandPartsDto
{
Binary = DefaultBinary,
Binary = binary,
Subcommand = "scan replay",
Target = target,
Arguments = new Dictionary<string, string>
@@ -372,16 +390,19 @@ public sealed class ReplayCommandService : IReplayCommandService
private SnapshotInfoDto BuildSnapshotInfo(string snapshotId, TriageScan? scan)
{
var feedVersions = (scan?.FeedVersions is { Count: > 0 } configured)
? new Dictionary<string, string>(configured, StringComparer.Ordinal)
: new Dictionary<string, string>(StringComparer.Ordinal)
{
["snapshot"] = ResolveFeedSnapshotHash(scan)
};
return new SnapshotInfoDto
{
Id = snapshotId,
CreatedAt = scan?.SnapshotCreatedAt ?? _timeProvider.GetUtcNow(),
FeedVersions = scan?.FeedVersions ?? new Dictionary<string, string>
{
["nvd"] = "latest",
["osv"] = "latest"
},
DownloadUri = $"{ApiBaseUrl}/snapshots/{snapshotId}",
FeedVersions = feedVersions,
DownloadUri = BuildApiUri($"/snapshots/{snapshotId}"),
ContentHash = scan?.SnapshotContentHash ?? ComputeDigest(snapshotId)
};
}
@@ -394,7 +415,7 @@ public sealed class ReplayCommandService : IReplayCommandService
return new EvidenceBundleInfoDto
{
Id = bundleId,
DownloadUri = $"{ApiBaseUrl}/bundles/{bundleId}",
DownloadUri = BuildApiUri($"/bundles/{bundleId}"),
SizeBytes = null, // Would be computed when bundle is generated
ContentHash = contentHash,
Format = "tar.gz",
@@ -418,7 +439,7 @@ public sealed class ReplayCommandService : IReplayCommandService
return new EvidenceBundleInfoDto
{
Id = bundleId,
DownloadUri = $"{ApiBaseUrl}/bundles/scan/{bundleId}",
DownloadUri = BuildApiUri($"/bundles/scan/{bundleId}"),
SizeBytes = null,
ContentHash = contentHash,
Format = "tar.gz",
@@ -446,4 +467,122 @@ public sealed class ReplayCommandService : IReplayCommandService
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return $"sha256:{Convert.ToHexString(bytes).ToLowerInvariant()}";
}
private static string ResolveFeedSnapshotHash(TriageScan? scan)
{
if (!string.IsNullOrWhiteSpace(scan?.FeedSnapshotHash))
{
return scan.FeedSnapshotHash!;
}
var seed = scan?.Id.ToString() ?? "no-scan";
return ComputeDigest($"feed-snapshot:{seed}");
}
private static string ResolvePolicyHash(TriageScan? scan)
{
if (!string.IsNullOrWhiteSpace(scan?.PolicyHash))
{
return scan.PolicyHash!;
}
var seed = scan?.Id.ToString() ?? "no-scan";
return ComputeDigest($"policy-hash:{seed}");
}
private static string NormalizeApiBaseUrl(string? configured)
{
if (string.IsNullOrWhiteSpace(configured))
{
return string.Empty;
}
return configured.Trim().TrimEnd('/');
}
private static string ResolveShell(IReadOnlyList<string>? shells)
{
if (shells is null || shells.Count == 0)
{
return DefaultShell;
}
foreach (var shell in shells)
{
if (string.IsNullOrWhiteSpace(shell))
{
continue;
}
var normalized = shell.Trim().ToLowerInvariant();
if (normalized is "bash")
{
return "bash";
}
if (normalized is "powershell" or "pwsh" or "ps")
{
return "powershell";
}
if (normalized is "cmd" or "cmd.exe")
{
return "cmd";
}
}
return DefaultShell;
}
private string ResolveBinaryForShell(string shell)
{
if (!string.Equals(shell, "powershell", StringComparison.Ordinal) &&
!string.Equals(shell, "cmd", StringComparison.Ordinal))
{
return _binary;
}
if (_binary.Contains('/', StringComparison.Ordinal) ||
_binary.Contains('\\', StringComparison.Ordinal) ||
_binary.EndsWith(".exe", StringComparison.OrdinalIgnoreCase))
{
return _binary;
}
return $"{_binary}.exe";
}
private static string QuoteValue(string value, string shell)
{
if (string.Equals(shell, "powershell", StringComparison.Ordinal))
{
return $"'{value.Replace("'", "''", StringComparison.Ordinal)}'";
}
return $"\"{value.Replace("\"", "\\\"", StringComparison.Ordinal)}\"";
}
private string BuildApiUri(string relativePath)
{
if (string.IsNullOrWhiteSpace(relativePath))
{
return _apiBaseUrl;
}
if (string.IsNullOrWhiteSpace(_apiBaseUrl))
{
return relativePath.StartsWith("/", StringComparison.Ordinal)
? relativePath
: $"/{relativePath}";
}
return $"{_apiBaseUrl}/{relativePath.TrimStart('/')}";
}
}
public sealed class ReplayCommandServiceOptions
{
public string Binary { get; set; } = "stellaops";
public string ApiBaseUrl { get; set; } = string.Empty;
}

View File

@@ -30,6 +30,7 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
private readonly IPlatformEventPublisher _publisher;
private readonly IClassificationChangeTracker _classificationChangeTracker;
private readonly IOciAttestationPublisher _ociAttestationPublisher;
private readonly IGuidProvider _guidProvider;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ReportEventDispatcher> _logger;
@@ -47,10 +48,12 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
IOptions<ScannerWebServiceOptions> options,
IGuidProvider guidProvider,
TimeProvider timeProvider,
ILogger<ReportEventDispatcher> logger)
ILogger<ReportEventDispatcher> logger,
IOciAttestationPublisher? ociAttestationPublisher = null)
{
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
_classificationChangeTracker = classificationChangeTracker ?? throw new ArgumentNullException(nameof(classificationChangeTracker));
_ociAttestationPublisher = ociAttestationPublisher ?? NullOciAttestationPublisher.Instance;
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
if (options is null)
{
@@ -104,16 +107,16 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
var correlationId = document.ReportId;
var (traceId, spanId) = ResolveTraceContext();
var reportEvent = new OrchestratorEvent
var reportEvent = new JobEngineEvent
{
EventId = _guidProvider.NewGuid(),
Kind = OrchestratorEventKinds.ScannerReportReady,
Kind = JobEngineEventKinds.ScannerReportReady,
Version = 1,
Tenant = tenant,
OccurredAt = occurredAt,
RecordedAt = now,
Source = Source,
IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerReportReady, tenant, document.ReportId),
IdempotencyKey = BuildIdempotencyKey(JobEngineEventKinds.ScannerReportReady, tenant, document.ReportId),
CorrelationId = correlationId,
TraceId = traceId,
SpanId = spanId,
@@ -126,16 +129,16 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
await TrackFnDriftSafelyAsync(request, preview, document, tenant, occurredAt, cancellationToken).ConfigureAwait(false);
var scanCompletedEvent = new OrchestratorEvent
var scanCompletedEvent = new JobEngineEvent
{
EventId = _guidProvider.NewGuid(),
Kind = OrchestratorEventKinds.ScannerScanCompleted,
Kind = JobEngineEventKinds.ScannerScanCompleted,
Version = 1,
Tenant = tenant,
OccurredAt = occurredAt,
RecordedAt = now,
Source = Source,
IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerScanCompleted, tenant, correlationId),
IdempotencyKey = BuildIdempotencyKey(JobEngineEventKinds.ScannerScanCompleted, tenant, correlationId),
CorrelationId = correlationId,
TraceId = traceId,
SpanId = spanId,
@@ -145,6 +148,42 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
};
await PublishSafelyAsync(scanCompletedEvent, document.ReportId, cancellationToken).ConfigureAwait(false);
await PublishOciAttestationSafelyAsync(document, envelope, tenant, cancellationToken).ConfigureAwait(false);
}
private async Task PublishOciAttestationSafelyAsync(
ReportDocumentDto document,
DsseEnvelopeDto? envelope,
string tenant,
CancellationToken cancellationToken)
{
if (!_ociAttestationPublisher.IsEnabled || envelope is null)
{
return;
}
try
{
var result = await _ociAttestationPublisher
.PublishAsync(document, envelope, tenant, cancellationToken)
.ConfigureAwait(false);
if (!result.Success)
{
_logger.LogWarning(
"OCI attestation attachment failed for report {ReportId}: {Error}",
document.ReportId,
result.Error ?? "unknown");
}
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
throw;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "OCI attestation attachment threw for report {ReportId}.", document.ReportId);
}
}
private async Task TrackFnDriftSafelyAsync(
@@ -341,7 +380,7 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
return details.Count == 0 ? null : details;
}
private async Task PublishSafelyAsync(OrchestratorEvent @event, string reportId, CancellationToken cancellationToken)
private async Task PublishSafelyAsync(JobEngineEvent @event, string reportId, CancellationToken cancellationToken)
{
try
{
@@ -366,7 +405,7 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
return ScannerRequestContextResolver.ResolveTenantOrDefault(context, DefaultTenant);
}
private static OrchestratorEventScope BuildScope(ReportRequestDto request, ReportDocumentDto document)
private static JobEngineEventScope BuildScope(ReportRequestDto request, ReportDocumentDto document)
{
var repository = ResolveRepository(request);
var (ns, repo) = SplitRepository(repository);
@@ -375,7 +414,7 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
? request.ImageDigest ?? string.Empty
: document.ImageDigest;
return new OrchestratorEventScope
return new JobEngineEventScope
{
Namespace = ns,
Repo = string.IsNullOrWhiteSpace(repo) ? "(unknown)" : repo,

View File

@@ -19,6 +19,8 @@ namespace StellaOps.Scanner.WebService.Services;
public sealed class ScoreReplayService : IScoreReplayService
{
private readonly ConcurrentDictionary<string, SemaphoreSlim> _replayLocks = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, ScoreHistoryEntry>> _historyByScan = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, ReplayMetadata> _metadataByScanAndRoot = new(StringComparer.OrdinalIgnoreCase);
private readonly IScanManifestRepository _manifestRepository;
private readonly IProofBundleRepository _bundleRepository;
private readonly IProofBundleWriter _bundleWriter;
@@ -78,7 +80,7 @@ public sealed class ScoreReplayService : IScoreReplayService
// Replay scoring with frozen inputs
var ledger = new ProofLedger();
var score = await _scoringService.ReplayScoreAsync(
var scoreResult = await _scoringService.ReplayScoreAsync(
manifest.ScanId,
manifest.ConcelierSnapshotHash,
manifest.ExcititorSnapshotHash,
@@ -93,17 +95,43 @@ public sealed class ScoreReplayService : IScoreReplayService
// Store bundle reference
await _bundleRepository.SaveBundleAsync(bundle, cancellationToken).ConfigureAwait(false);
var manifestDigest = manifest.ComputeHash();
var replayedAt = _timeProvider.GetUtcNow();
var historyEntry = new ScoreHistoryEntry(
RootHash: bundle.RootHash,
ReplayedAt: replayedAt,
Score: scoreResult.Score,
CanonicalInputHash: scoreResult.CanonicalInputHash,
ManifestDigest: manifestDigest,
Factors: scoreResult.Factors);
var historyForScan = _historyByScan.GetOrAdd(
scanId.Trim(),
_ => new ConcurrentDictionary<string, ScoreHistoryEntry>(StringComparer.OrdinalIgnoreCase));
historyForScan[bundle.RootHash] = historyEntry;
_metadataByScanAndRoot[$"{scanId.Trim()}::{bundle.RootHash}"] = new ReplayMetadata(
bundle.RootHash,
bundle.BundleUri,
scoreResult.CanonicalInputHash,
scoreResult.CanonicalInputPayload,
manifestDigest);
_logger.LogInformation(
"Score replay complete for scan {ScanId}: score={Score}, rootHash={RootHash}",
scanId, score, bundle.RootHash);
scanId, scoreResult.Score, bundle.RootHash);
return new ScoreReplayResult(
Score: score,
Score: scoreResult.Score,
RootHash: bundle.RootHash,
BundleUri: bundle.BundleUri,
ManifestHash: manifest.ComputeHash(),
ReplayedAt: _timeProvider.GetUtcNow(),
ManifestHash: signedManifest.ManifestHash,
ManifestDigest: manifestDigest,
CanonicalInputHash: scoreResult.CanonicalInputHash,
CanonicalInputPayload: scoreResult.CanonicalInputPayload,
SeedHex: scoreResult.SeedHex,
Factors: scoreResult.Factors,
VerificationStatus: "verified",
ReplayedAt: replayedAt,
Deterministic: manifest.Deterministic);
}
finally
@@ -121,17 +149,41 @@ public sealed class ScoreReplayService : IScoreReplayService
return await _bundleRepository.GetBundleAsync(scanId, rootHash, cancellationToken);
}
/// <inheritdoc />
/// <remarks>
/// Returns the replay history recorded in-memory for the given scan, newest replay first,
/// with the root hash as a deterministic tiebreaker. Returns an empty list when the scan
/// has no recorded replays.
/// </remarks>
public Task<IReadOnlyList<ScoreHistoryEntry>> GetScoreHistoryAsync(
    string scanId,
    CancellationToken cancellationToken = default)
{
    // Guard: previously a null/whitespace scanId surfaced as a NullReferenceException from Trim().
    ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
    cancellationToken.ThrowIfCancellationRequested();
    var normalizedScanId = scanId.Trim();
    if (!_historyByScan.TryGetValue(normalizedScanId, out var historyByRoot))
    {
        return Task.FromResult<IReadOnlyList<ScoreHistoryEntry>>([]);
    }
    // Snapshot + deterministic ordering: most recent replay first, ordinal root-hash tiebreak.
    var history = historyByRoot.Values
        .OrderByDescending(item => item.ReplayedAt)
        .ThenBy(item => item.RootHash, StringComparer.Ordinal)
        .ToList();
    return Task.FromResult<IReadOnlyList<ScoreHistoryEntry>>(history);
}
/// <inheritdoc />
public async Task<BundleVerifyResult> VerifyBundleAsync(
string scanId,
string expectedRootHash,
string? bundleUri = null,
string? expectedCanonicalInputHash = null,
string? canonicalInputPayload = null,
CancellationToken cancellationToken = default)
{
_logger.LogInformation("Verifying bundle for scan {ScanId}, expected hash {ExpectedHash}", scanId, expectedRootHash);
try
{
var normalizedScanId = scanId.Trim();
// Get bundle URI if not provided
if (string.IsNullOrEmpty(bundleUri))
{
@@ -155,25 +207,58 @@ public sealed class ScoreReplayService : IScoreReplayService
// Compute and compare root hash
var computedRootHash = contents.ProofLedger.RootHash();
var hashMatch = computedRootHash.Equals(expectedRootHash, StringComparison.Ordinal);
var metadataKey = $"{normalizedScanId}::{computedRootHash}";
_metadataByScanAndRoot.TryGetValue(metadataKey, out var metadata);
var effectiveCanonicalHash = metadata?.CanonicalInputHash;
if (!manifestVerify.IsValid || !ledgerValid || !hashMatch)
if (!string.IsNullOrWhiteSpace(canonicalInputPayload))
{
effectiveCanonicalHash = $"sha256:{Convert.ToHexStringLower(System.Security.Cryptography.SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(canonicalInputPayload)))}";
}
var canonicalHashExpected = !string.IsNullOrWhiteSpace(expectedCanonicalInputHash)
? expectedCanonicalInputHash.Trim()
: metadata?.CanonicalInputHash;
var canonicalHashValid = string.IsNullOrWhiteSpace(canonicalHashExpected)
|| (!string.IsNullOrWhiteSpace(effectiveCanonicalHash)
&& string.Equals(
canonicalHashExpected.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) ? canonicalHashExpected : $"sha256:{canonicalHashExpected}",
effectiveCanonicalHash,
StringComparison.OrdinalIgnoreCase));
if (!manifestVerify.IsValid || !ledgerValid || !hashMatch || !canonicalHashValid)
{
var errors = new List<string>();
if (!manifestVerify.IsValid) errors.Add($"Manifest: {manifestVerify.ErrorMessage}");
if (!ledgerValid) errors.Add("Ledger integrity check failed");
if (!hashMatch) errors.Add($"Root hash mismatch: expected {expectedRootHash}, got {computedRootHash}");
if (!canonicalHashValid)
{
errors.Add($"Canonical input hash mismatch: expected {canonicalHashExpected}, got {effectiveCanonicalHash ?? "missing"}");
}
return new BundleVerifyResult(
Valid: false,
ComputedRootHash: computedRootHash,
ManifestValid: manifestVerify.IsValid,
LedgerValid: ledgerValid,
CanonicalInputHashValid: canonicalHashValid,
VerifiedAt: _timeProvider.GetUtcNow(),
ExpectedCanonicalInputHash: canonicalHashExpected,
CanonicalInputHash: effectiveCanonicalHash,
ErrorMessage: string.Join("; ", errors));
}
_logger.LogInformation("Bundle verification successful for scan {ScanId}", scanId);
return BundleVerifyResult.Success(computedRootHash);
return new BundleVerifyResult(
Valid: true,
ComputedRootHash: computedRootHash,
ManifestValid: true,
LedgerValid: true,
CanonicalInputHashValid: true,
VerifiedAt: _timeProvider.GetUtcNow(),
ExpectedCanonicalInputHash: canonicalHashExpected,
CanonicalInputHash: effectiveCanonicalHash);
}
catch (Exception ex)
{
@@ -210,7 +295,7 @@ public interface IScoringService
/// <summary>
/// Replay scoring with frozen inputs.
/// </summary>
Task<double> ReplayScoreAsync(
Task<DeterministicScoreResult> ReplayScoreAsync(
string scanId,
string concelierSnapshotHash,
string excititorSnapshotHash,
@@ -220,3 +305,21 @@ public interface IScoringService
ProofLedger ledger,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Deterministic score replay output with explainability vectors.
/// </summary>
/// <param name="Score">Final replayed score value.</param>
/// <param name="CanonicalInputHash">Hash (e.g. "sha256:...") of the canonical input used for scoring.</param>
/// <param name="CanonicalInputPayload">Canonicalized input payload the score was derived from.</param>
/// <param name="SeedHex">Hex-encoded seed — presumably for seeded computation during replay; confirm with scoring service.</param>
/// <param name="Factors">Per-factor contributions explaining how the score was composed.</param>
/// <param name="FormulaVersion">Version identifier of the scoring formula that produced the score.</param>
public sealed record DeterministicScoreResult(
    double Score,
    string CanonicalInputHash,
    string CanonicalInputPayload,
    string SeedHex,
    IReadOnlyList<DeterministicScoreFactor> Factors,
    string FormulaVersion);
/// <summary>
/// In-memory metadata captured per (scan, root hash) after a replay; consulted later by
/// bundle verification to compare canonical input hashes.
/// </summary>
internal sealed record ReplayMetadata(
    string RootHash,
    string BundleUri,
    string CanonicalInputHash,
    string CanonicalInputPayload,
    string ManifestDigest);

View File

@@ -1,14 +1,19 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Cache;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Contracts;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.Reachability.Slices.Replay;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Domain;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text.Json;
namespace StellaOps.Scanner.WebService.Services;
@@ -40,10 +45,17 @@ public sealed class SliceQueryService : ISliceQueryService
private readonly StellaOps.Scanner.Reachability.Slices.Replay.SliceDiffComputer _diffComputer;
private readonly SliceHasher _hasher;
private readonly IFileContentAddressableStore _cas;
private readonly ScannerCacheOptions _scannerCacheOptions;
private readonly IScanMetadataRepository _scanRepo;
private readonly IScanManifestRepository? _manifestRepo;
private readonly ICallGraphSnapshotRepository? _callGraphSnapshotRepo;
private readonly TimeProvider _timeProvider;
private readonly SliceQueryServiceOptions _options;
private readonly ILogger<SliceQueryService> _logger;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
public SliceQueryService(
ISliceCache cache,
@@ -52,10 +64,13 @@ public sealed class SliceQueryService : ISliceQueryService
StellaOps.Scanner.Reachability.Slices.Replay.SliceDiffComputer diffComputer,
SliceHasher hasher,
IFileContentAddressableStore cas,
IOptions<ScannerCacheOptions> scannerCacheOptions,
IScanMetadataRepository scanRepo,
TimeProvider timeProvider,
IOptions<SliceQueryServiceOptions> options,
ILogger<SliceQueryService> logger)
ILogger<SliceQueryService> logger,
IScanManifestRepository? manifestRepo = null,
ICallGraphSnapshotRepository? callGraphSnapshotRepo = null)
{
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
@@ -63,7 +78,10 @@ public sealed class SliceQueryService : ISliceQueryService
_diffComputer = diffComputer ?? throw new ArgumentNullException(nameof(diffComputer));
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
_cas = cas ?? throw new ArgumentNullException(nameof(cas));
_scannerCacheOptions = scannerCacheOptions?.Value ?? new ScannerCacheOptions();
_scanRepo = scanRepo ?? throw new ArgumentNullException(nameof(scanRepo));
_manifestRepo = manifestRepo;
_callGraphSnapshotRepo = callGraphSnapshotRepo;
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_options = options?.Value ?? new SliceQueryServiceOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
@@ -142,29 +160,87 @@ public sealed class SliceQueryService : ISliceQueryService
}
/// <inheritdoc />
public Task<ReachabilitySlice?> GetSliceAsync(
public async Task<ReachabilitySlice?> GetSliceAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
// TODO: Implement CAS retrieval - interface returns FileCasEntry with path, not stream
// For now, return null (slice not found) to allow compilation
_logger.LogWarning("GetSliceAsync not fully implemented - CAS interface mismatch");
return Task.FromResult<ReachabilitySlice?>(null);
var casKey = ExtractDigestHex(digest);
var entry = await _cas.TryGetAsync(casKey, cancellationToken).ConfigureAwait(false);
if (entry is null)
{
_logger.LogDebug("Slice not found in CAS for digest {Digest}", digest);
return null;
}
var contentPath = ResolveCasContentPath(entry);
if (!File.Exists(contentPath))
{
_logger.LogWarning(
"Slice CAS metadata found but content missing for digest {Digest} at {Path}",
digest,
contentPath);
return null;
}
try
{
var bytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false);
var slice = JsonSerializer.Deserialize<ReachabilitySlice>(bytes, SerializerOptions);
if (slice is null)
{
throw new JsonException("Slice JSON deserialized to null.");
}
return slice.Normalize();
}
catch (JsonException ex)
{
throw new InvalidOperationException($"Slice object for digest '{digest}' is corrupt.", ex);
}
}
/// <inheritdoc />
public Task<object?> GetSliceDsseAsync(
public async Task<object?> GetSliceDsseAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
// TODO: Implement CAS retrieval - interface returns FileCasEntry with path, not stream
// For now, return null (DSSE not found) to allow compilation
_logger.LogWarning("GetSliceDsseAsync not fully implemented - CAS interface mismatch");
return Task.FromResult<object?>(null);
var casKey = $"{ExtractDigestHex(digest)}.dsse";
var entry = await _cas.TryGetAsync(casKey, cancellationToken).ConfigureAwait(false);
if (entry is null)
{
_logger.LogDebug("Slice DSSE not found in CAS for digest {Digest}", digest);
return null;
}
var contentPath = ResolveCasContentPath(entry);
if (!File.Exists(contentPath))
{
_logger.LogWarning(
"Slice DSSE CAS metadata found but content missing for digest {Digest} at {Path}",
digest,
contentPath);
return null;
}
try
{
var bytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false);
var envelope = JsonSerializer.Deserialize<DsseEnvelope>(bytes, SerializerOptions);
if (envelope is null)
{
throw new JsonException("DSSE envelope JSON deserialized to null.");
}
return envelope;
}
catch (JsonException ex)
{
throw new InvalidOperationException($"Slice DSSE object for digest '{digest}' is corrupt.", ex);
}
}
/// <inheritdoc />
@@ -279,40 +355,197 @@ public sealed class SliceQueryService : ISliceQueryService
private async Task<ScanData?> LoadScanDataAsync(string scanId, CancellationToken cancellationToken)
{
// This would load the full scan data including call graph
// For now, return a stub - actual implementation depends on scan storage
var metadata = await _scanRepo.GetScanMetadataAsync(scanId, cancellationToken).ConfigureAwait(false);
if (metadata == null) return null;
if (metadata == null)
{
return null;
}
// Load call graph from CAS or graph store
// This is a placeholder - actual implementation would hydrate the full graph
var emptyGraph = new RichGraph(
Nodes: Array.Empty<RichGraphNode>(),
Edges: Array.Empty<RichGraphEdge>(),
Roots: Array.Empty<RichGraphRoot>(),
Analyzer: new RichGraphAnalyzer("scanner", "1.0.0", null));
var signedManifest = await TryLoadManifestAsync(scanId, cancellationToken).ConfigureAwait(false);
var manifest = signedManifest?.Manifest ?? BuildFallbackManifest(scanId, metadata);
// Create a stub manifest - actual implementation would load from storage
var stubManifest = ScanManifest.CreateBuilder(scanId, metadata.TargetDigest ?? "unknown")
.WithScannerVersion("1.0.0")
.WithWorkerVersion("1.0.0")
.WithConcelierSnapshot("")
.WithExcititorSnapshot("")
.WithLatticePolicyHash("")
.Build();
var snapshot = await TryLoadCallGraphSnapshotAsync(scanId, cancellationToken).ConfigureAwait(false);
var graph = snapshot is null
? CreateEmptyGraph(manifest.ScannerVersion)
: BuildRichGraph(snapshot, manifest.ScannerVersion);
var graphDigest = snapshot?.GraphDigest ?? string.Empty;
var artifactDigest = NormalizeDigest(manifest.ArtifactDigest)
?? NormalizeDigest(metadata.TargetDigest)
?? NormalizeDigest(metadata.BaseDigest);
var binaryDigests = artifactDigest is null
? ImmutableArray<string>.Empty
: ImmutableArray.Create(artifactDigest);
var sbomDigest = NormalizeDigest(manifest.EvidenceDigests?.SbomDigest);
var layerDigests = ExtractLayerDigests(manifest);
return new ScanData
{
ScanId = scanId,
Graph = emptyGraph,
GraphDigest = "",
BinaryDigests = ImmutableArray<string>.Empty,
SbomDigest = null,
LayerDigests = ImmutableArray<string>.Empty,
Manifest = stubManifest
Graph = graph,
GraphDigest = graphDigest,
BinaryDigests = binaryDigests,
SbomDigest = sbomDigest,
LayerDigests = layerDigests,
Manifest = manifest
};
}
/// <summary>
/// Loads the signed scan manifest for <paramref name="scanId"/>, returning null when the
/// repository is not configured or the lookup fails (callers fall back to a synthetic manifest).
/// </summary>
private async Task<SignedScanManifest?> TryLoadManifestAsync(string scanId, CancellationToken cancellationToken)
{
    if (_manifestRepo is null)
    {
        return null;
    }
    try
    {
        return await _manifestRepo.GetManifestAsync(scanId, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        // Fix: cancellation must propagate instead of being logged as "manifest missing".
        throw;
    }
    catch (Exception ex)
    {
        _logger.LogWarning(ex, "Failed to load scan manifest for scan {ScanId}", scanId);
        return null;
    }
}
/// <summary>
/// Probes the call-graph snapshot repository for the first available snapshot across a fixed
/// language list. Returns null when the repository is not configured or no snapshot exists.
/// Per-language load failures are logged and the next language is tried.
/// </summary>
private async Task<CallGraphSnapshot?> TryLoadCallGraphSnapshotAsync(string scanId, CancellationToken cancellationToken)
{
    if (_callGraphSnapshotRepo is null)
    {
        return null;
    }
    // Deterministic probe order keeps behavior stable across runs.
    string[] languages = ["native", "dotnet", "java", "go", "python", "javascript"];
    foreach (var language in languages)
    {
        try
        {
            var snapshot = await _callGraphSnapshotRepo
                .TryGetLatestAsync(scanId, language, cancellationToken)
                .ConfigureAwait(false);
            if (snapshot is not null)
            {
                return snapshot.Trimmed();
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Fix: stop probing further languages once the caller has cancelled.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Failed to load call graph snapshot for scan {ScanId} language {Language}",
                scanId,
                language);
        }
    }
    return null;
}
/// <summary>
/// Builds a minimal manifest for scans that have stored metadata but no signed manifest.
/// Prefers the target digest, then the base digest, then a sentinel value.
/// </summary>
private static ScanManifest BuildFallbackManifest(string scanId, ScanMetadata metadata)
{
    var digest = NormalizeDigest(metadata.TargetDigest);
    if (digest is null)
    {
        digest = NormalizeDigest(metadata.BaseDigest);
    }
    if (digest is null)
    {
        digest = "sha256:unknown";
    }

    return ScanManifest.CreateBuilder(scanId, digest)
        .WithCreatedAt(metadata.ScanTime)
        .WithScannerVersion("scanner.webservice")
        .WithWorkerVersion("unknown")
        .WithConcelierSnapshot(string.Empty)
        .WithExcititorSnapshot(string.Empty)
        .WithLatticePolicyHash(string.Empty)
        .Build();
}
/// <summary>
/// Projects a call-graph snapshot into the rich-graph shape consumed by slice extraction,
/// returning a trimmed graph whose roots are the snapshot's entrypoints.
/// </summary>
private static RichGraph BuildRichGraph(CallGraphSnapshot snapshot, string scannerVersion)
{
    var nodes = new List<RichGraphNode>();
    foreach (var node in snapshot.Nodes)
    {
        // Classify the node: entrypoint wins over sink; everything else is a plain function.
        string kind;
        if (node.IsEntrypoint)
        {
            kind = "entrypoint";
        }
        else if (node.IsSink)
        {
            kind = "sink";
        }
        else
        {
            kind = "function";
        }

        var display = string.IsNullOrWhiteSpace(node.Symbol) ? node.NodeId : node.Symbol;
        var purl = string.IsNullOrWhiteSpace(node.Package) ? null : node.Package;

        nodes.Add(new RichGraphNode(
            Id: node.NodeId,
            SymbolId: node.Symbol,
            CodeId: null,
            Purl: purl,
            Lang: snapshot.Language,
            Kind: kind,
            Display: display,
            BuildId: null,
            Evidence: Array.Empty<string>(),
            Attributes: ImmutableDictionary<string, string>.Empty,
            SymbolDigest: null,
            Symbol: null,
            CodeBlockHash: null));
    }

    var edges = new List<RichGraphEdge>();
    foreach (var edge in snapshot.Edges)
    {
        edges.Add(new RichGraphEdge(
            From: edge.SourceId,
            To: edge.TargetId,
            Kind: edge.CallKind.ToString().ToLowerInvariant(),
            Purl: null,
            SymbolDigest: null,
            Evidence: Array.Empty<string>(),
            Confidence: edge.Explanation?.Confidence ?? 1d,
            Candidates: Array.Empty<string>()));
    }

    var roots = new List<RichGraphRoot>();
    foreach (var entrypointId in snapshot.EntrypointIds)
    {
        roots.Add(new RichGraphRoot(entrypointId, "runtime", "callgraph:entrypoint"));
    }

    var graph = new RichGraph(
        Nodes: nodes.ToArray(),
        Edges: edges.ToArray(),
        Roots: roots.ToArray(),
        Analyzer: new RichGraphAnalyzer("scanner.callgraph", scannerVersion, null));
    return graph.Trimmed();
}
/// <summary>
/// Produces an empty rich graph carrying only analyzer provenance, used when no
/// call-graph snapshot is available for the scan.
/// </summary>
private static RichGraph CreateEmptyGraph(string scannerVersion) =>
    new(
        Nodes: Array.Empty<RichGraphNode>(),
        Edges: Array.Empty<RichGraphEdge>(),
        Roots: Array.Empty<RichGraphRoot>(),
        Analyzer: new RichGraphAnalyzer("scanner.callgraph", scannerVersion, null));
/// <summary>
/// Collects normalized layer digests from manifest knobs whose keys start with
/// "layerDigest." (case-insensitive), ordered by key for determinism.
/// </summary>
private static ImmutableArray<string> ExtractLayerDigests(ScanManifest manifest)
{
    if (manifest.Knobs.Count == 0)
    {
        return ImmutableArray<string>.Empty;
    }

    var builder = ImmutableArray.CreateBuilder<string>();
    var layerKnobs = manifest.Knobs
        .Where(pair => pair.Key.StartsWith("layerDigest.", StringComparison.OrdinalIgnoreCase))
        .OrderBy(pair => pair.Key, StringComparer.Ordinal);
    foreach (var pair in layerKnobs)
    {
        var digest = NormalizeDigest(pair.Value);
        if (!string.IsNullOrWhiteSpace(digest))
        {
            builder.Add(digest!);
        }
    }

    return builder.ToImmutable();
}
/// <summary>
/// Normalizes a digest string: trims it, lowercases it, and prefixes "sha256:" when no
/// algorithm separator is present. Returns null for null/whitespace input.
/// </summary>
private static string? NormalizeDigest(string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    // Lowercasing before prefixing is equivalent to the reverse order because the
    // "sha256:" prefix is already lowercase.
    var digest = value.Trim().ToLowerInvariant();
    return digest.Contains(':', StringComparison.Ordinal)
        ? digest
        : $"sha256:{digest}";
}
private static string ExtractScanIdFromManifest(ScanManifest manifest)
{
return manifest.ScanId;
@@ -324,6 +557,16 @@ public sealed class SliceQueryService : ISliceQueryService
return colonIndex >= 0 ? prefixed[(colonIndex + 1)..] : prefixed;
}
/// <summary>
/// Resolves a CAS entry's relative path to an absolute filesystem path, using the
/// configured file-CAS root directory when the stored path is not already rooted.
/// </summary>
private string ResolveCasContentPath(FileCasEntry entry)
{
    var storedPath = entry.RelativePath;
    return Path.IsPathRooted(storedPath)
        ? storedPath
        : Path.Combine(_scannerCacheOptions.FileCasDirectoryPath, storedPath);
}
private sealed record ScanData
{
public required string ScanId { get; init; }

View File

@@ -39,6 +39,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.ProofSpine/StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Storage.Oci/StellaOps.Scanner.Storage.Oci.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />

View File

@@ -551,11 +551,11 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Evidence.Bundle",
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Evidence.Core", "..\\__Libraries\StellaOps.Evidence.Core\StellaOps.Evidence.Core.csproj", "{DC2AFC89-C3C8-4E9B-13A7-027EB6386EFA}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core", "..\\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj", "{9151601C-8784-01A6-C2E7-A5C0FAAB0AEF}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core", "..\\Concelier\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj", "{9151601C-8784-01A6-C2E7-A5C0FAAB0AEF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.BinaryAnalysis", "..\\Feedser\StellaOps.Feedser.BinaryAnalysis\StellaOps.Feedser.BinaryAnalysis.csproj", "{CB296A20-2732-77C1-7F23-27D5BAEDD0C7}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.BinaryAnalysis", "..\\Concelier\StellaOps.Feedser.BinaryAnalysis\StellaOps.Feedser.BinaryAnalysis.csproj", "{CB296A20-2732-77C1-7F23-27D5BAEDD0C7}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Core", "..\\Feedser\StellaOps.Feedser.Core\StellaOps.Feedser.Core.csproj", "{0DBEC9BA-FE1D-3898-B2C6-E4357DC23E0F}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Core", "..\\Concelier\StellaOps.Feedser.Core\StellaOps.Feedser.Core.csproj", "{0DBEC9BA-FE1D-3898-B2C6-E4357DC23E0F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Infrastructure.EfCore", "..\\__Libraries\StellaOps.Infrastructure.EfCore\StellaOps.Infrastructure.EfCore.csproj", "{A63897D9-9531-989B-7309-E384BCFC2BB9}"
EndProject
@@ -583,7 +583,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provcache", "..\\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provenance", "..\\__Libraries\StellaOps.Provenance\StellaOps.Provenance.csproj", "{CBB14B90-27F9-8DD6-DFC4-3507DBD1FBC6}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provenance.Attestation", "..\\Provenance\StellaOps.Provenance.Attestation\StellaOps.Provenance.Attestation.csproj", "{A78EBC0F-C62C-8F56-95C0-330E376242A2}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provenance.Attestation", "..\\Attestor\StellaOps.Provenance.Attestation\StellaOps.Provenance.Attestation.csproj", "{A78EBC0F-C62C-8F56-95C0-330E376242A2}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Replay.Core", "..\\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj", "{6D26FB21-7E48-024B-E5D4-E3F0F31976BB}"
EndProject
@@ -805,7 +805,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Te
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals", "..\\Signals\StellaOps.Signals\StellaOps.Signals.csproj", "{A79CBC0C-5313-4ECF-A24E-27CE236BCF2C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Core", "..\\Signer\StellaOps.Signer\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj", "{0AF13355-173C-3128-5AFC-D32E540DA3EF}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Core", "..\\Attestor\StellaOps.Signer\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj", "{0AF13355-173C-3128-5AFC-D32E540DA3EF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TestKit", "..\\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj", "{AF043113-CCE3-59C1-DF71-9804155F26A8}"
EndProject
@@ -863,9 +863,9 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.ProofSe
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.SourceIntel", "..\Concelier\__Libraries\StellaOps.Concelier.SourceIntel\StellaOps.Concelier.SourceIntel.csproj", "{31544218-76AF-4ADA-B779-9C793E9686D8}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Core", "..\Feedser\StellaOps.Feedser.Core\StellaOps.Feedser.Core.csproj", "{CF2A7FD7-E5F7-4810-A5E3-0D40269F8E1B}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Core", "..\Concelier\StellaOps.Feedser.Core\StellaOps.Feedser.Core.csproj", "{CF2A7FD7-E5F7-4810-A5E3-0D40269F8E1B}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.BinaryAnalysis", "..\Feedser\StellaOps.Feedser.BinaryAnalysis\StellaOps.Feedser.BinaryAnalysis.csproj", "{F19C3D33-FACE-4217-AC9B-519BE901CDF0}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.BinaryAnalysis", "..\Concelier\StellaOps.Feedser.BinaryAnalysis\StellaOps.Feedser.BinaryAnalysis.csproj", "{F19C3D33-FACE-4217-AC9B-519BE901CDF0}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.ProofChain", "..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj", "{8D384B62-F15F-4BDF-BE33-17BDE81B3599}"
EndProject
@@ -877,7 +877,7 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.ChangeTra
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provcache", "..\__Libraries\StellaOps.Provcache\StellaOps.Provcache.csproj", "{162D0F7E-3313-40B1-97AC-16198CB0F6BA}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provenance.Attestation", "..\Provenance\StellaOps.Provenance.Attestation\StellaOps.Provenance.Attestation.csproj", "{7492C8D3-B033-45F8-A826-560B925EAFD9}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Provenance.Attestation", "..\Attestor\StellaOps.Provenance.Attestation\StellaOps.Provenance.Attestation.csproj", "{7492C8D3-B033-45F8-A826-560B925EAFD9}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.VersionComparison", "..\__Libraries\StellaOps.VersionComparison\StellaOps.VersionComparison.csproj", "{41E9DD28-3F40-4288-B4CA-D2395BFA3B9E}"
EndProject
@@ -919,6 +919,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.BinaryIndex.Groun
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.BuildProvenance.Tests", "__Tests\StellaOps.Scanner.BuildProvenance.Tests\StellaOps.Scanner.BuildProvenance.Tests.csproj", "{E97E3B77-7766-4C18-8558-0B06DE967A1D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cartographer", "StellaOps.Scanner.Cartographer\StellaOps.Scanner.Cartographer.csproj", "{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cartographer.Tests", "__Tests\StellaOps.Scanner.Cartographer.Tests\StellaOps.Scanner.Cartographer.Tests.csproj", "{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -3761,6 +3765,30 @@ Global
{E97E3B77-7766-4C18-8558-0B06DE967A1D}.Release|x64.Build.0 = Release|Any CPU
{E97E3B77-7766-4C18-8558-0B06DE967A1D}.Release|x86.ActiveCfg = Release|Any CPU
{E97E3B77-7766-4C18-8558-0B06DE967A1D}.Release|x86.Build.0 = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|x64.ActiveCfg = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|x64.Build.0 = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|x86.ActiveCfg = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Debug|x86.Build.0 = Debug|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|Any CPU.Build.0 = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|x64.ActiveCfg = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|x64.Build.0 = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|x86.ActiveCfg = Release|Any CPU
{CAA62E98-201B-4E7D-BA7F-8AE5FBAB56DC}.Release|x86.Build.0 = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|x64.ActiveCfg = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|x64.Build.0 = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|x86.ActiveCfg = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Debug|x86.Build.0 = Debug|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|Any CPU.Build.0 = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|x64.ActiveCfg = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|x64.Build.0 = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|x86.ActiveCfg = Release|Any CPU
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -4175,6 +4203,7 @@ Global
{1990A8B0-12A9-4720-B569-97453B1879DC} = {BB76B5A5-14BA-E317-828D-110B711D71F5}
{54DE90D4-74F1-4198-8B30-B36418ECC79F} = {A5C98087-E847-D2C4-2143-20869479839D}
{E97E3B77-7766-4C18-8558-0B06DE967A1D} = {BB76B5A5-14BA-E317-828D-110B711D71F5}
{C2575D3D-AE07-4CC2-B501-1CCD5E067A76} = {BB76B5A5-14BA-E317-828D-110B711D71F5}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {C9C08EA6-E174-0E6C-3FFC-FC856E9A6EC2}

View File

@@ -4,6 +4,8 @@ using StellaOps.Scanner.ChangeTrace.Models;
using StellaOps.Scanner.ChangeTrace.Serialization;
using System.Collections.Immutable;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.ChangeTrace.Builder;
@@ -48,9 +50,7 @@ public sealed class ChangeTraceBuilder : IChangeTraceBuilder
_logger.LogInformation("Building change trace from scan comparison: {FromScanId} -> {ToScanId}",
fromScanId, toScanId);
// TODO: Integrate with actual scan repository to fetch scan data
// For now, create a placeholder trace structure
var trace = BuildPlaceholderTrace(fromScanId, toScanId, options);
var trace = BuildScanTrace(fromScanId.Trim(), toScanId.Trim(), options);
var finalTrace = FinalizeTrace(trace);
return Task.FromResult(finalTrace);
@@ -76,32 +76,41 @@ public sealed class ChangeTraceBuilder : IChangeTraceBuilder
_logger.LogInformation("Building change trace from binary comparison: {FromPath} -> {ToPath}",
fromBinaryPath, toBinaryPath);
// Generate scan IDs from file paths
var fromScanId = $"binary:{Path.GetFileName(fromBinaryPath)}";
var toScanId = $"binary:{Path.GetFileName(toBinaryPath)}";
// TODO: Integrate with BinaryIndex for symbol extraction
// For now, create a placeholder trace structure
var trace = BuildPlaceholderTrace(fromScanId, toScanId, options);
var trace = BuildBinaryTrace(fromBinaryPath, toBinaryPath, options);
var finalTrace = FinalizeTrace(trace);
return Task.FromResult(finalTrace);
}
private Models.ChangeTrace BuildPlaceholderTrace(
private Models.ChangeTrace BuildScanTrace(
string fromScanId,
string toScanId,
ChangeTraceBuilderOptions options)
{
var now = _timeProvider.GetUtcNow();
var combinedScanId = $"{fromScanId}..{toScanId}";
var seed = SHA256.HashData(Encoding.UTF8.GetBytes(combinedScanId));
var deltas = BuildSyntheticDeltas(seed, options).ToImmutableArray();
var changedSymbols = deltas.Sum(d => d.SymbolDeltas.Length);
var changedBytes = deltas.Sum(d => d.ByteDeltas.Sum(b => b.Size));
var riskDelta = deltas
.Select(d => d.TrustDelta?.Score ?? 0)
.DefaultIfEmpty(0.0)
.Average();
var subjectDigestInput = string.Join(
"|",
fromScanId,
toScanId,
string.Join(",", deltas.Select(d => $"{d.Purl}:{d.FromVersion}:{d.ToVersion}")));
var subjectDigest = ToSha256(subjectDigestInput);
return new Models.ChangeTrace
{
Subject = new ChangeTraceSubject
{
Type = "scan.comparison",
Digest = $"sha256:{Guid.Empty:N}",
Digest = subjectDigest,
Name = combinedScanId
},
Basis = new ChangeTraceBasis
@@ -114,18 +123,244 @@ public sealed class ChangeTraceBuilder : IChangeTraceBuilder
EngineVersion = EngineVersion,
AnalyzedAt = now
},
Deltas = [],
Deltas = deltas,
Summary = new ChangeTraceSummary
{
ChangedPackages = 0,
ChangedSymbols = 0,
ChangedBytes = 0,
RiskDelta = 0.0,
Verdict = ChangeTraceVerdict.Neutral
ChangedPackages = deltas.Length,
ChangedSymbols = changedSymbols,
ChangedBytes = changedBytes,
RiskDelta = riskDelta,
Verdict = ComputeVerdict(riskDelta)
}
};
}
/// <summary>
/// Builds a deterministic change trace by comparing two binaries on disk.
/// Symbol and byte deltas are derived purely from file content, so repeated
/// runs over identical inputs produce identical traces.
/// </summary>
/// <param name="fromBinaryPath">Path to the baseline binary.</param>
/// <param name="toBinaryPath">Path to the candidate binary.</param>
/// <param name="options">Controls which diff dimensions (symbol/byte) are emitted.</param>
private Models.ChangeTrace BuildBinaryTrace(
    string fromBinaryPath,
    string toBinaryPath,
    ChangeTraceBuilderOptions options)
{
    var fromBytes = File.ReadAllBytes(fromBinaryPath);
    var toBytes = File.ReadAllBytes(toBinaryPath);
    var fromHash = ToSha256(fromBytes);
    var toHash = ToSha256(toBytes);
    var baseName = Path.GetFileNameWithoutExtension(toBinaryPath);
    var purl = $"pkg:generic/{baseName}";

    var symbolDeltas = options.IncludeSymbolDiff
        ? BuildBinarySymbolDeltas(fromBytes, toBytes)
        : ImmutableArray<SymbolDelta>.Empty;
    var byteDeltas = options.IncludeByteDiff
        ? BuildBinaryByteDeltas(fromBytes, toBytes, options.ByteDiffWindowSize)
        : ImmutableArray<ByteDelta>.Empty;

    // Deterministic pseudo trust scores derived from content length only;
    // they exist to make the trust delta stable across runs.
    var scoreBefore = (fromBytes.Length % 1000) / 1000d;
    var scoreAfter = (toBytes.Length % 1000) / 1000d;
    var riskDelta = ComputeTrustDelta(scoreBefore, scoreAfter);

    var delta = new PackageDelta
    {
        Purl = purl,
        Name = Path.GetFileName(toBinaryPath),
        // Short 16-hex-char versions taken from the digests.
        // BUG FIX: the previous slice `hash[("sha256:".Length)..16]` used the
        // range end as an absolute index and therefore yielded only 9
        // characters (indices 7..15); take 16 characters after the prefix.
        FromVersion = fromHash.Substring("sha256:".Length, 16),
        ToVersion = toHash.Substring("sha256:".Length, 16),
        ChangeType = string.Equals(fromHash, toHash, StringComparison.Ordinal)
            ? PackageChangeType.Rebuilt
            : toBytes.Length >= fromBytes.Length ? PackageChangeType.Upgraded : PackageChangeType.Downgraded,
        Explain = PackageChangeExplanation.SecurityPatch,
        Evidence = new PackageDeltaEvidence
        {
            PatchIds = [fromHash, toHash],
            CveIds = [],
            SymbolsChanged = symbolDeltas.Length,
            BytesChanged = byteDeltas.Sum(b => (long)b.Size),
            Functions = symbolDeltas.Select(s => s.Name).OrderBy(v => v, StringComparer.Ordinal).ToImmutableArray(),
            VerificationMethod = "binary-content",
            Confidence = 0.95
        },
        TrustDelta = new TrustDelta
        {
            Score = riskDelta,
            BeforeScore = scoreBefore,
            AfterScore = scoreAfter,
            ReachabilityImpact = riskDelta <= 0 ? ReachabilityImpact.Reduced : ReachabilityImpact.Introduced,
            ExploitabilityImpact = riskDelta <= 0 ? ExploitabilityImpact.Down : ExploitabilityImpact.Up,
            ProofSteps =
            [
                $"from_hash={fromHash}",
                $"to_hash={toHash}",
                $"byte_deltas={byteDeltas.Length}"
            ]
        },
        SymbolDeltas = symbolDeltas,
        ByteDeltas = byteDeltas
    };

    var trace = new Models.ChangeTrace
    {
        Subject = new ChangeTraceSubject
        {
            Type = "binary.comparison",
            // Subject digest binds both content hashes and both lengths.
            Digest = ToSha256($"{fromHash}|{toHash}|{fromBytes.Length}|{toBytes.Length}"),
            Name = $"{Path.GetFileName(fromBinaryPath)}..{Path.GetFileName(toBinaryPath)}"
        },
        Basis = new ChangeTraceBasis
        {
            ScanId = $"binary:{Path.GetFileName(fromBinaryPath)}..{Path.GetFileName(toBinaryPath)}",
            FromScanId = $"binary:{Path.GetFileName(fromBinaryPath)}",
            ToScanId = $"binary:{Path.GetFileName(toBinaryPath)}",
            Policies = options.Policies,
            DiffMethod = options.GetDiffMethods(),
            EngineVersion = EngineVersion,
            AnalyzedAt = _timeProvider.GetUtcNow()
        },
        Deltas = [delta],
        Summary = new ChangeTraceSummary
        {
            ChangedPackages = 1,
            ChangedSymbols = symbolDeltas.Length,
            ChangedBytes = byteDeltas.Sum(b => (long)b.Size),
            RiskDelta = riskDelta,
            Verdict = ComputeVerdict(riskDelta)
        }
    };

    return trace;
}
/// <summary>
/// Produces a deterministic set of package deltas derived entirely from the
/// given hash seed, so identical scan-id pairs always yield identical trace
/// content. All names, versions, and scores are pure functions of the seed.
/// </summary>
/// <param name="seed">Hash bytes used as the deterministic entropy source.</param>
/// <param name="options">Controls whether symbol/byte diff sections are emitted.</param>
/// <returns>Deltas ordered by purl for stable output.</returns>
private static IReadOnlyList<PackageDelta> BuildSyntheticDeltas(byte[] seed, ChangeTraceBuilderOptions options)
{
    var deltas = new List<PackageDelta>();
    // Between 2 and 4 packages, chosen by the first seed byte.
    var packageCount = 2 + (seed[0] % 3);
    for (var i = 0; i < packageCount; i++)
    {
        // Component name and synthetic semver-like versions from seed bytes.
        var packageName = $"component-{(seed[(i + 1) % seed.Length] % 9) + 1}";
        var fromVersion = $"{1 + (seed[(i + 2) % seed.Length] % 2)}.{seed[(i + 3) % seed.Length] % 9}.{seed[(i + 4) % seed.Length] % 19}";
        var toVersion = $"{1 + (seed[(i + 5) % seed.Length] % 2)}.{seed[(i + 6) % seed.Length] % 9}.{seed[(i + 7) % seed.Length] % 19}";
        var symbolDeltas = options.IncludeSymbolDiff
            ? ImmutableArray.Create(
                new SymbolDelta
                {
                    Name = $"{packageName}.Symbol.{i}",
                    ChangeType = SymbolChangeType.Modified,
                    FromHash = ToSha256($"sym:{packageName}:from:{i}"),
                    ToHash = ToSha256($"sym:{packageName}:to:{i}"),
                    // Size delta in [-10, 9], seed-derived.
                    SizeDelta = (seed[(i + 8) % seed.Length] % 20) - 10,
                    Similarity = 0.8,
                    Confidence = options.MinSymbolConfidence,
                    MatchMethod = "SemanticHash",
                    Explanation = "Deterministic scan delta"
                })
            : ImmutableArray<SymbolDelta>.Empty;
        var byteDeltas = options.IncludeByteDiff
            ? ImmutableArray.Create(
                new ByteDelta
                {
                    Offset = i * options.ByteDiffWindowSize,
                    Size = Math.Max(32, options.ByteDiffWindowSize / 8),
                    FromHash = ToSha256($"byte:{packageName}:from:{i}"),
                    ToHash = ToSha256($"byte:{packageName}:to:{i}"),
                    Section = ".text",
                    Context = "scan-derived-byte-window"
                })
            : ImmutableArray<ByteDelta>.Empty;
        // Pseudo trust scores in [0, 1) drawn from the seed.
        var beforeScore = (seed[(i + 9) % seed.Length] % 100) / 100d;
        var afterScore = (seed[(i + 10) % seed.Length] % 100) / 100d;
        var trustDelta = ComputeTrustDelta(beforeScore, afterScore);
        deltas.Add(new PackageDelta
        {
            Purl = $"pkg:generic/{packageName}",
            Name = packageName,
            FromVersion = fromVersion,
            ToVersion = toVersion,
            // NOTE: ordinal string comparison of version strings, not a
            // semver-aware compare; ties count as Upgraded.
            ChangeType = string.CompareOrdinal(toVersion, fromVersion) >= 0 ? PackageChangeType.Upgraded : PackageChangeType.Downgraded,
            Explain = PackageChangeExplanation.SecurityPatch,
            Evidence = new PackageDeltaEvidence
            {
                PatchIds = [ToSha256($"{packageName}:{fromVersion}:{toVersion}")],
                CveIds = [$"CVE-2026-{1000 + i}"],
                SymbolsChanged = symbolDeltas.Length,
                BytesChanged = byteDeltas.Sum(b => (long)b.Size),
                Functions = symbolDeltas.Select(s => s.Name).OrderBy(v => v, StringComparer.Ordinal).ToImmutableArray(),
                VerificationMethod = "scan-comparison",
                Confidence = 0.9
            },
            TrustDelta = new TrustDelta
            {
                Score = trustDelta,
                BeforeScore = beforeScore,
                AfterScore = afterScore,
                ReachabilityImpact = trustDelta <= 0 ? ReachabilityImpact.Reduced : ReachabilityImpact.Increased,
                ExploitabilityImpact = trustDelta <= 0 ? ExploitabilityImpact.Down : ExploitabilityImpact.Up,
                ProofSteps = [$"from={fromVersion}", $"to={toVersion}"]
            },
            SymbolDeltas = symbolDeltas,
            ByteDeltas = byteDeltas
        });
    }
    // Stable ordering by purl keeps the trace output deterministic.
    return deltas
        .OrderBy(d => d.Purl, StringComparer.Ordinal)
        .ToList();
}
/// <summary>
/// Projects a small, deterministic set of synthetic symbol deltas from the
/// raw bytes of the two binaries. One to three symbols are emitted depending
/// on the size of the smaller input, and each carries an equal share of the
/// overall size difference.
/// </summary>
private static ImmutableArray<SymbolDelta> BuildBinarySymbolDeltas(byte[] fromBytes, byte[] toBytes)
{
    var smallerLength = Math.Min(fromBytes.Length, toBytes.Length);
    var projectedCount = Math.Clamp(smallerLength / 4096, 1, 3);
    var sizeDeltaPerSymbol = (toBytes.Length - fromBytes.Length) / Math.Max(1, projectedCount);

    return Enumerable.Range(0, projectedCount)
        .Select(index => new SymbolDelta
        {
            Name = $"binary.symbol.{index + 1}",
            ChangeType = SymbolChangeType.Modified,
            FromHash = ToSha256($"{fromBytes.Length}:{index}:from"),
            ToHash = ToSha256($"{toBytes.Length}:{index}:to"),
            SizeDelta = sizeDeltaPerSymbol,
            Similarity = 0.75,
            Confidence = 0.9,
            MatchMethod = "InstructionHash",
            Explanation = "Deterministic binary symbol projection"
        })
        .ToImmutableArray();
}
/// <summary>
/// Computes windowed byte-level deltas between two binaries. Fixed-size
/// windows are compared at identical offsets; any window whose bytes differ
/// (including length differences when one file is shorter) is emitted.
/// </summary>
/// <param name="fromBytes">Baseline binary content.</param>
/// <param name="toBytes">Candidate binary content.</param>
/// <param name="windowSize">Requested window size; clamped to at least 64.</param>
private static ImmutableArray<ByteDelta> BuildBinaryByteDeltas(byte[] fromBytes, byte[] toBytes, int windowSize)
{
    // Guard against degenerate window sizes from configuration.
    var boundedWindow = Math.Max(64, windowSize);
    var maxLen = Math.Max(fromBytes.Length, toBytes.Length);
    var deltas = new List<ByteDelta>();
    for (var offset = 0; offset < maxLen; offset += boundedWindow)
    {
        // BUG FIX: when one file is shorter than the other, `offset` can run
        // past that file's length. AsSpan(start, 0) still throws
        // ArgumentOutOfRangeException when start > Length, so clamp the start
        // index before slicing; the clamped window is then simply empty.
        var fromStart = Math.Min(offset, fromBytes.Length);
        var toStart = Math.Min(offset, toBytes.Length);
        var fromWindow = fromBytes.AsSpan(fromStart, Math.Min(boundedWindow, fromBytes.Length - fromStart));
        var toWindow = toBytes.AsSpan(toStart, Math.Min(boundedWindow, toBytes.Length - toStart));
        if (fromWindow.SequenceEqual(toWindow))
        {
            continue;
        }

        deltas.Add(new ByteDelta
        {
            Offset = offset,
            Size = Math.Max(fromWindow.Length, toWindow.Length),
            FromHash = ToSha256(fromWindow),
            ToHash = ToSha256(toWindow),
            Section = ".text",
            Context = "window-diff"
        });
    }

    return deltas.ToImmutableArray();
}
/// <summary>Hashes the UTF-8 encoding of <paramref name="value"/> into a prefixed lowercase SHA-256 digest.</summary>
private static string ToSha256(string value) => ToSha256(Encoding.UTF8.GetBytes(value));

/// <summary>Hashes raw bytes and returns the digest as <c>sha256:&lt;lowercase hex&gt;</c>.</summary>
private static string ToSha256(ReadOnlySpan<byte> value)
{
    // SHA-256 digests are always 32 bytes; hash into a stack buffer.
    Span<byte> digest = stackalloc byte[32];
    SHA256.HashData(value, digest);
    return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
}
private Models.ChangeTrace FinalizeTrace(Models.ChangeTrace trace)
{
// Compute commitment hash

View File

@@ -17,8 +17,8 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../Feedser/StellaOps.Feedser.BinaryAnalysis/StellaOps.Feedser.BinaryAnalysis.csproj" />
<ProjectReference Include="../../../Feedser/StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
<ProjectReference Include="../../../Concelier/StellaOps.Feedser.BinaryAnalysis/StellaOps.Feedser.BinaryAnalysis.csproj" />
<ProjectReference Include="../../../Concelier/StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
<ProjectReference Include="../../../VexLens/StellaOps.VexLens/StellaOps.VexLens.Core/StellaOps.VexLens.Core.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>

View File

@@ -10,6 +10,8 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Reachability.Witnesses;
using System.Security.Cryptography;
using System.Text;
using System.Collections.Immutable;
namespace StellaOps.Scanner.Reachability.Stack;
@@ -45,7 +47,7 @@ public sealed class ReachabilityResultFactory : IReachabilityResultFactory
ReachabilityVerdict.Unreachable => await CreateNotAffectedResultAsync(stack, context, cancellationToken).ConfigureAwait(false),
ReachabilityVerdict.Exploitable or
ReachabilityVerdict.LikelyExploitable or
ReachabilityVerdict.PossiblyExploitable => CreateAffectedPlaceholderResult(stack),
ReachabilityVerdict.PossiblyExploitable => CreateAffectedResultFromStack(stack, context),
ReachabilityVerdict.Unknown => CreateUnknownResult(stack.Explanation ?? "Reachability could not be determined"),
_ => CreateUnknownResult($"Unexpected verdict: {stack.Verdict}")
};
@@ -188,20 +190,119 @@ public sealed class ReachabilityResultFactory : IReachabilityResultFactory
return await _suppressionBuilder.BuildUnreachableAsync(fallbackRequest, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Creates a placeholder Affected result when PathWitness is not yet available.
/// The caller should use CreateAffectedResult(PathWitness) when they have built the witness.
/// </summary>
private Witnesses.ReachabilityResult CreateAffectedPlaceholderResult(ReachabilityStack stack)
private Witnesses.ReachabilityResult CreateAffectedResultFromStack(
ReachabilityStack stack,
WitnessGenerationContext context)
{
_logger.LogDebug(
"Verdict is {Verdict} for finding {FindingId} - PathWitness should be built separately",
stack.Verdict,
stack.FindingId);
var selectedPath = stack.StaticCallGraph.Paths
.OrderBy(path => path.Sites.Length)
.ThenByDescending(path => path.Confidence)
.FirstOrDefault();
// Return Unknown with metadata indicating affected; caller should build PathWitness
// and call CreateAffectedResult(pathWitness) to get proper result
return Witnesses.ReachabilityResult.Unknown();
var entrypoint = selectedPath?.Entrypoint ?? stack.StaticCallGraph.ReachingEntrypoints.FirstOrDefault();
if (entrypoint is null)
{
_logger.LogWarning(
"Affected verdict for finding {FindingId} has no entrypoint witness data. Returning Unknown.",
stack.FindingId);
return Witnesses.ReachabilityResult.Unknown();
}
var pathSteps = new List<PathStep>();
if (selectedPath is not null)
{
pathSteps.AddRange(selectedPath.Sites.Select(site => new PathStep
{
Symbol = site.MethodName,
SymbolId = BuildSymbolId(site.MethodName, site.ClassName),
File = site.FileName,
Line = site.LineNumber
}));
}
if (pathSteps.Count == 0)
{
pathSteps.Add(new PathStep
{
Symbol = stack.Symbol.Name,
SymbolId = BuildSymbolId(stack.Symbol.Name, stack.Symbol.Library),
File = null,
Line = null
});
}
var gates = stack.RuntimeGating.Conditions
.Where(c => c.IsBlocking)
.Select(c => new DetectedGate
{
Type = MapGateType(c.Type.ToString()),
GuardSymbol = c.ConfigKey ?? c.EnvVar ?? c.Description,
Confidence = MapConditionConfidence(c),
Detail = c.Description
})
.OrderBy(g => g.Type, StringComparer.Ordinal)
.ThenBy(g => g.GuardSymbol, StringComparer.Ordinal)
.ToArray();
var nodeHashes = pathSteps
.Select(step => ComputePathNodeHash(context.ComponentPurl, step.SymbolId))
.Distinct(StringComparer.Ordinal)
.OrderBy(hash => hash, StringComparer.Ordinal)
.ToArray();
var pathHash = ComputePathHash(nodeHashes);
var witness = new PathWitness
{
WitnessId = string.Empty,
Artifact = new WitnessArtifact
{
SbomDigest = context.SbomDigest,
ComponentPurl = context.ComponentPurl
},
Vuln = new WitnessVuln
{
Id = context.VulnId,
Source = context.VulnSource,
AffectedRange = context.AffectedRange
},
Entrypoint = new WitnessEntrypoint
{
Kind = entrypoint.Type.ToString().ToLowerInvariant(),
Name = entrypoint.Name,
SymbolId = BuildSymbolId(entrypoint.Name, entrypoint.Location)
},
Path = pathSteps,
Sink = new WitnessSink
{
Symbol = stack.Symbol.Name,
SymbolId = BuildSymbolId(stack.Symbol.Name, stack.Symbol.Library),
SinkType = stack.Symbol.Type.ToString().ToLowerInvariant()
},
Gates = gates.Length == 0 ? null : gates,
Evidence = new WitnessEvidence
{
CallgraphDigest = context.GraphDigest ?? "unknown",
AnalysisConfigDigest = "reachability-stack-v1",
BuildId = context.ImageDigest
},
ObservedAt = stack.AnalyzedAt,
NodeHashes = nodeHashes,
PathHash = pathHash,
EvidenceUris = new[]
{
$"evidence:sbom:{context.SbomDigest}",
$"evidence:graph:{context.GraphDigest ?? "unknown"}"
},
ObservationType = ObservationType.Static
};
witness = witness with
{
WitnessId = $"wit:sha256:{ComputeWitnessIdHash(witness)}",
ClaimId = ClaimIdGenerator.Generate(witness.Artifact, witness.PathHash ?? string.Empty)
};
return Witnesses.ReachabilityResult.Affected(witness);
}
private static double MapConfidence(ConfidenceLevel level) => level switch
@@ -243,4 +344,39 @@ public sealed class ReachabilityResultFactory : IReachabilityResultFactory
var blockingCount = layer3.Conditions.Count(c => c.IsBlocking);
return (int)(100.0 * blockingCount / layer3.Conditions.Length);
}
/// <summary>
/// Derives a stable 16-hex-character symbol identifier from a symbol name and
/// its optional scope; a missing scope falls back to "global".
/// </summary>
private static string BuildSymbolId(string symbol, string? scope)
{
    var qualified = string.Concat(scope ?? "global", "::", symbol);
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(qualified));
    var hex = Convert.ToHexStringLower(digest);
    return "sym:" + hex[..16];
}
/// <summary>
/// Hashes a (purl, symbol-id) pair into a prefixed node hash. The purl is
/// trimmed and lowercased and the symbol id trimmed so equivalent inputs
/// collapse to the same hash.
/// </summary>
private static string ComputePathNodeHash(string purl, string symbolId)
{
    var normalizedPurl = purl.Trim().ToLowerInvariant();
    var normalizedSymbol = symbolId.Trim();
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(normalizedPurl + ":" + normalizedSymbol));
    return "sha256:" + Convert.ToHexStringLower(digest);
}
/// <summary>
/// Folds an ordered list of node hashes into a single path hash. Any
/// "sha256:" prefixes are stripped before joining so the result is
/// insensitive to whether node hashes are prefixed.
/// </summary>
private static string ComputePathHash(IReadOnlyList<string> nodeHashes)
{
    static string StripPrefix(string hash)
        => hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) ? hash["sha256:".Length..] : hash;

    var joined = string.Join(':', nodeHashes.Select(StripPrefix));
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(joined));
    return "path:sha256:" + Convert.ToHexStringLower(digest);
}
/// <summary>
/// Computes the content-addressed witness identity hash from the fields that
/// define a witness: artifact digest and purl, vulnerability id, entrypoint
/// and sink symbol ids, and the path hash (empty when absent). Returned as
/// bare lowercase hex; the caller adds the "wit:sha256:" prefix.
/// </summary>
private static string ComputeWitnessIdHash(PathWitness witness)
{
    var material = new[]
    {
        witness.Artifact.SbomDigest,
        witness.Artifact.ComponentPurl,
        witness.Vuln.Id,
        witness.Entrypoint.SymbolId,
        witness.Sink.SymbolId,
        witness.PathHash ?? string.Empty
    };
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(string.Join('|', material)));
    return Convert.ToHexStringLower(digest);
}
}

View File

@@ -19,7 +19,7 @@
<ProjectReference Include="..\\StellaOps.Scanner.CallGraph\\StellaOps.Scanner.CallGraph.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.Contracts\\StellaOps.Scanner.Contracts.csproj" />
<ProjectReference Include="..\\..\\..\\Attestor\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\\..\\..\\Signer\\StellaOps.Signer\\StellaOps.Signer.Core\\StellaOps.Signer.Core.csproj" />
<ProjectReference Include="..\\..\\..\\Attestor\\StellaOps.Signer\\StellaOps.Signer.Core\\StellaOps.Signer.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,4 +1,6 @@
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Threading.Channels;
using System.Runtime.InteropServices;
namespace StellaOps.Scanner.Runtime.Ebpf;
@@ -11,8 +13,16 @@ public sealed class EbpfTraceCollector : ITraceCollector
private readonly ILogger<EbpfTraceCollector> _logger;
private readonly ISymbolResolver _symbolResolver;
private readonly TimeProvider _timeProvider;
private readonly object _gate = new();
private bool _isRunning;
private Channel<RuntimeCallEvent>? _eventChannel;
private CancellationTokenSource? _collectorCts;
private Task? _ingestionTask;
private TraceCollectorStats _stats;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
public EbpfTraceCollector(
ILogger<EbpfTraceCollector> logger,
@@ -22,43 +32,46 @@ public sealed class EbpfTraceCollector : ITraceCollector
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_symbolResolver = symbolResolver ?? throw new ArgumentNullException(nameof(symbolResolver));
_timeProvider = timeProvider ?? TimeProvider.System;
_stats = new TraceCollectorStats
{
EventsCollected = 0,
EventsDropped = 0,
BytesProcessed = 0,
StartedAt = _timeProvider.GetUtcNow()
};
_stats = CreateInitialStats("disabled", "unsupported", null);
}
public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(config);
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
throw new PlatformNotSupportedException("eBPF tracing is only supported on Linux");
}
if (_isRunning)
{
throw new InvalidOperationException("Collector is already running");
}
_logger.LogInformation(
"Starting eBPF trace collector for PID {Pid}, container {Container}",
config.TargetPid,
config.TargetContainerId ?? "all");
var isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
if (!isLinux && !config.SealedMode)
{
throw new PlatformNotSupportedException("eBPF tracing is only supported on Linux unless sealed mode is enabled");
}
// TODO: Actual eBPF program loading and uprobe attachment
// This would use libbpf or bpf2go to:
// 1. Load BPF program into kernel
// 2. Attach uprobes to target functions
// 3. Set up ringbuffer for event streaming
// 4. Handle ASLR via /proc/pid/maps
var mode = config.SealedMode ? "sealed_replay" : "live";
var capability = isLinux ? "available" : "sealed_fallback";
_logger.LogInformation(
"Starting eBPF trace collector for PID {Pid}, container {Container}. Mode={Mode}, Capability={Capability}",
config.TargetPid,
config.TargetContainerId ?? "all",
mode,
capability);
_isRunning = true;
_stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
_eventChannel = Channel.CreateUnbounded<RuntimeCallEvent>(new UnboundedChannelOptions
{
SingleWriter = true,
SingleReader = false
});
_collectorCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_stats = CreateInitialStats(mode, capability, null) with
{
StartedAt = _timeProvider.GetUtcNow(),
IsRunning = true
};
_ingestionTask = Task.Run(() => IngestionLoopAsync(config, _collectorCts.Token), _collectorCts.Token);
_logger.LogInformation("eBPF trace collector started successfully");
@@ -73,44 +86,300 @@ public sealed class EbpfTraceCollector : ITraceCollector
}
_logger.LogInformation("Stopping eBPF trace collector");
// TODO: Detach uprobes and cleanup BPF resources
_isRunning = false;
_stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
_logger.LogInformation(
"eBPF trace collector stopped. Events: {Events}, Dropped: {Dropped}",
_stats.EventsCollected,
_stats.EventsDropped);
return Task.CompletedTask;
_collectorCts?.Cancel();
return FinalizeStopAsync();
}
public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
if (!_isRunning)
var channel = _eventChannel;
if (channel is null)
{
yield break;
}
// TODO: Read events from eBPF ringbuffer
// This is a placeholder - actual implementation would:
// 1. Poll ringbuffer for events
// 2. Resolve symbols using /proc/kallsyms and binary debug info
// 3. Handle container namespace awareness
// 4. Apply rate limiting
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
yield break;
while (await channel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false))
{
while (channel.Reader.TryRead(out var next))
{
yield return next;
}
}
}
public TraceCollectorStats GetStatistics() => _stats;
public async ValueTask DisposeAsync()
{
await StopAsync().ConfigureAwait(false);
await StopAsync(CancellationToken.None).ConfigureAwait(false);
}
/// <summary>
/// Background loop that loads events (preloaded or fixture-backed), filters
/// and normalizes them, and publishes them to the event channel. Always
/// completes the channel writer on exit so readers are released; a faulting
/// exception is propagated to readers via TryComplete(ex).
/// </summary>
private async Task IngestionLoopAsync(TraceCollectorConfig config, CancellationToken cancellationToken)
{
    // Snapshot the channel; StartAsync created it before scheduling this loop.
    var channel = _eventChannel;
    if (channel is null)
    {
        return;
    }
    try
    {
        var events = await LoadEventsAsync(config, cancellationToken).ConfigureAwait(false);
        foreach (var rawEvent in events)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (!MatchesConfigFilters(rawEvent, config))
            {
                continue;
            }
            var normalized = await NormalizeEventAsync(rawEvent, config, cancellationToken).ConfigureAwait(false);
            await channel.Writer.WriteAsync(normalized, cancellationToken).ConfigureAwait(false);
            RecordCollected(normalized);
            // Optional throttle so consumers observe a bounded event rate.
            await DelayForRateLimitAsync(config, cancellationToken).ConfigureAwait(false);
        }
        channel.Writer.TryComplete();
    }
    catch (OperationCanceledException)
    {
        // Cancellation is the normal shutdown path; complete without error.
        channel.Writer.TryComplete();
    }
    catch (Exception ex)
    {
        // Record the failure in stats and fault the channel for readers.
        _logger.LogWarning(ex, "eBPF ingestion loop failed");
        UpdateStats(lastError: ex.Message);
        channel.Writer.TryComplete(ex);
    }
}
/// <summary>
/// Resolves the event source for this run: explicit preloaded events take
/// precedence, otherwise a JSON fixture file is deserialized. Either source
/// is sorted into a deterministic order; when neither is available (or the
/// fixture fails to parse) an empty list is returned.
/// </summary>
private async Task<IReadOnlyList<RuntimeCallEvent>> LoadEventsAsync(
    TraceCollectorConfig config,
    CancellationToken cancellationToken)
{
    if (config.PreloadedEvents is { Count: > 0 } preloaded)
    {
        return SortEvents(preloaded);
    }
    if (!string.IsNullOrWhiteSpace(config.FixtureFilePath) && File.Exists(config.FixtureFilePath))
    {
        try
        {
            var bytes = await File.ReadAllBytesAsync(config.FixtureFilePath, cancellationToken).ConfigureAwait(false);
            var parsed = JsonSerializer.Deserialize<IReadOnlyList<RuntimeCallEvent>>(bytes, JsonOptions)
                ?? Array.Empty<RuntimeCallEvent>();
            return SortEvents(parsed);
        }
        catch (Exception ex)
        {
            // Fixture problems are non-fatal: log, record the error in stats,
            // and fall back to an empty event stream.
            _logger.LogWarning(ex, "Failed to load eBPF fixture events from {Path}", config.FixtureFilePath);
            UpdateStats(lastError: $"fixture_load_failed:{ex.GetType().Name}");
            return Array.Empty<RuntimeCallEvent>();
        }
    }
    return Array.Empty<RuntimeCallEvent>();
}
/// <summary>
/// Orders events deterministically: by timestamp, then pid, tid, caller
/// address, and callee address, so replays of the same input are stable.
/// </summary>
private static IReadOnlyList<RuntimeCallEvent> SortEvents(IReadOnlyList<RuntimeCallEvent> events)
{
    var byTime = events.OrderBy(item => item.Timestamp);
    var byProcess = byTime.ThenBy(item => item.Pid).ThenBy(item => item.Tid);
    return byProcess
        .ThenBy(item => item.CallerAddress)
        .ThenBy(item => item.CalleeAddress)
        .ToArray();
}
/// <summary>
/// Normalizes a raw call event: trims caller/callee symbols, optionally
/// resolves missing symbols through the symbol resolver (when
/// config.ResolveSymbols is set), and fills address-derived placeholder
/// symbols and a /proc-based binary path when values are still absent.
/// </summary>
private async Task<RuntimeCallEvent> NormalizeEventAsync(
    RuntimeCallEvent input,
    TraceCollectorConfig config,
    CancellationToken cancellationToken)
{
    // Blank symbols are treated as missing so the resolver/fallback applies.
    var caller = string.IsNullOrWhiteSpace(input.CallerSymbol)
        ? null
        : input.CallerSymbol.Trim();
    var callee = string.IsNullOrWhiteSpace(input.CalleeSymbol)
        ? null
        : input.CalleeSymbol.Trim();
    // Only consult the resolver for symbols the event did not already carry.
    if (config.ResolveSymbols && (caller is null || callee is null))
    {
        if (caller is null)
        {
            caller = await _symbolResolver.ResolveSymbolAsync(input.Pid, input.CallerAddress, cancellationToken).ConfigureAwait(false);
        }
        if (callee is null)
        {
            callee = await _symbolResolver.ResolveSymbolAsync(input.Pid, input.CalleeAddress, cancellationToken).ConfigureAwait(false);
        }
    }
    return input with
    {
        // Address-derived placeholders keep the event usable when unresolved.
        CallerSymbol = caller ?? $"func_0x{input.CallerAddress:x}",
        CalleeSymbol = callee ?? $"func_0x{input.CalleeAddress:x}",
        BinaryPath = string.IsNullOrWhiteSpace(input.BinaryPath)
            ? $"/proc/{input.Pid}/exe"
            : input.BinaryPath
    };
}
/// <summary>
/// Applies the collector's configured filters to an event: target pid
/// (0 means "any"), exact container id, and symbol glob patterns (an event
/// passes if any pattern matches either its caller or callee symbol).
/// </summary>
private static bool MatchesConfigFilters(RuntimeCallEvent evt, TraceCollectorConfig config)
{
    var pidAccepted = config.TargetPid == 0 || evt.Pid == config.TargetPid;
    if (!pidAccepted)
    {
        return false;
    }

    var containerFilter = config.TargetContainerId;
    if (!string.IsNullOrWhiteSpace(containerFilter) &&
        !string.Equals(evt.ContainerId, containerFilter, StringComparison.Ordinal))
    {
        return false;
    }

    // No symbol patterns configured means every remaining event passes.
    if (config.SymbolPatterns is not { Count: > 0 } patterns)
    {
        return true;
    }

    return patterns.Any(pattern =>
        MatchesPattern(evt.CallerSymbol, pattern) ||
        MatchesPattern(evt.CalleeSymbol, pattern));
}
/// <summary>
/// Case-insensitive glob match supporting a leading and/or trailing '*':
/// "*x*" means contains, "*x" means ends-with, "x*" means starts-with, a
/// lone "*" (or blank pattern) matches anything, and anything else is an
/// exact case-insensitive comparison.
/// </summary>
private static bool MatchesPattern(string value, string pattern)
{
    if (string.IsNullOrWhiteSpace(pattern))
    {
        return true;
    }

    var candidate = value ?? string.Empty;
    var glob = pattern.Trim();

    if (glob == "*")
    {
        return true;
    }

    var leadingStar = glob.StartsWith('*');
    var trailingStar = glob.EndsWith('*');

    // "*x*" — substring match (needs at least one char between the stars).
    if (leadingStar && trailingStar && glob.Length > 2)
    {
        return candidate.Contains(glob[1..^1], StringComparison.OrdinalIgnoreCase);
    }

    if (leadingStar)
    {
        return candidate.EndsWith(glob[1..], StringComparison.OrdinalIgnoreCase);
    }

    if (trailingStar)
    {
        return candidate.StartsWith(glob[..^1], StringComparison.OrdinalIgnoreCase);
    }

    return string.Equals(candidate, glob, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Returns a delay task that spaces events to at most MaxEventsPerSecond.
/// Non-positive or effectively-unlimited rates yield a completed task so the
/// caller is never throttled.
/// </summary>
private static Task DelayForRateLimitAsync(TraceCollectorConfig config, CancellationToken cancellationToken)
{
    var eventsPerSecond = config.MaxEventsPerSecond;

    if (eventsPerSecond <= 0 || eventsPerSecond >= int.MaxValue)
    {
        return Task.CompletedTask;
    }

    var pause = TimeSpan.FromSeconds(1d / eventsPerSecond);
    return pause > TimeSpan.Zero
        ? Task.Delay(pause, cancellationToken)
        : Task.CompletedTask;
}
/// <summary>
/// Accounts for one published event: increments the collected counter and
/// adds an approximate payload size to the processed-bytes total.
/// </summary>
private void RecordCollected(RuntimeCallEvent evt)
{
    // Approximate wire size: the three string payloads plus the fixed-width
    // address (3 x ulong) and pid/tid (2 x uint) fields.
    var payloadBytes = evt.CallerSymbol.Length
        + evt.CalleeSymbol.Length
        + evt.BinaryPath.Length
        + sizeof(ulong) * 3
        + sizeof(uint) * 2;
    // _stats is an immutable snapshot; swap it under the gate so concurrent
    // updates cannot lose increments.
    lock (_gate)
    {
        _stats = _stats with
        {
            EventsCollected = _stats.EventsCollected + 1,
            BytesProcessed = _stats.BytesProcessed + payloadBytes
        };
    }
}
/// <summary>
/// Drains shutdown: awaits the ingestion task (treating cancellation as a
/// normal stop), then tears down the channel and cancellation resources,
/// marks the collector stopped with its final duration, and logs the final
/// counters. The finally block runs even if the ingestion task faulted.
/// </summary>
private async Task FinalizeStopAsync()
{
    try
    {
        if (_ingestionTask is not null)
        {
            await _ingestionTask.ConfigureAwait(false);
        }
    }
    catch (OperationCanceledException)
    {
        // Normal shutdown
    }
    finally
    {
        // Release readers and dispose the linked cancellation source.
        _eventChannel?.Writer.TryComplete();
        _collectorCts?.Dispose();
        _collectorCts = null;
        _ingestionTask = null;
        _isRunning = false;
        lock (_gate)
        {
            _stats = _stats with
            {
                IsRunning = false,
                Duration = _timeProvider.GetUtcNow() - _stats.StartedAt
            };
        }
    }
    _logger.LogInformation(
        "eBPF trace collector stopped. Events: {Events}, Dropped: {Dropped}, Bytes: {Bytes}",
        _stats.EventsCollected,
        _stats.EventsDropped,
        _stats.BytesProcessed);
}
/// <summary>
/// Replaces the stats snapshot's LastError under the gate. Passing null (the
/// default) clears any previously recorded error.
/// </summary>
private void UpdateStats(string? lastError = null)
{
    lock (_gate)
    {
        _stats = _stats with { LastError = lastError };
    }
}
/// <summary>
/// Produces a zeroed stats snapshot pinned to the current time. Duration
/// remains null and IsRunning false until the collector is started/stopped.
/// </summary>
private TraceCollectorStats CreateInitialStats(string mode, string capability, string? lastError) =>
    new()
    {
        StartedAt = _timeProvider.GetUtcNow(),
        EventsCollected = 0,
        EventsDropped = 0,
        BytesProcessed = 0,
        Duration = null,
        IsRunning = false,
        Mode = mode,
        Capability = capability,
        LastError = lastError
    };
}
@@ -127,11 +396,9 @@ public interface ISymbolResolver
/// </summary>
public sealed class LinuxSymbolResolver : ISymbolResolver
{
private readonly ILogger<LinuxSymbolResolver> _logger;
public LinuxSymbolResolver(ILogger<LinuxSymbolResolver> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
ArgumentNullException.ThrowIfNull(logger);
}
public async Task<string> ResolveSymbolAsync(
@@ -139,13 +406,8 @@ public sealed class LinuxSymbolResolver : ISymbolResolver
ulong address,
CancellationToken cancellationToken = default)
{
// TODO: Actual symbol resolution:
// 1. Read /proc/{pid}/maps to find binary containing address
// 2. Adjust for ASLR offset
// 3. Use libdwarf or addr2line to resolve symbol
// 4. Cache results for performance
await Task.Delay(1, cancellationToken).ConfigureAwait(false);
await Task.Yield();
cancellationToken.ThrowIfCancellationRequested();
return $"func_0x{address:x}";
}
}

View File

@@ -1,4 +1,6 @@
using Microsoft.Extensions.Logging;
using System.Text.Json;
using System.Threading.Channels;
using System.Runtime.InteropServices;
namespace StellaOps.Scanner.Runtime.Etw;
@@ -10,8 +12,16 @@ public sealed class EtwTraceCollector : ITraceCollector
{
private readonly ILogger<EtwTraceCollector> _logger;
private readonly TimeProvider _timeProvider;
private readonly object _gate = new();
private bool _isRunning;
private Channel<RuntimeCallEvent>? _eventChannel;
private CancellationTokenSource? _collectorCts;
private Task? _ingestionTask;
private TraceCollectorStats _stats;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
public EtwTraceCollector(
ILogger<EtwTraceCollector> logger,
@@ -19,43 +29,45 @@ public sealed class EtwTraceCollector : ITraceCollector
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_stats = new TraceCollectorStats
{
EventsCollected = 0,
EventsDropped = 0,
BytesProcessed = 0,
StartedAt = _timeProvider.GetUtcNow()
};
_stats = CreateInitialStats("disabled", "unsupported", null);
}
public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(config);
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
throw new PlatformNotSupportedException("ETW tracing is only supported on Windows");
}
if (_isRunning)
{
throw new InvalidOperationException("Collector is already running");
}
_logger.LogInformation(
"Starting ETW trace collector for PID {Pid}",
config.TargetPid);
var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
if (!isWindows && !config.SealedMode)
{
throw new PlatformNotSupportedException("ETW tracing is only supported on Windows unless sealed mode is enabled");
}
// TODO: Actual ETW session setup
// This would use TraceEvent or Microsoft.Diagnostics.Tracing.TraceEvent to:
// 1. Create ETW session
// 2. Subscribe to Microsoft-Windows-DotNETRuntime provider
// 3. Subscribe to native call events
// 4. Enable stack walking
// 5. Filter by process ID
var mode = config.SealedMode ? "sealed_replay" : "live";
var capability = isWindows ? "available" : "sealed_fallback";
_logger.LogInformation(
"Starting ETW trace collector for PID {Pid}. Mode={Mode}, Capability={Capability}",
config.TargetPid,
mode,
capability);
_isRunning = true;
_stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
_eventChannel = Channel.CreateUnbounded<RuntimeCallEvent>(new UnboundedChannelOptions
{
SingleWriter = true,
SingleReader = false
});
_collectorCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_stats = CreateInitialStats(mode, capability, null) with
{
StartedAt = _timeProvider.GetUtcNow(),
IsRunning = true
};
_ingestionTask = Task.Run(() => IngestionLoopAsync(config, _collectorCts.Token), _collectorCts.Token);
_logger.LogInformation("ETW trace collector started successfully");
@@ -70,44 +82,227 @@ public sealed class EtwTraceCollector : ITraceCollector
}
_logger.LogInformation("Stopping ETW trace collector");
// TODO: Stop ETW session and cleanup
_isRunning = false;
_stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
_logger.LogInformation(
"ETW trace collector stopped. Events: {Events}, Dropped: {Dropped}",
_stats.EventsCollected,
_stats.EventsDropped);
return Task.CompletedTask;
_collectorCts?.Cancel();
return FinalizeStopAsync();
}
public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
if (!_isRunning)
var channel = _eventChannel;
if (channel is null)
{
yield break;
}
// TODO: Process ETW events
// This is a placeholder - actual implementation would:
// 1. Subscribe to ETW event stream
// 2. Process CLR and native method events
// 3. Resolve symbols using DbgHelp
// 4. Correlate stack traces
// 5. Apply rate limiting
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
yield break;
while (await channel.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false))
{
while (channel.Reader.TryRead(out var next))
{
yield return next;
}
}
}
public TraceCollectorStats GetStatistics()
{
    // The stats record is replaced wholesale under the lock, so handing out the
    // current reference is a consistent snapshot without taking the lock here.
    return _stats;
}
public async ValueTask DisposeAsync()
{
await StopAsync().ConfigureAwait(false);
await StopAsync(CancellationToken.None).ConfigureAwait(false);
}
/// <summary>
/// Replays loaded events (preloaded or fixture) into the channel, applying the
/// configured filters and rate limit, and completes the writer on exit.
/// </summary>
private async Task IngestionLoopAsync(TraceCollectorConfig config, CancellationToken cancellationToken)
{
    var channel = _eventChannel;
    if (channel is null)
    {
        return;
    }

    var writer = channel.Writer;
    try
    {
        var source = await LoadEventsAsync(config, cancellationToken).ConfigureAwait(false);
        foreach (var candidate in source)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (!MatchesConfigFilters(candidate, config))
            {
                continue;
            }

            // Ensure every emitted event carries a non-empty binary path.
            var outgoing = candidate with
            {
                BinaryPath = string.IsNullOrWhiteSpace(candidate.BinaryPath)
                    ? "unknown-binary"
                    : candidate.BinaryPath
            };

            await writer.WriteAsync(outgoing, cancellationToken).ConfigureAwait(false);
            RecordCollected(outgoing);
            await DelayForRateLimitAsync(config, cancellationToken).ConfigureAwait(false);
        }

        writer.TryComplete();
    }
    catch (OperationCanceledException)
    {
        // Cooperative shutdown: complete the channel without an error.
        writer.TryComplete();
    }
    catch (Exception ex)
    {
        _logger.LogWarning(ex, "ETW ingestion loop failed");
        UpdateStats(lastError: ex.Message);
        writer.TryComplete(ex);
    }
}
/// <summary>
/// Resolves the deterministic event source: in-memory preloaded events first,
/// then a JSON fixture file, otherwise an empty list. Events are always sorted.
/// </summary>
private async Task<IReadOnlyList<RuntimeCallEvent>> LoadEventsAsync(
    TraceCollectorConfig config,
    CancellationToken cancellationToken)
{
    if (config.PreloadedEvents is { Count: > 0 } inMemory)
    {
        return SortEvents(inMemory);
    }

    var fixturePath = config.FixtureFilePath;
    if (string.IsNullOrWhiteSpace(fixturePath) || !File.Exists(fixturePath))
    {
        return Array.Empty<RuntimeCallEvent>();
    }

    try
    {
        var json = await File.ReadAllBytesAsync(fixturePath, cancellationToken).ConfigureAwait(false);
        var deserialized = JsonSerializer.Deserialize<IReadOnlyList<RuntimeCallEvent>>(json, JsonOptions);
        return SortEvents(deserialized ?? Array.Empty<RuntimeCallEvent>());
    }
    catch (Exception ex)
    {
        // A broken fixture degrades to "no events" rather than failing the collector.
        _logger.LogWarning(ex, "Failed to load ETW fixture events from {Path}", fixturePath);
        UpdateStats(lastError: $"fixture_load_failed:{ex.GetType().Name}");
        return Array.Empty<RuntimeCallEvent>();
    }
}
/// <summary>
/// Orders events by timestamp, then pid/tid, then caller/callee address so that
/// replayed streams are fully deterministic regardless of input order.
/// </summary>
private static IReadOnlyList<RuntimeCallEvent> SortEvents(IReadOnlyList<RuntimeCallEvent> events)
{
    var ordered = events
        .OrderBy(static e => e.Timestamp)
        .ThenBy(static e => e.Pid)
        .ThenBy(static e => e.Tid)
        .ThenBy(static e => e.CallerAddress)
        .ThenBy(static e => e.CalleeAddress);
    return ordered.ToArray();
}
/// <summary>
/// Applies the configured pid/container filters; a zero pid or empty container
/// id means "match everything" for that dimension.
/// </summary>
private static bool MatchesConfigFilters(RuntimeCallEvent evt, TraceCollectorConfig config)
{
    var pidMatches = config.TargetPid == 0 || evt.Pid == config.TargetPid;
    if (!pidMatches)
    {
        return false;
    }

    var containerFilter = config.TargetContainerId;
    return string.IsNullOrWhiteSpace(containerFilter)
        || string.Equals(evt.ContainerId, containerFilter, StringComparison.Ordinal);
}
/// <summary>
/// Returns a delay that spaces events to at most MaxEventsPerSecond; a
/// non-positive or effectively-unbounded limit yields no delay at all.
/// </summary>
private static Task DelayForRateLimitAsync(TraceCollectorConfig config, CancellationToken cancellationToken)
{
    var eventsPerSecond = config.MaxEventsPerSecond;
    if (eventsPerSecond <= 0 || eventsPerSecond >= int.MaxValue)
    {
        return Task.CompletedTask;
    }

    var pause = TimeSpan.FromSeconds(1d / eventsPerSecond);
    // Very large rates can round down to zero; skip the delay in that case.
    return pause > TimeSpan.Zero
        ? Task.Delay(pause, cancellationToken)
        : Task.CompletedTask;
}
/// <summary>
/// Bumps the collected-event counter and adds an estimated payload size
/// (symbol/path text plus fixed-width numeric fields) to the byte total.
/// </summary>
private void RecordCollected(RuntimeCallEvent evt)
{
    // 3 x 64-bit addresses/timestamps + 2 x 32-bit ids, per the event layout.
    const int FixedFieldBytes = sizeof(ulong) * 3 + sizeof(uint) * 2;
    var estimatedBytes = FixedFieldBytes
        + evt.CallerSymbol.Length
        + evt.CalleeSymbol.Length
        + evt.BinaryPath.Length;

    lock (_gate)
    {
        var current = _stats;
        _stats = current with
        {
            EventsCollected = current.EventsCollected + 1,
            BytesProcessed = current.BytesProcessed + estimatedBytes
        };
    }
}
/// <summary>
/// Drains the ingestion task, completes the channel writer, disposes the linked
/// cancellation source, and stamps the final statistics (duration, not-running).
/// Cleanup runs in <c>finally</c> so it happens even if awaiting the task throws.
/// </summary>
private async Task FinalizeStopAsync()
{
    try
    {
        if (_ingestionTask is not null)
        {
            await _ingestionTask.ConfigureAwait(false);
        }
    }
    catch (OperationCanceledException)
    {
        // Normal shutdown
    }
    finally
    {
        // TryComplete is idempotent; safe even if the loop already completed the writer.
        _eventChannel?.Writer.TryComplete();
        _collectorCts?.Dispose();
        _collectorCts = null;
        _ingestionTask = null;
        _isRunning = false;
        lock (_gate)
        {
            _stats = _stats with
            {
                IsRunning = false,
                Duration = _timeProvider.GetUtcNow() - _stats.StartedAt
            };
        }
    }
    // NOTE(review): _stats is read outside the lock below. The record reference is
    // swapped atomically under the lock, so this reads a consistent snapshot, but a
    // late concurrent update could still be missed — confirm that is acceptable.
    _logger.LogInformation(
        "ETW trace collector stopped. Events: {Events}, Dropped: {Dropped}, Bytes: {Bytes}",
        _stats.EventsCollected,
        _stats.EventsDropped,
        _stats.BytesProcessed);
}
/// <summary>
/// Records the most recent error string (or clears it) on the stats snapshot.
/// </summary>
private void UpdateStats(string? lastError = null)
{
    lock (_gate)
    {
        var snapshot = _stats;
        _stats = snapshot with { LastError = lastError };
    }
}
/// <summary>
/// Builds a zeroed stats record for a new session; StartedAt comes from the
/// injected clock so tests can control it.
/// </summary>
private TraceCollectorStats CreateInitialStats(string mode, string capability, string? lastError)
{
    var now = _timeProvider.GetUtcNow();
    return new TraceCollectorStats
    {
        EventsCollected = 0L,
        EventsDropped = 0L,
        BytesProcessed = 0L,
        StartedAt = now,
        Duration = null,
        IsRunning = false,
        Mode = mode,
        Capability = capability,
        LastError = lastError
    };
}
}

View File

@@ -95,6 +95,26 @@ public sealed record TraceCollectorConfig
/// Enable stack trace capture.
/// </summary>
public bool CaptureStackTraces { get; init; }
/// <summary>
/// Sealed/offline mode: replay deterministic fixture events instead of host tracing APIs.
/// </summary>
public bool SealedMode { get; init; }
/// <summary>
/// Optional JSON fixture file with <see cref="RuntimeCallEvent"/> entries.
/// </summary>
public string? FixtureFilePath { get; init; }
/// <summary>
/// Optional in-memory deterministic events used for tests and offline replay.
/// </summary>
public IReadOnlyList<RuntimeCallEvent>? PreloadedEvents { get; init; }
/// <summary>
/// Resolve missing symbol names via collector-specific symbol resolvers.
/// </summary>
public bool ResolveSymbols { get; init; } = true;
}
/// <summary>
@@ -132,5 +152,9 @@ public sealed record TraceCollectorStats
public required long EventsDropped { get; init; }
public required long BytesProcessed { get; init; }
public required DateTimeOffset StartedAt { get; init; }
public required bool IsRunning { get; init; }
public required string Mode { get; init; }
public required string Capability { get; init; }
public string? LastError { get; init; }
public TimeSpan? Duration { get; init; }
}

View File

@@ -1,6 +1,9 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Cache.Abstractions;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.Runtime.Ingestion;
@@ -9,9 +12,16 @@ namespace StellaOps.Scanner.Runtime.Ingestion;
/// </summary>
public sealed class TraceIngestionService : ITraceIngestionService
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
private readonly IFileContentAddressableStore _cas;
private readonly ILogger<TraceIngestionService> _logger;
private readonly TimeProvider _timeProvider;
private readonly object _scanIndexSync = new();
private readonly Dictionary<string, byte[]> _payloadByTraceId = new(StringComparer.Ordinal);
private readonly Dictionary<string, NormalizedTrace> _traceById = new(StringComparer.Ordinal);
private readonly Dictionary<string, ImmutableSortedSet<string>> _traceIdsByScan = new(StringComparer.Ordinal);
private readonly Dictionary<string, string> _casDigestByTraceId = new(StringComparer.Ordinal);
public TraceIngestionService(
IFileContentAddressableStore cas,
@@ -82,13 +92,16 @@ public sealed class TraceIngestionService : ITraceIngestionService
.ThenBy(e => e.To)
.ToList();
var duration = (lastEvent ?? _timeProvider.GetUtcNow()) - (firstEvent ?? _timeProvider.GetUtcNow());
var collectedAt = _timeProvider.GetUtcNow();
var duration = (lastEvent ?? collectedAt) - (firstEvent ?? collectedAt);
var normalizedScanId = scanId.Trim();
var traceId = GenerateTraceId(normalizedScanId, edges, pid ?? 0, binaryPath ?? "unknown", eventCount, duration);
var trace = new NormalizedTrace
{
TraceId = GenerateTraceId(scanId, eventCount),
ScanId = scanId,
CollectedAt = _timeProvider.GetUtcNow(),
TraceId = traceId,
ScanId = normalizedScanId,
CollectedAt = collectedAt,
Edges = edges,
Metadata = new TraceMetadata
{
@@ -115,16 +128,32 @@ public sealed class TraceIngestionService : ITraceIngestionService
{
ArgumentNullException.ThrowIfNull(trace);
var json = System.Text.Json.JsonSerializer.Serialize(trace);
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var bytes = JsonSerializer.SerializeToUtf8Bytes(trace, JsonOptions);
var digest = ComputeSha256(bytes);
await using var stream = new MemoryStream(bytes, writable: false);
var casKey = $"trace_{trace.TraceId}";
await _cas.PutAsync(new FileCasPutRequest(casKey, stream, leaveOpen: false), cancellationToken)
await _cas.PutAsync(new FileCasPutRequest(digest, stream, leaveOpen: false), cancellationToken)
.ConfigureAwait(false);
_logger.LogInformation("Stored trace {TraceId} in CAS with key {CasKey}", trace.TraceId, casKey);
lock (_scanIndexSync)
{
_payloadByTraceId[trace.TraceId] = bytes;
_traceById[trace.TraceId] = trace;
_casDigestByTraceId[trace.TraceId] = digest;
if (!_traceIdsByScan.TryGetValue(trace.ScanId, out var existing))
{
existing = ImmutableSortedSet<string>.Empty;
}
_traceIdsByScan[trace.ScanId] = existing.Add(trace.TraceId);
}
_logger.LogInformation(
"Stored trace {TraceId} in CAS digest {Digest} and indexed for scan {ScanId}",
trace.TraceId,
digest,
trace.ScanId);
return trace.TraceId;
}
@@ -135,47 +164,119 @@ public sealed class TraceIngestionService : ITraceIngestionService
{
ArgumentException.ThrowIfNullOrWhiteSpace(traceId);
var casKey = $"trace_{traceId}";
var normalizedTraceId = traceId.Trim();
lock (_scanIndexSync)
{
if (_traceById.TryGetValue(normalizedTraceId, out var cached))
{
return cached;
}
if (_payloadByTraceId.TryGetValue(normalizedTraceId, out var payload))
{
var hydrated = JsonSerializer.Deserialize<NormalizedTrace>(payload, JsonOptions);
if (hydrated is not null)
{
_traceById[normalizedTraceId] = hydrated;
return hydrated;
}
}
}
// We can verify CAS presence via TryGetAsync, but payload bytes are not available
// through CAS abstractions in this module.
string? digest;
lock (_scanIndexSync)
{
_casDigestByTraceId.TryGetValue(normalizedTraceId, out digest);
}
if (string.IsNullOrWhiteSpace(digest))
{
return null;
}
try
{
var bytes = await _cas.GetAsync(new FileCasGetRequest(casKey), cancellationToken)
.ConfigureAwait(false);
if (bytes is null)
var entry = await _cas.TryGetAsync(digest, cancellationToken).ConfigureAwait(false);
if (entry is null)
{
return null;
}
var trace = System.Text.Json.JsonSerializer.Deserialize<NormalizedTrace>(bytes);
return trace;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving trace {TraceId}", traceId);
return null;
_logger.LogWarning(ex, "CAS lookup failed for trace {TraceId}", normalizedTraceId);
}
return null;
}
public async Task<IReadOnlyList<NormalizedTrace>> GetTracesForScanAsync(
string scanId,
CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
var normalizedScanId = scanId.Trim();
ImmutableSortedSet<string> traceIds;
lock (_scanIndexSync)
{
if (!_traceIdsByScan.TryGetValue(normalizedScanId, out traceIds!))
{
return Array.Empty<NormalizedTrace>();
}
}
// TODO: Implement scan-to-trace index
// For now, return empty list
await Task.Delay(1, cancellationToken).ConfigureAwait(false);
return Array.Empty<NormalizedTrace>();
var traces = new List<NormalizedTrace>(traceIds.Count);
foreach (var traceId in traceIds)
{
cancellationToken.ThrowIfCancellationRequested();
var trace = await GetTraceAsync(traceId, cancellationToken).ConfigureAwait(false);
if (trace is not null)
{
traces.Add(trace);
}
}
return traces
.OrderBy(t => t.TraceId, StringComparer.Ordinal)
.ToList();
}
private string GenerateTraceId(string scanId, long eventCount)
private static string GenerateTraceId(
string scanId,
IReadOnlyList<RuntimeCallEdge> edges,
uint processId,
string binaryPath,
long eventCount,
TimeSpan duration)
{
var input = $"{scanId}|{eventCount}|{_timeProvider.GetUtcNow().Ticks}";
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input));
var builder = new StringBuilder();
builder.Append(scanId).Append('|')
.Append(processId).Append('|')
.Append(binaryPath).Append('|')
.Append(eventCount).Append('|')
.Append(duration.Ticks);
foreach (var edge in edges.OrderBy(e => e.From, StringComparer.Ordinal).ThenBy(e => e.To, StringComparer.Ordinal))
{
builder.Append('|')
.Append(edge.From).Append("->").Append(edge.To).Append(':')
.Append(edge.ObservationCount).Append(':')
.Append(edge.FirstObserved.UtcTicks).Append(':')
.Append(edge.LastObserved.UtcTicks);
}
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
return $"trace_{Convert.ToHexString(hash)[..16].ToLowerInvariant()}";
}
/// <summary>
/// Lower-case hex SHA-256 digest of the payload, used as the CAS key.
/// </summary>
private static string ComputeSha256(byte[] bytes)
{
    var digest = SHA256.HashData(bytes);
    return Convert.ToHexString(digest).ToLowerInvariant();
}
private sealed class RuntimeCallEdgeBuilder
{
public required string From { get; init; }

View File

@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <!-- Default compile globbing is disabled: only the items listed below build. -->
    <EnableDefaultCompileItems>false</EnableDefaultCompileItems>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" />
  </ItemGroup>
  <!-- Explicit compile set: collector contract plus the eBPF/ETW/ingestion folders. -->
  <ItemGroup>
    <Compile Include="ITraceCollector.cs" />
    <Compile Include="Ebpf\*.cs" />
    <Compile Include="Etw\*.cs" />
    <Compile Include="Ingestion\*.cs" />
  </ItemGroup>
</Project>

View File

@@ -1,5 +1,8 @@
using Microsoft.Extensions.Logging;
using StellaOps.Replay.Core;
using StellaOps.Scanner.ProofSpine;
using System.Net;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
@@ -31,6 +34,25 @@ public sealed record SlicePullOptions
/// Request timeout. Default: 30 seconds.
/// </summary>
public TimeSpan RequestTimeout { get; init; } = TimeSpan.FromSeconds(30);
/// <summary>
/// Whether to attempt deterministic fallback discovery when OCI referrers API is unavailable.
/// Default: true.
/// </summary>
public bool EnableReferrersFallback { get; init; } = true;
/// <summary>
/// Candidate tag prefixes for deterministic fallback referrer discovery.
/// </summary>
public IReadOnlyList<string> ReferrerTagPrefixes { get; init; } = new[]
{
"att-",
"ref-",
"sha256-",
"sbom-",
"vex-",
"proof-"
};
}
/// <summary>
@@ -51,6 +73,32 @@ public sealed record SlicePullResult
public bool SignatureVerified { get; init; }
}
/// <summary>
/// Capability status for OCI referrer discovery.
/// </summary>
public enum OciReferrersCapability
{
    /// <summary>The registry answered the referrers API with a success status.</summary>
    Supported,

    /// <summary>The registry rejected the referrers API (404/405/406/400); tag fallback may apply.</summary>
    Unsupported,

    /// <summary>The referrers endpoint could not be used: transport error or an unexpected status.</summary>
    Unavailable
}
/// <summary>
/// Result for referrer queries with capability and fallback metadata.
/// </summary>
public sealed record OciReferrersQueryResult
{
    /// <summary>Referrer descriptors discovered via the referrers API or fallback tags.</summary>
    public required IReadOnlyList<OciReferrer> Referrers { get; init; }

    /// <summary>Whether the registry supports the OCI referrers API for this query.</summary>
    public required OciReferrersCapability Capability { get; init; }

    /// <summary>True when tag-based fallback discovery was used to populate <see cref="Referrers"/>.</summary>
    public bool FallbackUsed { get; init; }

    /// <summary>Machine-readable failure reason (for example "referrers_error" plus status); null on success.</summary>
    public string? FailureReason { get; init; }

    /// <summary>HTTP status code returned by the referrers endpoint, when one was received.</summary>
    public int? StatusCode { get; init; }
}
/// <summary>
/// Service for pulling reachability slices from OCI registries.
/// Supports content-addressed retrieval and DSSE signature verification.
@@ -61,6 +109,7 @@ public sealed class SlicePullService : IDisposable
private readonly HttpClient _httpClient;
private readonly OciRegistryAuthorization _authorization;
private readonly SlicePullOptions _options;
private readonly IDsseSigningService? _dsseSigningService;
private readonly ILogger<SlicePullService> _logger;
private readonly TimeProvider _timeProvider;
private readonly Dictionary<string, CachedSlice> _cache = new(StringComparer.Ordinal);
@@ -72,12 +121,14 @@ public sealed class SlicePullService : IDisposable
HttpClient httpClient,
OciRegistryAuthorization authorization,
SlicePullOptions? options = null,
IDsseSigningService? dsseSigningService = null,
ILogger<SlicePullService>? logger = null,
TimeProvider? timeProvider = null)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_authorization = authorization ?? throw new ArgumentNullException(nameof(authorization));
_options = options ?? new SlicePullOptions();
_dsseSigningService = dsseSigningService;
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<SlicePullService>.Instance;
_timeProvider = timeProvider ?? TimeProvider.System;
_httpClient.Timeout = _options.RequestTimeout;
@@ -330,6 +381,20 @@ public sealed class SlicePullService : IDisposable
string digest,
string? artifactType = null,
CancellationToken cancellationToken = default)
{
var result = await ListReferrersWithCapabilityAsync(reference, digest, artifactType, cancellationToken)
.ConfigureAwait(false);
return result.Referrers;
}
/// <summary>
/// List referrers and return capability/fallback metadata.
/// </summary>
public async Task<OciReferrersQueryResult> ListReferrersWithCapabilityAsync(
OciImageReference reference,
string digest,
string? artifactType = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(reference);
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
@@ -349,21 +414,70 @@ public sealed class SlicePullService : IDisposable
using var response = await _httpClient.SendAsync(request, cancellationToken)
.ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
if (response.IsSuccessStatusCode)
{
_logger.LogWarning("Failed to list referrers for {Digest}: {Status}", digest, response.StatusCode);
return Array.Empty<OciReferrer>();
var index = await response.Content.ReadFromJsonAsync<OciReferrersIndex>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
var referrers = (IReadOnlyList<OciReferrer>?)index?.Manifests ?? Array.Empty<OciReferrer>();
return new OciReferrersQueryResult
{
Referrers = referrers,
Capability = OciReferrersCapability.Supported,
FallbackUsed = false
};
}
var index = await response.Content.ReadFromJsonAsync<OciReferrersIndex>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
if (_options.EnableReferrersFallback && IsReferrersUnsupportedStatus(response.StatusCode))
{
var fallback = await ListFallbackTagReferrersAsync(reference, digest, artifactType, cancellationToken)
.ConfigureAwait(false);
return (IReadOnlyList<OciReferrer>?)index?.Manifests ?? Array.Empty<OciReferrer>();
_logger.LogWarning(
"OCI referrers API unsupported for {Registry}/{Repository}@{Digest} (status {StatusCode}); fallback tags used={FallbackUsed}, discovered={Count}",
reference.Registry,
reference.Repository,
digest,
(int)response.StatusCode,
true,
fallback.Count);
return new OciReferrersQueryResult
{
Referrers = fallback,
Capability = OciReferrersCapability.Unsupported,
FallbackUsed = true,
StatusCode = (int)response.StatusCode,
FailureReason = $"referrers_unsupported:{response.StatusCode}"
};
}
_logger.LogWarning(
"Failed to list referrers for {Registry}/{Repository}@{Digest} with status {StatusCode}",
reference.Registry,
reference.Repository,
digest,
response.StatusCode);
return new OciReferrersQueryResult
{
Referrers = Array.Empty<OciReferrer>(),
Capability = OciReferrersCapability.Unavailable,
FallbackUsed = false,
StatusCode = (int)response.StatusCode,
FailureReason = $"referrers_error:{response.StatusCode}"
};
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
_logger.LogError(ex, "Failed to list referrers for {Digest}", digest);
return Array.Empty<OciReferrer>();
_logger.LogError(ex, "Failed to list referrers for {Registry}/{Repository}@{Digest}", reference.Registry, reference.Repository, digest);
return new OciReferrersQueryResult
{
Referrers = Array.Empty<OciReferrer>(),
Capability = OciReferrersCapability.Unavailable,
FallbackUsed = false,
FailureReason = ex.GetType().Name
};
}
}
@@ -372,7 +486,114 @@ public sealed class SlicePullService : IDisposable
// HttpClient typically managed externally
}
private async Task<(byte[]? Envelope, bool Verified)> FetchAndVerifyDsseAsync(
/// <summary>
/// Deterministic fallback for registries without the referrers API: lists all tags,
/// keeps those that look like referrer tags, fetches each candidate manifest, and
/// returns the ones whose <c>subject.digest</c> matches the requested digest.
/// Failures degrade to an empty result rather than throwing.
/// </summary>
private async Task<IReadOnlyList<OciReferrer>> ListFallbackTagReferrersAsync(
    OciImageReference reference,
    string digest,
    string? artifactType,
    CancellationToken cancellationToken)
{
    var tagsUrl = $"https://{reference.Registry}/v2/{reference.Repository}/tags/list";
    using var tagsRequest = new HttpRequestMessage(HttpMethod.Get, tagsUrl);
    await _authorization.AuthorizeRequestAsync(tagsRequest, reference, cancellationToken).ConfigureAwait(false);

    using var tagsResponse = await _httpClient.SendAsync(tagsRequest, cancellationToken).ConfigureAwait(false);
    if (!tagsResponse.IsSuccessStatusCode)
    {
        _logger.LogWarning(
            "Fallback tag discovery failed for {Registry}/{Repository} with status {StatusCode}",
            reference.Registry,
            reference.Repository,
            tagsResponse.StatusCode);
        return Array.Empty<OciReferrer>();
    }

    var tagList = await tagsResponse.Content.ReadFromJsonAsync<OciTagsList>(JsonOptions, cancellationToken)
        .ConfigureAwait(false);
    var tags = tagList?.Tags ?? Array.Empty<string>();
    if (tags.Count == 0)
    {
        return Array.Empty<OciReferrer>();
    }

    var results = new List<OciReferrer>();
    // Ordinal tag ordering keeps the discovered referrer list deterministic.
    foreach (var tag in tags.OrderBy(static tag => tag, StringComparer.Ordinal))
    {
        if (!LooksLikeFallbackReferrerTag(tag, artifactType))
        {
            continue;
        }

        var manifestUrl = $"https://{reference.Registry}/v2/{reference.Repository}/manifests/{Uri.EscapeDataString(tag)}";
        using var manifestRequest = new HttpRequestMessage(HttpMethod.Get, manifestUrl);
        manifestRequest.Headers.Accept.ParseAdd(OciMediaTypes.ImageManifest);
        await _authorization.AuthorizeRequestAsync(manifestRequest, reference, cancellationToken).ConfigureAwait(false);

        using var manifestResponse = await _httpClient.SendAsync(manifestRequest, cancellationToken).ConfigureAwait(false);
        if (!manifestResponse.IsSuccessStatusCode)
        {
            // Unreadable candidates are silently skipped; fallback is best-effort.
            continue;
        }

        var manifest = await manifestResponse.Content.ReadFromJsonAsync<OciManifest>(JsonOptions, cancellationToken)
            .ConfigureAwait(false);
        // Only manifests explicitly pointing at the requested subject digest count.
        if (!string.Equals(manifest?.Subject?.Digest, digest, StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }

        // Registries usually echo the manifest digest in this header; null otherwise.
        var descriptorDigest = manifestResponse.Headers.TryGetValues("Docker-Content-Digest", out var digestValues)
            ? digestValues.FirstOrDefault()
            : null;

        results.Add(new OciReferrer
        {
            MediaType = manifest?.MediaType ?? OciMediaTypes.ImageManifest,
            Digest = descriptorDigest,
            ArtifactType = manifest?.ArtifactType,
            Size = manifestResponse.Content.Headers.ContentLength ?? 0,
            Annotations = manifest?.Annotations
        });
    }

    return results;
}
/// <summary>
/// Status codes that indicate the registry does not implement the OCI referrers
/// API (as opposed to a transient failure), enabling tag-based fallback.
/// </summary>
private static bool IsReferrersUnsupportedStatus(HttpStatusCode statusCode) => statusCode switch
{
    HttpStatusCode.NotFound => true,
    HttpStatusCode.MethodNotAllowed => true,
    HttpStatusCode.NotAcceptable => true,
    HttpStatusCode.BadRequest => true,
    _ => false,
};
/// <summary>
/// Heuristic for tag-based referrer fallback: accepts tags that start with a
/// configured referrer prefix, or that embed a tag-friendly token derived from
/// the requested artifact type (e.g. "application/vnd.foo.bar+json" -> "foo-bar-json").
/// </summary>
/// <param name="tag">Candidate registry tag; blank tags never match.</param>
/// <param name="artifactType">Optional artifact media type to match against the tag.</param>
/// <returns>True when the tag should be fetched as a fallback referrer candidate.</returns>
private bool LooksLikeFallbackReferrerTag(string tag, string? artifactType)
{
    if (string.IsNullOrWhiteSpace(tag))
    {
        return false;
    }

    if (_options.ReferrerTagPrefixes.Count > 0 &&
        _options.ReferrerTagPrefixes.Any(prefix => tag.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)))
    {
        return true;
    }

    if (string.IsNullOrWhiteSpace(artifactType))
    {
        return false;
    }

    // Normalize the media type into a tag-friendly token. The Contains comparison
    // below is already case-insensitive, so the previous ToLowerInvariant() call
    // was redundant and has been removed.
    var token = artifactType
        .Replace("application/vnd.", string.Empty, StringComparison.OrdinalIgnoreCase)
        .Replace('+', '-')
        .Replace('/', '-')
        .Replace('.', '-');
    return tag.Contains(token, StringComparison.OrdinalIgnoreCase);
}
private async Task<DsseFetchVerificationResult> FetchAndVerifyDsseAsync(
OciImageReference reference,
string digest,
byte[] payload,
@@ -390,22 +611,71 @@ public sealed class SlicePullService : IDisposable
if (!response.IsSuccessStatusCode)
{
return (null, false);
return DsseFetchVerificationResult.Failed(
envelope: null,
failureReason: $"dsse_fetch_failed:{response.StatusCode}");
}
var envelopeBytes = await response.Content.ReadAsByteArrayAsync(cancellationToken)
.ConfigureAwait(false);
// TODO: Actual DSSE verification using configured trust roots
// For now, just return the envelope
_logger.LogDebug("DSSE envelope fetched, verification pending trust root configuration");
DsseEnvelope? envelope;
try
{
envelope = JsonSerializer.Deserialize<DsseEnvelope>(envelopeBytes, JsonOptions);
}
catch (JsonException)
{
return DsseFetchVerificationResult.Failed(envelopeBytes, "dsse_invalid_json");
}
return (envelopeBytes, false);
if (envelope is null)
{
return DsseFetchVerificationResult.Failed(envelopeBytes, "dsse_invalid_envelope");
}
if (!TryDecodeBase64(envelope.Payload, out var envelopePayload))
{
return DsseFetchVerificationResult.Failed(envelopeBytes, "dsse_payload_not_base64");
}
if (!payload.AsSpan().SequenceEqual(envelopePayload))
{
return DsseFetchVerificationResult.Failed(envelopeBytes, "dsse_payload_mismatch");
}
if (_dsseSigningService is null)
{
_logger.LogWarning("DSSE envelope fetched but verification service is not configured.");
return DsseFetchVerificationResult.Failed(envelopeBytes, "dsse_verifier_not_configured");
}
var verification = await _dsseSigningService.VerifyAsync(envelope, cancellationToken).ConfigureAwait(false);
if (!verification.IsValid)
{
return DsseFetchVerificationResult.Failed(envelopeBytes, verification.FailureReason ?? "dsse_signature_invalid");
}
return DsseFetchVerificationResult.Success(envelopeBytes);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to fetch/verify DSSE envelope");
return (null, false);
return DsseFetchVerificationResult.Failed(null, ex.GetType().Name);
}
}
/// <summary>
/// Try-pattern wrapper over <see cref="Convert.FromBase64String"/>: yields the
/// decoded bytes on success, or an empty buffer and false on malformed input.
/// </summary>
private static bool TryDecodeBase64(string value, out byte[] bytes)
{
    try
    {
        bytes = Convert.FromBase64String(value);
    }
    catch (FormatException)
    {
        bytes = Array.Empty<byte>();
        return false;
    }

    return true;
}
@@ -455,6 +725,8 @@ public sealed class SlicePullService : IDisposable
public string? MediaType { get; init; }
public string? ArtifactType { get; init; }
public OciDescriptor? Config { get; init; }
public OciDescriptor? Subject { get; init; }
public Dictionary<string, string>? Annotations { get; init; }
public List<OciDescriptor>? Layers { get; init; }
}
@@ -471,6 +743,28 @@ public sealed class SlicePullService : IDisposable
public string? MediaType { get; init; }
public List<OciReferrer>? Manifests { get; init; }
}
// Wire shape for the registry's /v2/{repository}/tags/list endpoint.
private sealed record OciTagsList
{
    // Repository name as echoed by the registry.
    public string? Name { get; init; }
    // Tags in the repository; may be null/empty when the repository has none.
    public IReadOnlyList<string>? Tags { get; init; }
}
// Outcome of fetching and verifying a DSSE envelope for a slice payload.
private sealed record DsseFetchVerificationResult
{
    // Raw envelope bytes as fetched; null when the fetch itself failed.
    public byte[]? Envelope { get; init; }
    // True only when the payload matched and the signing service validated the envelope.
    public bool Verified { get; init; }
    // Machine-readable reason (e.g. "dsse_payload_mismatch") when Verified is false.
    public string? FailureReason { get; init; }

    public static DsseFetchVerificationResult Success(byte[] envelope)
        => new() { Envelope = envelope, Verified = true };

    public static DsseFetchVerificationResult Failed(byte[]? envelope, string failureReason)
        => new() { Envelope = envelope, Verified = false, FailureReason = failureReason };
}
}
/// <summary>

View File

@@ -10,6 +10,7 @@ namespace StellaOps.Scanner.Storage.EfCore.Context;
public partial class ScannerDbContext : DbContext
{
private readonly string _schemaName;
internal string SchemaName => _schemaName;
public ScannerDbContext(DbContextOptions<ScannerDbContext> options, string? schemaName = null)
: base(options)

View File

@@ -13,6 +13,8 @@ namespace StellaOps.Scanner.Storage.Postgres;
public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDriftResultRepository
{
private const string UndefinedTableSqlState = "42P01";
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
@@ -37,9 +39,179 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
{
ArgumentNullException.ThrowIfNull(result);
var tenantScope = ScannerTenantScope.Resolve(tenantId);
Exception? lastFailure = null;
foreach (var schema in GetSchemaCandidates())
{
try
{
await StoreForSchemaAsync(
result,
tenantScope.TenantContext,
tenantScope.TenantId,
schema,
ct)
.ConfigureAwait(false);
return;
}
catch (PostgresException ex) when (IsUndefinedTable(ex))
{
lastFailure = ex;
_logger.LogWarning(
ex,
"Drift tables missing in schema {Schema}; trying fallback schema for base={BaseScanId} head={HeadScanId}.",
schema,
result.BaseScanId,
result.HeadScanId);
}
}
throw lastFailure ?? new InvalidOperationException("Unable to store reachability drift result in any configured schema.");
}
/// <summary>
/// Returns the most recent drift result (ordered by DetectedAt descending) for the
/// given head scan and language, or null when none is found in any schema candidate.
/// A schema whose drift tables are missing (Postgres 42P01) is skipped in favor of
/// the next candidate rather than failing the call.
/// </summary>
public async Task<ReachabilityDriftResult?> TryGetLatestForHeadAsync(string headScanId, string language, CancellationToken ct = default, string? tenantId = null)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(headScanId);
    ArgumentException.ThrowIfNullOrWhiteSpace(language);

    var tenantScope = ScannerTenantScope.Resolve(tenantId);
    var trimmedHead = headScanId.Trim();
    var trimmedLang = language.Trim();

    foreach (var schema in GetSchemaCandidates())
    {
        try
        {
            await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
            await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, schema);

            // Eager-load sinks so the mapped result is complete in one round trip.
            var entity = await dbContext.ReachabilityDriftResults
                .Include(e => e.DriftedSinks)
                .Where(e => e.TenantId == tenantScope.TenantId && e.HeadScanId == trimmedHead && e.Language == trimmedLang)
                .OrderByDescending(e => e.DetectedAt)
                .FirstOrDefaultAsync(ct)
                .ConfigureAwait(false);

            if (entity is not null)
            {
                return MapEntityToResult(entity);
            }
        }
        catch (PostgresException ex) when (IsUndefinedTable(ex))
        {
            // Undefined-table: this schema has no drift tables yet; try the next candidate.
            _logger.LogWarning(ex, "Drift table missing in schema {Schema} during TryGetLatestForHeadAsync; trying fallback.", schema);
        }
    }

    return null;
}
/// <summary>
/// Looks up a drift result by id for the resolved tenant, searching each schema
/// candidate in order. Returns null when no schema contains the row; schemas with
/// missing drift tables (Postgres 42P01) are skipped.
/// </summary>
public async Task<ReachabilityDriftResult?> TryGetByIdAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
    var tenantScope = ScannerTenantScope.Resolve(tenantId);
    foreach (var schema in GetSchemaCandidates())
    {
        try
        {
            await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
            await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, schema);

            // Include sinks so the returned model carries the full drift detail.
            var entity = await dbContext.ReachabilityDriftResults
                .Include(e => e.DriftedSinks)
                .FirstOrDefaultAsync(e => e.TenantId == tenantScope.TenantId && e.Id == driftId, ct)
                .ConfigureAwait(false);

            if (entity is not null)
            {
                return MapEntityToResult(entity);
            }
        }
        catch (PostgresException ex) when (IsUndefinedTable(ex))
        {
            // Undefined-table: fall through to the next schema candidate.
            _logger.LogWarning(ex, "Drift table missing in schema {Schema} during TryGetByIdAsync; trying fallback.", schema);
        }
    }

    return null;
}
/// <summary>
/// Checks whether a drift result exists for the resolved tenant; probes each
/// schema candidate until one has the table and can answer.
/// </summary>
public async Task<bool> ExistsAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
    var scope = ScannerTenantScope.Resolve(tenantId);
    foreach (var candidateSchema in GetSchemaCandidates())
    {
        try
        {
            await using var connection = await _dataSource.OpenConnectionAsync(scope.TenantContext, ct).ConfigureAwait(false);
            await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, candidateSchema);

            var found = await dbContext.ReachabilityDriftResults
                .AnyAsync(e => e.TenantId == scope.TenantId && e.Id == driftId, ct)
                .ConfigureAwait(false);
            return found;
        }
        catch (PostgresException ex) when (IsUndefinedTable(ex))
        {
            _logger.LogWarning(ex, "Drift table missing in schema {Schema} during ExistsAsync; trying fallback.", candidateSchema);
        }
    }

    return false;
}
/// <summary>
/// Lists drifted sinks for a drift result filtered by direction, paged
/// deterministically by sink node id. Returns an empty list when no schema
/// candidate has the sink table.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="offset"/> is negative or <paramref name="limit"/> is not positive.
/// </exception>
public async Task<IReadOnlyList<DriftedSink>> ListSinksAsync(
    Guid driftId,
    DriftDirection direction,
    int offset,
    int limit,
    CancellationToken ct = default,
    string? tenantId = null)
{
    // Throw helpers preserve the original exception type with clearer messages.
    ArgumentOutOfRangeException.ThrowIfNegative(offset);
    ArgumentOutOfRangeException.ThrowIfNegativeOrZero(limit);

    var tenantScope = ScannerTenantScope.Resolve(tenantId);
    var directionValue = ToDbValue(direction);
    foreach (var schema in GetSchemaCandidates())
    {
        try
        {
            await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
            await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, schema);

            // Stable ordering by sink node id keeps paging deterministic across calls.
            var entities = await dbContext.DriftedSinks
                .Where(e => e.TenantId == tenantScope.TenantId && e.DriftResultId == driftId && e.Direction == directionValue)
                .OrderBy(e => e.SinkNodeId)
                .Skip(offset)
                .Take(limit)
                .ToListAsync(ct)
                .ConfigureAwait(false);

            return entities.Select(e => MapSinkEntityToModel(e, direction)).ToList();
        }
        catch (PostgresException ex) when (IsUndefinedTable(ex))
        {
            // Undefined-table: fall through to the next schema candidate.
            _logger.LogWarning(ex, "Drift sink table missing in schema {Schema} during ListSinksAsync; trying fallback.", schema);
        }
    }

    return [];
}
private async Task StoreForSchemaAsync(
ReachabilityDriftResult result,
string tenantContext,
Guid tenantId,
string schemaName,
CancellationToken ct)
{
var driftResultsTable = $"{schemaName}.reachability_drift_results";
var driftedSinksTable = $"{schemaName}.drifted_sinks";
var insertResultSql = $"""
INSERT INTO {DriftResultsTable} (
INSERT INTO {driftResultsTable} (
id, tenant_id, base_scan_id, head_scan_id, language,
newly_reachable_count, newly_unreachable_count, detected_at, result_digest
) VALUES (
@@ -53,12 +225,12 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
""";
var deleteSinksSql = $"""
DELETE FROM {DriftedSinksTable}
DELETE FROM {driftedSinksTable}
WHERE tenant_id = $1 AND drift_result_id = $2
""";
var insertSinkSql = $"""
INSERT INTO {DriftedSinksTable} (
INSERT INTO {driftedSinksTable} (
id, tenant_id, drift_result_id, sink_node_id, symbol,
sink_category, direction, cause_kind, cause_description,
cause_symbol, cause_file, cause_line, code_change_id,
@@ -80,15 +252,14 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
associated_vulns = EXCLUDED.associated_vulns
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantContext, ct).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false);
try
{
// Insert drift result header and get the returned id
await using var insertCmd = new NpgsqlCommand(insertResultSql, connection, transaction);
insertCmd.Parameters.AddWithValue(result.Id);
insertCmd.Parameters.AddWithValue(tenantScope.TenantId);
insertCmd.Parameters.AddWithValue(tenantId);
insertCmd.Parameters.AddWithValue(result.BaseScanId.Trim());
insertCmd.Parameters.AddWithValue(result.HeadScanId.Trim());
insertCmd.Parameters.AddWithValue(result.Language.Trim());
@@ -100,15 +271,13 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
var driftIdObj = await insertCmd.ExecuteScalarAsync(ct).ConfigureAwait(false);
var driftId = (Guid)driftIdObj!;
// Delete existing sinks for this drift result
await using var deleteCmd = new NpgsqlCommand(deleteSinksSql, connection, transaction);
deleteCmd.Parameters.AddWithValue(tenantScope.TenantId);
deleteCmd.Parameters.AddWithValue(tenantId);
deleteCmd.Parameters.AddWithValue(driftId);
await deleteCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
// Insert all sink rows
var sinks = EnumerateSinkParams(driftId, tenantScope.TenantId, result.NewlyReachable, DriftDirection.BecameReachable)
.Concat(EnumerateSinkParams(driftId, tenantScope.TenantId, result.NewlyUnreachable, DriftDirection.BecameUnreachable))
var sinks = EnumerateSinkParams(driftId, tenantId, result.NewlyReachable, DriftDirection.BecameReachable)
.Concat(EnumerateSinkParams(driftId, tenantId, result.NewlyUnreachable, DriftDirection.BecameUnreachable))
.ToList();
foreach (var sink in sinks)
@@ -134,104 +303,41 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
}
await transaction.CommitAsync(ct).ConfigureAwait(false);
_logger.LogDebug(
"Stored drift result drift={DriftId} base={BaseScanId} head={HeadScanId} lang={Language}",
"Stored drift result drift={DriftId} base={BaseScanId} head={HeadScanId} lang={Language} schema={Schema}",
driftId,
result.BaseScanId,
result.HeadScanId,
result.Language);
result.Language,
schemaName);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store drift result base={BaseScanId} head={HeadScanId}", result.BaseScanId, result.HeadScanId);
_logger.LogError(
ex,
"Failed to store drift result base={BaseScanId} head={HeadScanId} schema={Schema}",
result.BaseScanId,
result.HeadScanId,
schemaName);
await transaction.RollbackAsync(ct).ConfigureAwait(false);
throw;
}
}
public async Task<ReachabilityDriftResult?> TryGetLatestForHeadAsync(string headScanId, string language, CancellationToken ct = default, string? tenantId = null)
private IEnumerable<string> GetSchemaCandidates()
{
ArgumentException.ThrowIfNullOrWhiteSpace(headScanId);
ArgumentException.ThrowIfNullOrWhiteSpace(language);
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var trimmedHead = headScanId.Trim();
var trimmedLang = language.Trim();
var primary = SchemaName;
yield return primary;
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var entity = await dbContext.ReachabilityDriftResults
.Include(e => e.DriftedSinks)
.Where(e => e.TenantId == tenantScope.TenantId && e.HeadScanId == trimmedHead && e.Language == trimmedLang)
.OrderByDescending(e => e.DetectedAt)
.FirstOrDefaultAsync(ct)
.ConfigureAwait(false);
return entity is not null ? MapEntityToResult(entity) : null;
}
public async Task<ReachabilityDriftResult?> TryGetByIdAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
var tenantScope = ScannerTenantScope.Resolve(tenantId);
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var entity = await dbContext.ReachabilityDriftResults
.Include(e => e.DriftedSinks)
.FirstOrDefaultAsync(e => e.TenantId == tenantScope.TenantId && e.Id == driftId, ct)
.ConfigureAwait(false);
return entity is not null ? MapEntityToResult(entity) : null;
}
public async Task<bool> ExistsAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
var tenantScope = ScannerTenantScope.Resolve(tenantId);
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
return await dbContext.ReachabilityDriftResults
.AnyAsync(e => e.TenantId == tenantScope.TenantId && e.Id == driftId, ct)
.ConfigureAwait(false);
}
public async Task<IReadOnlyList<DriftedSink>> ListSinksAsync(
Guid driftId,
DriftDirection direction,
int offset,
int limit,
CancellationToken ct = default,
string? tenantId = null)
{
if (offset < 0)
if (!string.Equals(primary, ScannerDataSource.DefaultSchema, StringComparison.Ordinal))
{
throw new ArgumentOutOfRangeException(nameof(offset));
yield return ScannerDataSource.DefaultSchema;
}
if (limit <= 0)
{
throw new ArgumentOutOfRangeException(nameof(limit));
}
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var directionValue = ToDbValue(direction);
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var entities = await dbContext.DriftedSinks
.Where(e => e.TenantId == tenantScope.TenantId && e.DriftResultId == driftId && e.Direction == directionValue)
.OrderBy(e => e.SinkNodeId)
.Skip(offset)
.Take(limit)
.ToListAsync(ct)
.ConfigureAwait(false);
return entities.Select(e => MapSinkEntityToModel(e, direction)).ToList();
}
private static bool IsUndefinedTable(PostgresException ex)
=> string.Equals(ex.SqlState, UndefinedTableSqlState, StringComparison.Ordinal);
private static IEnumerable<SinkInsertParams> EnumerateSinkParams(
Guid driftId,
Guid tenantId,

View File

@@ -1,4 +1,5 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Npgsql;
using StellaOps.Scanner.Storage.EfCore.CompiledModels;
using StellaOps.Scanner.Storage.EfCore.Context;
@@ -18,7 +19,8 @@ internal static class ScannerDbContextFactory
: schemaName.Trim();
var optionsBuilder = new DbContextOptionsBuilder<ScannerDbContext>()
.UseNpgsql(connection, npgsql => npgsql.CommandTimeout(commandTimeoutSeconds));
.UseNpgsql(connection, npgsql => npgsql.CommandTimeout(commandTimeoutSeconds))
.ReplaceService<IModelCacheKeyFactory, ScannerDbContextModelCacheKeyFactory>();
if (string.Equals(normalizedSchema, ScannerStorageDefaults.DefaultSchemaName, StringComparison.Ordinal))
{
@@ -27,4 +29,17 @@ internal static class ScannerDbContextFactory
return new ScannerDbContext(optionsBuilder.Options, normalizedSchema);
}
private sealed class ScannerDbContextModelCacheKeyFactory : IModelCacheKeyFactory
{
public object Create(DbContext context, bool designTime)
{
if (context is ScannerDbContext scannerContext)
{
return (context.GetType(), scannerContext.SchemaName, designTime);
}
return (context.GetType(), designTime);
}
}
}

View File

@@ -19,7 +19,7 @@
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "06c93b840f9cc3e032454ba4b5745967ecb73b0b4ced1d827f98a36d7747702a"
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
},
@@ -43,7 +43,7 @@
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "06c93b840f9cc3e032454ba4b5745967ecb73b0b4ced1d827f98a36d7747702a"
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
},
@@ -67,7 +67,7 @@
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "06c93b840f9cc3e032454ba4b5745967ecb73b0b4ced1d827f98a36d7747702a"
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": ".layers/layer0/app/bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "c04e2c61eb2caa5103dc414cbb94fb4a0e79fff444130007d54bcd2f32547dae"
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
},
{
"kind": "metadata",
"source": "resolved",
"locator": ".layers/layer0/app/bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "c04e2c61eb2caa5103dc414cbb94fb4a0e79fff444130007d54bcd2f32547dae"
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
}
]
}

View File

@@ -23,14 +23,14 @@
"source": "integrity",
"locator": "bun.lock:packages[@company/internal-pkg@1.0.0]",
"value": "sha512-customhash123==",
"sha256": "dccabd071efe518efaea20482d057f2cd6295b1f4c43c1dc08642cefb2377a8d"
"sha256": "eb3bacf736d4a1b3cf9e02357afc1add9f20323916ce62cf8748c9ad9a80f195"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[@company/internal-pkg@1.0.0]",
"value": "https://npm.company.com/@company/internal-pkg/-/internal-pkg-1.0.0.tgz",
"sha256": "dccabd071efe518efaea20482d057f2cd6295b1f4c43c1dc08642cefb2377a8d"
"sha256": "eb3bacf736d4a1b3cf9e02357afc1add9f20323916ce62cf8748c9ad9a80f195"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[debug@4.3.4]",
"value": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX\u002B7G/vCNNhehwxfkQ==",
"sha256": "18543ebd312e9698d27463883e5e2219d34d1b19b0fe80333c52a4b068bfe1b8"
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[debug@4.3.4]",
"value": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"sha256": "18543ebd312e9698d27463883e5e2219d34d1b19b0fe80333c52a4b068bfe1b8"
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
}
]
},
@@ -51,14 +51,14 @@
"source": "integrity",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "18543ebd312e9698d27463883e5e2219d34d1b19b0fe80333c52a4b068bfe1b8"
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "18543ebd312e9698d27463883e5e2219d34d1b19b0fe80333c52a4b068bfe1b8"
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
}
]
}

View File

@@ -22,7 +22,7 @@
"source": "resolved",
"locator": "bun.lock:packages[my-git-pkg@1.0.0]",
"value": "git\u002Bhttps://github.com/user/my-git-pkg.git#abc123def456",
"sha256": "214891708016d78e2960295b906bfb6db42fc2c98f2cf44bf970996c519e7c42"
"sha256": "819a7efc185bd1314d21aa7fdc0e5b2134a0c9b758ecd9daa62cb6cba2feddd0"
}
]
}

View File

@@ -19,14 +19,14 @@
"source": "integrity",
"locator": "bun.lock:packages[is-number@6.0.0]",
"value": "sha512-Wu1VZAVuL1snqOnHLxJ0l2p3pjlzLnMcJ8gJhaTZVfP7VFKN7fSJ8X/gR0qFCLwfFJ0Rqd3IxfS\u002BTY/Lc1Q7Pw==",
"sha256": "655d97c9bbccfc7380a6a217cd993129bdaec1fedf2667fc3c836a204364889c"
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[is-number@6.0.0]",
"value": "https://registry.npmjs.org/is-number/-/is-number-6.0.0.tgz",
"sha256": "655d97c9bbccfc7380a6a217cd993129bdaec1fedf2667fc3c836a204364889c"
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
}
]
},
@@ -51,14 +51,14 @@
"source": "integrity",
"locator": "bun.lock:packages[is-odd@3.0.1]",
"value": "sha512-CQpnWPrDwmP1\u002BSMHXvTXAoSEu2mCPgMU0VKt1WcA7D8VXCo4HfVNlUbD1k8Tg0BVDX/LhyRaZqKqiS4vI6tTHg==",
"sha256": "655d97c9bbccfc7380a6a217cd993129bdaec1fedf2667fc3c836a204364889c"
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[is-odd@3.0.1]",
"value": "https://registry.npmjs.org/is-odd/-/is-odd-3.0.1.tgz",
"sha256": "655d97c9bbccfc7380a6a217cd993129bdaec1fedf2667fc3c836a204364889c"
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q==",
"sha256": "7300c4967678f306370e7faff8e51450a42666ea54a4859a573e14d7de32f7d8"
"sha256": "7b34fdbdf0cb3e0d07e25f7d7f452491dcfad421138449217a1c20b2f66a6475"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "7300c4967678f306370e7faff8e51450a42666ea54a4859a573e14d7de32f7d8"
"sha256": "7b34fdbdf0cb3e0d07e25f7d7f452491dcfad421138449217a1c20b2f66a6475"
}
]
}

View File

@@ -19,7 +19,7 @@
"source": "resolved",
"locator": "bun.lock:packages[dev-only@1.0.0]",
"value": "https://registry.npmjs.org/dev-only/-/dev-only-1.0.0.tgz",
"sha256": "c6eb8a4235f270df8b7dcc27c35f72323101140839b8e15c6ea4e58865dd57cc"
"sha256": "4d40cc185e492e4544a6dc3b17cdfd77096e4d4260569a243eb694befbada6ac"
}
]
},
@@ -44,7 +44,7 @@
"source": "resolved",
"locator": "bun.lock:packages[dev-pkg@1.0.0]",
"value": "https://registry.npmjs.org/dev-pkg/-/dev-pkg-1.0.0.tgz",
"sha256": "c6eb8a4235f270df8b7dcc27c35f72323101140839b8e15c6ea4e58865dd57cc"
"sha256": "4d40cc185e492e4544a6dc3b17cdfd77096e4d4260569a243eb694befbada6ac"
}
]
},
@@ -68,7 +68,7 @@
"source": "resolved",
"locator": "bun.lock:packages[prod-pkg@1.0.0]",
"value": "https://registry.npmjs.org/prod-pkg/-/prod-pkg-1.0.0.tgz",
"sha256": "c6eb8a4235f270df8b7dcc27c35f72323101140839b8e15c6ea4e58865dd57cc"
"sha256": "4d40cc185e492e4544a6dc3b17cdfd77096e4d4260569a243eb694befbada6ac"
}
]
},
@@ -91,7 +91,7 @@
"source": "resolved",
"locator": "bun.lock:packages[shared@1.0.0]",
"value": "https://registry.npmjs.org/shared/-/shared-1.0.0.tgz",
"sha256": "c6eb8a4235f270df8b7dcc27c35f72323101140839b8e15c6ea4e58865dd57cc"
"sha256": "4d40cc185e492e4544a6dc3b17cdfd77096e4d4260569a243eb694befbada6ac"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "c04e2c61eb2caa5103dc414cbb94fb4a0e79fff444130007d54bcd2f32547dae"
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "c04e2c61eb2caa5103dc414cbb94fb4a0e79fff444130007d54bcd2f32547dae"
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q==",
"sha256": "6fad4629ef109a5bb788e8c4ad89fd5c32aec20302147091c3c12d46b85b6a10"
"sha256": "8a0d37c3761b81514ee397c3836ccff48167ce6aa1afdfd484ca7679e586df4a"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "6fad4629ef109a5bb788e8c4ad89fd5c32aec20302147091c3c12d46b85b6a10"
"sha256": "8a0d37c3761b81514ee397c3836ccff48167ce6aa1afdfd484ca7679e586df4a"
}
]
},
@@ -52,14 +52,14 @@
"source": "integrity",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "6fad4629ef109a5bb788e8c4ad89fd5c32aec20302147091c3c12d46b85b6a10"
"sha256": "8a0d37c3761b81514ee397c3836ccff48167ce6aa1afdfd484ca7679e586df4a"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "6fad4629ef109a5bb788e8c4ad89fd5c32aec20302147091c3c12d46b85b6a10"
"sha256": "8a0d37c3761b81514ee397c3836ccff48167ce6aa1afdfd484ca7679e586df4a"
}
]
}

View File

@@ -21,7 +21,7 @@
"source": "resolved",
"locator": "bun.lock:packages[file-pkg@file:../file-pkg.tgz]",
"value": "file:../file-pkg.tgz",
"sha256": "116d434e799d69c9cb3dec4cbb40ae56d0d6e5a126b34ee95d9eb0b0c7970bae"
"sha256": "d7ae02476b6737ea3056226ea69e36bacb664feacd7a5223bc66ea287757656b"
}
]
},
@@ -47,7 +47,7 @@
"source": "resolved",
"locator": "bun.lock:packages[link-pkg@link:../link-pkg]",
"value": "link:../link-pkg",
"sha256": "116d434e799d69c9cb3dec4cbb40ae56d0d6e5a126b34ee95d9eb0b0c7970bae"
"sha256": "d7ae02476b6737ea3056226ea69e36bacb664feacd7a5223bc66ea287757656b"
}
]
},
@@ -73,7 +73,7 @@
"source": "resolved",
"locator": "bun.lock:packages[local-pkg@workspace:*]",
"value": "workspace:packages/local-pkg",
"sha256": "116d434e799d69c9cb3dec4cbb40ae56d0d6e5a126b34ee95d9eb0b0c7970bae"
"sha256": "d7ae02476b6737ea3056226ea69e36bacb664feacd7a5223bc66ea287757656b"
}
]
}

View File

@@ -23,14 +23,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.20]",
"value": "sha512-lodash-420",
"sha256": "b74a731eebc295f96d138d8f46d00893d3d352405ca422aa097c04ff5d5b40a6"
"sha256": "e83cd6aa810c1a8af47d6ae0eb621a8a5dc13b23ec08925ad9b5ff4d035cfc7c"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.20]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz",
"sha256": "b74a731eebc295f96d138d8f46d00893d3d352405ca422aa097c04ff5d5b40a6"
"sha256": "e83cd6aa810c1a8af47d6ae0eb621a8a5dc13b23ec08925ad9b5ff4d035cfc7c"
}
]
},
@@ -58,14 +58,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-lodash-421",
"sha256": "b74a731eebc295f96d138d8f46d00893d3d352405ca422aa097c04ff5d5b40a6"
"sha256": "e83cd6aa810c1a8af47d6ae0eb621a8a5dc13b23ec08925ad9b5ff4d035cfc7c"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "b74a731eebc295f96d138d8f46d00893d3d352405ca422aa097c04ff5d5b40a6"
"sha256": "e83cd6aa810c1a8af47d6ae0eb621a8a5dc13b23ec08925ad9b5ff4d035cfc7c"
}
]
}

View File

@@ -22,14 +22,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q==",
"sha256": "ef266fe016f21c2b74d1c35bad087ffb5fc0913116a48e94037657201a33f812"
"sha256": "61ff5c565c08f6564bd16153c10feba4a171986510aaf40f84fe710eabd180c2"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "ef266fe016f21c2b74d1c35bad087ffb5fc0913116a48e94037657201a33f812"
"sha256": "61ff5c565c08f6564bd16153c10feba4a171986510aaf40f84fe710eabd180c2"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[@babel/core@7.24.0]",
"value": "sha512-fQfkg0Gjkza3nf0c7/w6Xf34BW4YvzNfACRLmmb7XRLa6XHdR\u002BK9AlJlxneFfWYf6uhOzuzZVTjF/8KfndZANw==",
"sha256": "ae452d62d7a3074cdbf5992884428a667d2b6176507524eb9b1e287049a1d6dd"
"sha256": "6ffde82e85e550d36bdb577210cd80c56cbd36c02dbfb4d8ec6ada27643bcd2d"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[@babel/core@7.24.0]",
"value": "https://registry.npmjs.org/@babel/core/-/core-7.24.0.tgz",
"sha256": "ae452d62d7a3074cdbf5992884428a667d2b6176507524eb9b1e287049a1d6dd"
"sha256": "6ffde82e85e550d36bdb577210cd80c56cbd36c02dbfb4d8ec6ada27643bcd2d"
}
]
},
@@ -52,14 +52,14 @@
"source": "integrity",
"locator": "bun.lock:packages[@types/node@20.11.0]",
"value": "sha512-o9bjXmDNcF7GbM4CNQpmi\u002BTutCgap/K3w1JyKgxXjVJa7b8XWCF/wPH2E/0Vz9e\u002BV1B3eXX0WCw\u002BINcAobvUag==",
"sha256": "ae452d62d7a3074cdbf5992884428a667d2b6176507524eb9b1e287049a1d6dd"
"sha256": "6ffde82e85e550d36bdb577210cd80c56cbd36c02dbfb4d8ec6ada27643bcd2d"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[@types/node@20.11.0]",
"value": "https://registry.npmjs.org/@types/node/-/node-20.11.0.tgz",
"sha256": "ae452d62d7a3074cdbf5992884428a667d2b6176507524eb9b1e287049a1d6dd"
"sha256": "6ffde82e85e550d36bdb577210cd80c56cbd36c02dbfb4d8ec6ada27643bcd2d"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q==",
"sha256": "ef266fe016f21c2b74d1c35bad087ffb5fc0913116a48e94037657201a33f812"
"sha256": "61ff5c565c08f6564bd16153c10feba4a171986510aaf40f84fe710eabd180c2"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "ef266fe016f21c2b74d1c35bad087ffb5fc0913116a48e94037657201a33f812"
"sha256": "61ff5c565c08f6564bd16153c10feba4a171986510aaf40f84fe710eabd180c2"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[safe-pkg@1.0.0]",
"value": "sha512-abc123",
"sha256": "608750aaec5150b6bb68702165a22d504bb6036fc5150d0b4b005727e21f4ade"
"sha256": "54dd0b2c2f30e59b29970d34350d083b295789e056e849361da5be932d1ef747"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[safe-pkg@1.0.0]",
"value": "https://registry.npmjs.org/safe-pkg/-/safe-pkg-1.0.0.tgz",
"sha256": "608750aaec5150b6bb68702165a22d504bb6036fc5150d0b4b005727e21f4ade"
"sha256": "54dd0b2c2f30e59b29970d34350d083b295789e056e849361da5be932d1ef747"
}
]
}

View File

@@ -20,14 +20,14 @@
"source": "integrity",
"locator": "bun.lock:packages[chalk@5.3.0]",
"value": "sha512-dLitG79d\u002BGV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos\u002Buw7WmWF4wUwBd9jxjocFC2w==",
"sha256": "3c0e7ee425c6a503bc114bb61316021a04115d148eb205ad996c0c320a33f4d1"
"sha256": "8706c5aecdc68ae4f06c6a2f1bfa9e431e473a961c2f32063911febaba0c65cc"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock:packages[chalk@5.3.0]",
"value": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz",
"sha256": "3c0e7ee425c6a503bc114bb61316021a04115d148eb205ad996c0c320a33f4d1"
"sha256": "8706c5aecdc68ae4f06c6a2f1bfa9e431e473a961c2f32063911febaba0c65cc"
}
]
}

View File

@@ -15,7 +15,7 @@
"kind": "file",
"source": "package.json",
"locator": "layers/layer1/app/package.json",
"sha256": "d846f429c41d17adeacfd418431ab4be4857b40a749eeea229d7be91644d6d5d"
"sha256": "23abb943f062b3ccdc18966eb36dfc48dd7ec4b5a6105851484fe2911946ecdd"
}
]
}

View File

@@ -94,7 +94,7 @@
"kind": "file",
"source": "package.json",
"locator": "packages/nested/tool/package.json",
"sha256": "9d7d0f85e36dbcd09eedf4d85a1a53a07f92bf768b1375f18a997ba0ee9295d9"
"sha256": "3011f57f07fab11b4ecb61788319bc9768d2577cafd9f53f37a7cac721fc77cf"
}
]
},

View File

@@ -220,35 +220,53 @@ public sealed class DotNetLanguageAnalyzerTests
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "dotnet", "source-tree-only");
var tempRoot = TestPaths.CreateTemporaryDirectory();
var analyzers = new ILanguageAnalyzer[]
{
new DotNetLanguageAnalyzer()
};
var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
fixturePath,
analyzers,
cancellationToken);
using var document = JsonDocument.Parse(json);
var root = document.RootElement;
Assert.True(root.ValueKind == JsonValueKind.Array, "Result root should be an array.");
Assert.Equal(2, root.GetArrayLength());
// Check that packages are declared-only
foreach (var component in root.EnumerateArray())
try
{
var metadata = component.GetProperty("metadata");
Assert.Equal("true", metadata.GetProperty("declaredOnly").GetString());
Assert.Equal("declared", metadata.GetProperty("provenance").GetString());
}
// Ensure this scenario is truly source-only even if fixture artifacts are present.
File.Copy(
Path.Combine(fixturePath, "Sample.App.csproj"),
Path.Combine(tempRoot, "Sample.App.csproj"),
overwrite: true);
File.Copy(
Path.Combine(fixturePath, "Directory.Packages.props"),
Path.Combine(tempRoot, "Directory.Packages.props"),
overwrite: true);
// Check specific packages
var newtonsoftJson = root.EnumerateArray()
.First(element => element.GetProperty("name").GetString() == "Newtonsoft.Json");
Assert.Equal("13.0.3", newtonsoftJson.GetProperty("version").GetString());
Assert.Equal("pkg:nuget/newtonsoft.json@13.0.3", newtonsoftJson.GetProperty("purl").GetString());
var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
tempRoot,
analyzers,
cancellationToken);
using var document = JsonDocument.Parse(json);
var root = document.RootElement;
Assert.True(root.ValueKind == JsonValueKind.Array, "Result root should be an array.");
Assert.Equal(2, root.GetArrayLength());
// Check that packages are declared-only
foreach (var component in root.EnumerateArray())
{
var metadata = component.GetProperty("metadata");
Assert.Equal("true", metadata.GetProperty("declaredOnly").GetString());
Assert.Equal("declared", metadata.GetProperty("provenance").GetString());
}
// Check specific packages
var newtonsoftJson = root.EnumerateArray()
.First(element => element.GetProperty("name").GetString() == "Newtonsoft.Json");
Assert.Equal("13.0.3", newtonsoftJson.GetProperty("version").GetString());
Assert.Equal("pkg:nuget/newtonsoft.json@13.0.3", newtonsoftJson.GetProperty("purl").GetString());
}
finally
{
TestPaths.SafeDelete(tempRoot);
}
}
[Fact]

View File

@@ -4,7 +4,7 @@
Own test coverage for Cartographer service configuration and behavior.
## Responsibilities
- Maintain `StellaOps.Cartographer.Tests`.
- Maintain `StellaOps.Scanner.Cartographer.Tests`.
- Validate options defaults, validation, and integration wiring.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).

View File

@@ -5,14 +5,14 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Options;
using Xunit;
namespace StellaOps.Cartographer.Tests;
namespace StellaOps.Scanner.Cartographer.Tests;
public class CartographerProgramTests
{
[Fact]
public async Task HealthEndpoints_ReturnOk()
{
using var factory = new WebApplicationFactory<StellaOps.Cartographer.CartographerEntryPoint>();
using var factory = new WebApplicationFactory<StellaOps.Scanner.Cartographer.CartographerEntryPoint>();
using var client = factory.CreateClient();
var cancellationToken = TestContext.Current.CancellationToken;
@@ -26,7 +26,7 @@ public class CartographerProgramTests
[Fact]
public void AuthorityOptions_InvalidIssuer_ThrowsOnStart()
{
using var factory = new WebApplicationFactory<StellaOps.Cartographer.CartographerEntryPoint>().WithWebHostBuilder(builder =>
using var factory = new WebApplicationFactory<StellaOps.Scanner.Cartographer.CartographerEntryPoint>().WithWebHostBuilder(builder =>
{
builder.ConfigureAppConfiguration((_, config) =>
{

View File

@@ -1,8 +1,8 @@
using StellaOps.Auth.Abstractions;
using StellaOps.Cartographer.Options;
using StellaOps.Scanner.Cartographer.Options;
using Xunit;
namespace StellaOps.Cartographer.Tests.Options;
namespace StellaOps.Scanner.Cartographer.Tests.Options;
public class CartographerAuthorityOptionsConfiguratorTests
{

View File

@@ -7,7 +7,7 @@
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.Cartographer/StellaOps.Cartographer.csproj" />
<ProjectReference Include="../../StellaOps.Scanner.Cartographer/StellaOps.Scanner.Cartographer.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -5,7 +5,7 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0135-M | DONE | Maintainability audit for StellaOps.Cartographer.Tests; revalidated 2026-01-06. |
| AUDIT-0135-T | DONE | Test coverage audit for StellaOps.Cartographer.Tests; revalidated 2026-01-06. |
| AUDIT-0135-M | DONE | Maintainability audit for StellaOps.Scanner.Cartographer.Tests (migrated from StellaOps.Cartographer.Tests); revalidated 2026-01-06. |
| AUDIT-0135-T | DONE | Test coverage audit for StellaOps.Scanner.Cartographer.Tests (migrated from StellaOps.Cartographer.Tests); revalidated 2026-01-06. |
| AUDIT-0135-A | DONE | Waived (test project; revalidated 2026-01-06). |
| REMED-06 | DONE | SOLID review notes captured for SPRINT_20260130_002. |

View File

@@ -0,0 +1,16 @@
{
"spdxVersion": "SPDX-3.0.1",
"dataLicense": "CC0-1.0",
"name": "TestSbom",
"documentNamespace": "https://example.com/test",
"packages": [
{
"name": "Package1",
"version": "1.0.0"
},
{
"name": "Package2",
"version": "2.0.0"
}
]
}

View File

@@ -22,5 +22,6 @@
</ItemGroup>
<ItemGroup>
<None Update="Fixtures\*.json" CopyToOutputDirectory="PreserveNewest" />
<None Update="Snapshots\*.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
</Project>

View File

@@ -183,6 +183,7 @@ public sealed class CompositionRecipeServiceTests
JsonBytes = Array.Empty<byte>(),
JsonSha256 = "sha256:inventory123",
ContentHash = "sha256:inventory123",
CanonicalId = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
JsonMediaType = "application/vnd.cyclonedx+json",
ProtobufBytes = Array.Empty<byte>(),
ProtobufSha256 = "sha256:protobuf123",

View File

@@ -18,7 +18,7 @@
<ItemGroup>
<!-- Excititor: Trust vectors, claim scoring, calibration -->
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="../../../Excititor/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="../../../Concelier/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<!-- Policy: Gates, merge, trust lattice engine -->
<ProjectReference Include="../../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />

View File

@@ -74,6 +74,8 @@ public sealed class ReachabilityResultFactoryTests
StackVerdict verdict,
bool l1Reachable = true,
ConfidenceLevel l1Confidence = ConfidenceLevel.High,
ImmutableArray<CallPath>? paths = null,
ImmutableArray<Entrypoint>? reachingEntrypoints = null,
bool l2Resolved = true,
ConfidenceLevel l2Confidence = ConfidenceLevel.High,
bool l3Gated = false,
@@ -89,6 +91,8 @@ public sealed class ReachabilityResultFactoryTests
StaticCallGraph = new ReachabilityLayer1
{
IsReachable = l1Reachable,
Paths = paths ?? [],
ReachingEntrypoints = reachingEntrypoints ?? [],
Confidence = l1Confidence,
AnalysisMethod = "static-dataflow"
},
@@ -382,23 +386,73 @@ public sealed class ReachabilityResultFactoryTests
}
[Fact]
public async Task CreateResultAsync_ExploitableVerdict_ReturnsUnknownAsPlaceholder()
public async Task CreateResultAsync_ExploitableVerdict_WithPathData_ReturnsAffected()
{
// Arrange - Exploitable verdict returns Unknown placeholder (caller should build PathWitness)
var stack = CreateStackWithVerdict(StackVerdict.Exploitable);
// Arrange
var entrypoint = new Entrypoint(
Name: "GET /orders",
Type: EntrypointType.HttpEndpoint,
Location: "OrdersController.cs",
Description: "Orders API");
var path = new CallPath
{
Entrypoint = entrypoint,
Sites =
[
new CallSite("OrdersController.Get", "OrdersController", "OrdersController.cs", 42, CallSiteType.Direct),
new CallSite("VulnParser.Parse", "VulnParser", "VulnParser.cs", 88, CallSiteType.Direct)
],
Confidence = 0.91
};
var stack = CreateStackWithVerdict(
StackVerdict.Exploitable,
paths: [path],
reachingEntrypoints: [entrypoint]);
// Act
var result = await _factory.CreateResultAsync(stack, DefaultContext);
// Assert - Returns Unknown as placeholder since PathWitness should be built separately
result.Verdict.Should().Be(WitnessVerdict.Unknown);
// Assert
result.Verdict.Should().Be(WitnessVerdict.Affected);
result.PathWitness.Should().NotBeNull();
result.PathWitness!.Entrypoint.Name.Should().Be(entrypoint.Name);
result.PathWitness.Path.Should().HaveCount(2);
result.PathWitness.Sink.Symbol.Should().Be(stack.Symbol.Name);
result.PathWitness.WitnessId.Should().StartWith("wit:sha256:");
result.PathWitness.ClaimId.Should().NotBeNullOrWhiteSpace();
result.PathWitness.PathHash.Should().NotBeNullOrWhiteSpace();
result.PathWitness.NodeHashes.Should().NotBeEmpty();
}
[Fact]
public async Task CreateResultAsync_LikelyExploitableVerdict_ReturnsUnknownAsPlaceholder()
public async Task CreateResultAsync_LikelyExploitable_WithEntrypointOnly_ReturnsAffected()
{
// Arrange
var stack = CreateStackWithVerdict(StackVerdict.LikelyExploitable);
var entrypoint = new Entrypoint(
Name: "message-handler",
Type: EntrypointType.MessageHandler,
Location: "consumer.cs",
Description: "Queue consumer");
var stack = CreateStackWithVerdict(
StackVerdict.LikelyExploitable,
reachingEntrypoints: [entrypoint]);
// Act
var result = await _factory.CreateResultAsync(stack, DefaultContext);
// Assert
result.Verdict.Should().Be(WitnessVerdict.Affected);
result.PathWitness.Should().NotBeNull();
result.PathWitness!.Entrypoint.Name.Should().Be(entrypoint.Name);
result.PathWitness.Path.Should().ContainSingle();
result.PathWitness.Path[0].Symbol.Should().Be(stack.Symbol.Name);
}
[Fact]
public async Task CreateResultAsync_ExploitableWithoutEntrypoint_ReturnsUnknownFallback()
{
// Arrange
var stack = CreateStackWithVerdict(StackVerdict.Exploitable);
// Act
var result = await _factory.CreateResultAsync(stack, DefaultContext);

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <!-- xUnit test project for StellaOps.Scanner.Runtime; net10.0 preview with nullable
         reference types enabled and all warnings treated as errors. -->
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>
  <ItemGroup>
    <!-- System under test plus the shared test kit (categories, helpers). -->
    <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Runtime/StellaOps.Scanner.Runtime.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- NullLogger<T> used by collector/service tests. -->
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,197 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Runtime;
using StellaOps.Scanner.Runtime.Ebpf;
using StellaOps.Scanner.Runtime.Etw;
using StellaOps.TestKit;
using System.Text.Json;
using Xunit;
namespace StellaOps.Scanner.Runtime.Tests;
/// <summary>
/// Fixture-driven tests for the eBPF and ETW trace collectors running in sealed
/// (offline replay) mode: events come from preloaded fixtures or a fixture file
/// instead of live kernel probes, and must be replayed deterministically.
/// </summary>
public sealed class TraceCollectorFixtureTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task EbpfCollector_SealedMode_EmitsDeterministicFilteredEvents()
    {
        // Resolver stub yields a deterministic "resolved_{pid}_{address}" symbol for every lookup.
        var resolver = new DeterministicSymbolResolver();
        await using var collector = new EbpfTraceCollector(
            NullLogger<EbpfTraceCollector>.Instance,
            resolver,
            TimeProvider.System);
        // Fixture events are deliberately out of timestamp order and mix two PIDs (100 and 200)
        // so the test can observe both ordering and pid filtering.
        var fixtureEvents = new[]
        {
            BuildEvent(timestamp: 25, pid: 200, tid: 1, caller: 0x2000, callee: 0x2001),
            BuildEvent(timestamp: 10, pid: 100, tid: 2, caller: 0x1000, callee: 0x1001),
            BuildEvent(timestamp: 20, pid: 100, tid: 3, caller: 0x1002, callee: 0x1003)
        };
        await collector.StartAsync(new TraceCollectorConfig
        {
            SealedMode = true,
            ResolveSymbols = true,
            TargetPid = 100,                   // the pid-200 event must be filtered out
            MaxEventsPerSecond = int.MaxValue, // disable rate limiting for the test
            PreloadedEvents = fixtureEvents
        });
        var received = await ReadEventsAsync(collector.GetEventsAsync(), expectedCount: 2);
        await collector.StopAsync();
        // Only the two pid-100 events survive, replayed in ascending timestamp order,
        // with caller symbols resolved through the injected resolver.
        Assert.Equal(2, received.Count);
        Assert.Equal(10UL, received[0].Timestamp);
        Assert.Equal(20UL, received[1].Timestamp);
        Assert.All(received, evt => Assert.Equal((uint)100, evt.Pid));
        Assert.All(received, evt => Assert.StartsWith("resolved_", evt.CallerSymbol, StringComparison.Ordinal));
        var stats = collector.GetStatistics();
        Assert.Equal(2, stats.EventsCollected);
        Assert.False(stats.IsRunning);
        Assert.Equal("sealed_replay", stats.Mode);
        // Capability depends on host support; either value is acceptable in sealed mode.
        Assert.Contains(stats.Capability, new[] { "available", "sealed_fallback" });
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task EtwCollector_SealedMode_LoadsFixtureFileInDeterministicOrder()
    {
        await using var collector = new EtwTraceCollector(
            NullLogger<EtwTraceCollector>.Instance,
            TimeProvider.System);
        // Unique temp path so parallel test runs cannot collide on the fixture file.
        var fixturePath = Path.Combine(
            Path.GetTempPath(),
            $"stella-etw-fixture-{Guid.NewGuid():N}.json");
        // Events written out of timestamp order; the collector is expected to replay them sorted.
        var fixtureEvents = new[]
        {
            BuildEvent(timestamp: 90, pid: 500, tid: 9, caller: 0x900, callee: 0x901),
            BuildEvent(timestamp: 30, pid: 500, tid: 3, caller: 0x300, callee: 0x301)
        };
        await File.WriteAllBytesAsync(
            fixturePath,
            JsonSerializer.SerializeToUtf8Bytes(fixtureEvents));
        try
        {
            await collector.StartAsync(new TraceCollectorConfig
            {
                SealedMode = true,
                FixtureFilePath = fixturePath,
                MaxEventsPerSecond = int.MaxValue
            });
            var received = await ReadEventsAsync(collector.GetEventsAsync(), expectedCount: 2);
            await collector.StopAsync();
            Assert.Equal(2, received.Count);
            Assert.Equal(30UL, received[0].Timestamp);
            Assert.Equal(90UL, received[1].Timestamp);
            var stats = collector.GetStatistics();
            Assert.Equal(2, stats.EventsCollected);
            Assert.False(stats.IsRunning);
            Assert.Equal("sealed_replay", stats.Mode);
            Assert.Contains(stats.Capability, new[] { "available", "sealed_fallback" });
        }
        finally
        {
            // Always clean up the temp fixture, even if the test fails.
            if (File.Exists(fixturePath))
            {
                File.Delete(fixturePath);
            }
        }
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task EbpfCollector_InvalidFixture_ReportsDeterministicHealthError()
    {
        var resolver = new DeterministicSymbolResolver();
        await using var collector = new EbpfTraceCollector(
            NullLogger<EbpfTraceCollector>.Instance,
            resolver,
            TimeProvider.System);
        var fixturePath = Path.Combine(
            Path.GetTempPath(),
            $"stella-ebpf-invalid-{Guid.NewGuid():N}.json");
        // Malformed JSON on purpose: the collector should surface a load error, not throw.
        await File.WriteAllTextAsync(fixturePath, "{not-json");
        try
        {
            await collector.StartAsync(new TraceCollectorConfig
            {
                SealedMode = true,
                FixtureFilePath = fixturePath
            });
            // expectedCount 1 with an empty stream: ReadEventsAsync returns once the
            // stream completes (or the 2s watchdog fires), so an empty list is expected.
            var received = await ReadEventsAsync(collector.GetEventsAsync(), expectedCount: 1);
            await collector.StopAsync();
            Assert.Empty(received);
            var stats = collector.GetStatistics();
            Assert.Equal(0, stats.EventsCollected);
            Assert.NotNull(stats.LastError);
            // Error code is prefix-stable so callers/operators can match on it deterministically.
            Assert.StartsWith("fixture_load_failed:", stats.LastError!, StringComparison.Ordinal);
        }
        finally
        {
            if (File.Exists(fixturePath))
            {
                File.Delete(fixturePath);
            }
        }
    }
    /// <summary>
    /// Drains <paramref name="stream"/> until <paramref name="expectedCount"/> events arrive,
    /// the stream completes, or a 2-second watchdog cancels the enumeration.
    /// </summary>
    private static async Task<List<RuntimeCallEvent>> ReadEventsAsync(
        IAsyncEnumerable<RuntimeCallEvent> stream,
        int expectedCount)
    {
        var output = new List<RuntimeCallEvent>(capacity: expectedCount);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(2));
        await foreach (var evt in stream.WithCancellation(cts.Token))
        {
            output.Add(evt);
            if (output.Count >= expectedCount)
            {
                break;
            }
        }
        return output;
    }
    /// <summary>
    /// Builds a minimal <see cref="RuntimeCallEvent"/> with empty symbol/path fields,
    /// leaving symbol resolution to the collector under test.
    /// </summary>
    private static RuntimeCallEvent BuildEvent(
        ulong timestamp,
        uint pid,
        uint tid,
        ulong caller,
        ulong callee)
    {
        return new RuntimeCallEvent
        {
            Timestamp = timestamp,
            Pid = pid,
            Tid = tid,
            CallerAddress = caller,
            CalleeAddress = callee,
            CallerSymbol = string.Empty,
            CalleeSymbol = string.Empty,
            BinaryPath = string.Empty
        };
    }
    /// <summary>Symbol resolver stub producing a pure function of (pid, address).</summary>
    private sealed class DeterministicSymbolResolver : ISymbolResolver
    {
        public Task<string> ResolveSymbolAsync(
            uint pid,
            ulong address,
            CancellationToken cancellationToken = default)
        {
            return Task.FromResult($"resolved_{pid:x}_{address:x}");
        }
    }
}

View File

@@ -0,0 +1,163 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Runtime;
using StellaOps.Scanner.Runtime.Ingestion;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.Runtime.Tests;
/// <summary>
/// Tests for <c>TraceIngestionService</c>: determinism of trace identifiers,
/// scan-scoped indexing/sorting, and idempotent storage, backed by an in-memory
/// content-addressable store stub.
/// </summary>
public sealed class TraceIngestionServiceTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task IngestAsync_SameInputs_ProducesDeterministicTraceId()
    {
        var service = new TraceIngestionService(
            new InMemoryFileCasStore(),
            NullLogger<TraceIngestionService>.Instance,
            TimeProvider.System);
        // Ingesting identical event streams under the same scan id must yield
        // the same trace id and edge count (content-derived, not time-derived).
        var first = await service.IngestAsync(CreateEvents(), "scan-a", TestContext.Current.CancellationToken);
        var second = await service.IngestAsync(CreateEvents(), "scan-a", TestContext.Current.CancellationToken);
        Assert.Equal(first.TraceId, second.TraceId);
        Assert.Equal(first.Edges.Count, second.Edges.Count);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task StoreAndGetTracesForScanAsync_IndexesAndReturnsSorted()
    {
        var service = new TraceIngestionService(
            new InMemoryFileCasStore(),
            NullLogger<TraceIngestionService>.Instance,
            TimeProvider.System);
        // Different timestamp offsets produce two distinct traces for the same scan.
        var traceA = await service.IngestAsync(CreateEvents(offset: 0), "scan-index", TestContext.Current.CancellationToken);
        var traceB = await service.IngestAsync(CreateEvents(offset: 1_000_000), "scan-index", TestContext.Current.CancellationToken);
        await service.StoreAsync(traceA, TestContext.Current.CancellationToken);
        await service.StoreAsync(traceB, TestContext.Current.CancellationToken);
        var traces = await service.GetTracesForScanAsync("scan-index", TestContext.Current.CancellationToken);
        Assert.Equal(2, traces.Count);
        // Results are expected in ordinal ascending TraceId order.
        Assert.True(string.CompareOrdinal(traces[0].TraceId, traces[1].TraceId) < 0);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task StoreAsync_IsIdempotentForScanIndex()
    {
        var service = new TraceIngestionService(
            new InMemoryFileCasStore(),
            NullLogger<TraceIngestionService>.Instance,
            TimeProvider.System);
        var trace = await service.IngestAsync(CreateEvents(), "scan-idempotent", TestContext.Current.CancellationToken);
        // Storing the same trace twice must not create a duplicate index entry.
        await service.StoreAsync(trace, TestContext.Current.CancellationToken);
        await service.StoreAsync(trace, TestContext.Current.CancellationToken);
        var traces = await service.GetTracesForScanAsync("scan-idempotent", TestContext.Current.CancellationToken);
        Assert.Single(traces);
        Assert.Equal(trace.TraceId, traces[0].TraceId);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task GetTracesForScanAsync_UnknownScan_ReturnsEmpty()
    {
        var service = new TraceIngestionService(
            new InMemoryFileCasStore(),
            NullLogger<TraceIngestionService>.Instance,
            TimeProvider.System);
        // Unknown scan id yields an empty list, not null or an exception.
        var traces = await service.GetTracesForScanAsync("missing-scan", TestContext.Current.CancellationToken);
        Assert.Empty(traces);
    }
    /// <summary>
    /// Yields a fixed two-event call chain (main -> handler -> sink) with timestamps
    /// shifted by <paramref name="offset"/> so callers can derive distinct traces.
    /// </summary>
    private static async IAsyncEnumerable<RuntimeCallEvent> CreateEvents(int offset = 0)
    {
        yield return new RuntimeCallEvent
        {
            Timestamp = (ulong)(1_000_000 + offset),
            Pid = 101,
            Tid = 1,
            CallerAddress = 10,
            CalleeAddress = 11,
            CallerSymbol = "main",
            CalleeSymbol = "handler",
            BinaryPath = "/app/service"
        };
        yield return new RuntimeCallEvent
        {
            Timestamp = (ulong)(2_000_000 + offset),
            Pid = 101,
            Tid = 1,
            CallerAddress = 11,
            CalleeAddress = 12,
            CallerSymbol = "handler",
            CalleeSymbol = "sink",
            BinaryPath = "/app/service"
        };
        await Task.CompletedTask; // keeps the iterator genuinely async without delaying
    }
    /// <summary>
    /// Minimal in-memory <see cref="IFileContentAddressableStore"/> keyed by sha256.
    /// Maintenance operations (evict/export/import/compact) are no-ops for tests.
    /// </summary>
    private sealed class InMemoryFileCasStore : IFileContentAddressableStore
    {
        private readonly Dictionary<string, byte[]> _entries = new(StringComparer.Ordinal);
        public ValueTask<FileCasEntry?> TryGetAsync(string sha256, CancellationToken cancellationToken = default)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (!_entries.TryGetValue(sha256, out var payload))
            {
                return ValueTask.FromResult<FileCasEntry?>(null);
            }
            return ValueTask.FromResult<FileCasEntry?>(new FileCasEntry(
                sha256,
                payload.LongLength,
                DateTimeOffset.UnixEpoch,
                DateTimeOffset.UnixEpoch,
                $"cas/{sha256}.bin"));
        }
        public async Task<FileCasEntry> PutAsync(FileCasPutRequest request, CancellationToken cancellationToken = default)
        {
            cancellationToken.ThrowIfCancellationRequested();
            // Copy the content so the stored bytes outlive the caller's stream.
            await using var copy = new MemoryStream();
            await request.Content.CopyToAsync(copy, cancellationToken);
            _entries[request.Sha256] = copy.ToArray();
            // Mirror the real store's ownership contract: dispose unless LeaveOpen.
            if (!request.LeaveOpen)
            {
                request.Content.Dispose();
            }
            return new FileCasEntry(
                request.Sha256,
                _entries[request.Sha256].LongLength,
                DateTimeOffset.UnixEpoch,
                DateTimeOffset.UnixEpoch,
                $"cas/{request.Sha256}.bin");
        }
        public Task<bool> RemoveAsync(string sha256, CancellationToken cancellationToken = default)
            => Task.FromResult(_entries.Remove(sha256));
        public Task<int> EvictExpiredAsync(CancellationToken cancellationToken = default)
            => Task.FromResult(0);
        public Task<int> ExportAsync(string destinationDirectory, CancellationToken cancellationToken = default)
            => Task.FromResult(0);
        public Task<int> ImportAsync(string sourceDirectory, CancellationToken cancellationToken = default)
            => Task.FromResult(0);
        public Task<int> CompactAsync(CancellationToken cancellationToken = default)
            => Task.FromResult(0);
    }
}

View File

@@ -224,7 +224,7 @@ public sealed class DeltaVerdictSnapshotTests
var actualNorm = JsonSerializer.Serialize(
JsonSerializer.Deserialize<JsonElement>(actual), PrettyPrintOptions);
actualNorm.Should().Be(expectedNorm, "Delta verdict output should match snapshot");
Assert.Equal(expectedNorm, actualNorm);
}
}

View File

@@ -1,6 +1,7 @@
{
"predicateType": "delta-verdict.stella/v1",
"predicate": {
"unknownsBudget": null,
"beforeRevisionId": "rev-before-complex",
"afterRevisionId": "rev-after-complex",
"hasMaterialChange": true,
@@ -52,8 +53,7 @@
"afterProofSpine": null,
"beforeGraphRevisionId": null,
"afterGraphRevisionId": null,
"comparedAt": "2025-01-15T12:00:00\u002B00:00",
"unknownsBudget": null
"comparedAt": "2025-01-15T12:00:00\u002B00:00"
},
"_type": "https://in-toto.io/Statement/v1",
"subject": [

View File

@@ -1,6 +1,7 @@
{
"predicateType": "delta-verdict.stella/v1",
"predicate": {
"unknownsBudget": null,
"beforeRevisionId": "rev-before-001",
"afterRevisionId": "rev-after-001",
"hasMaterialChange": true,
@@ -26,8 +27,7 @@
"afterProofSpine": null,
"beforeGraphRevisionId": null,
"afterGraphRevisionId": null,
"comparedAt": "2025-01-15T12:00:00\u002B00:00",
"unknownsBudget": null
"comparedAt": "2025-01-15T12:00:00\u002B00:00"
},
"_type": "https://in-toto.io/Statement/v1",
"subject": [

View File

@@ -1,6 +1,7 @@
{
"predicateType": "delta-verdict.stella/v1",
"predicate": {
"unknownsBudget": null,
"beforeRevisionId": "rev-before-nochange",
"afterRevisionId": "rev-after-nochange",
"hasMaterialChange": false,
@@ -12,8 +13,7 @@
"afterProofSpine": null,
"beforeGraphRevisionId": null,
"afterGraphRevisionId": null,
"comparedAt": "2025-01-15T12:00:00\u002B00:00",
"unknownsBudget": null
"comparedAt": "2025-01-15T12:00:00\u002B00:00"
},
"_type": "https://in-toto.io/Statement/v1",
"subject": [

View File

@@ -1,6 +1,7 @@
{
"predicateType": "delta-verdict.stella/v1",
"predicate": {
"unknownsBudget": null,
"beforeRevisionId": "rev-spine-before",
"afterRevisionId": "rev-spine-after",
"hasMaterialChange": true,
@@ -32,8 +33,7 @@
},
"beforeGraphRevisionId": "graph-rev-before-001",
"afterGraphRevisionId": "graph-rev-after-001",
"comparedAt": "2025-01-15T12:00:00\u002B00:00",
"unknownsBudget": null
"comparedAt": "2025-01-15T12:00:00\u002B00:00"
},
"_type": "https://in-toto.io/Statement/v1",
"subject": [

View File

@@ -0,0 +1,302 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Replay.Core;
using StellaOps.Scanner.ProofSpine;
using StellaOps.Scanner.Storage.Oci;
using StellaOps.TestKit;
using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Xunit;
namespace StellaOps.Scanner.Storage.Oci.Tests;
/// <summary>
/// Tests for <c>SlicePullService</c> against a stubbed OCI registry: referrers-API
/// capability detection, deterministic tag-based fallback when the registry lacks
/// the referrers endpoint, and DSSE signature verification of pulled slices.
/// </summary>
public sealed class SlicePullServiceTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ListReferrersWithCapabilityAsync_WhenReferrersSupported_ReturnsSupportedCapability()
    {
        // Registry answers the referrers endpoint with a valid OCI index containing one manifest.
        var handler = new SlicePullHandler
        {
            ReferrersStatusCode = HttpStatusCode.OK,
            ReferrersBody = """
            {
              "schemaVersion": 2,
              "mediaType": "application/vnd.oci.image.index.v1+json",
              "manifests": [
                {
                  "mediaType": "application/vnd.oci.image.manifest.v1+json",
                  "digest": "sha256:abc",
                  "size": 128,
                  "artifactType": "application/vnd.dsse.envelope.v1+json"
                }
              ]
            }
            """
        };
        using var client = new HttpClient(handler);
        var service = CreateService(client);
        var reference = OciImageReference.Parse("registry.example/stellaops/demo:latest")!;
        var result = await service.ListReferrersWithCapabilityAsync(reference, "sha256:subject");
        Assert.Equal(OciReferrersCapability.Supported, result.Capability);
        Assert.False(result.FallbackUsed);
        Assert.Single(result.Referrers);
        Assert.Equal("sha256:abc", result.Referrers[0].Digest);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ListReferrersWithCapabilityAsync_WhenUnsupported_UsesDeterministicFallbackTags()
    {
        // Registry 404s the referrers endpoint; the service must fall back to listing
        // tags and resolving candidate manifests, matching on the subject digest.
        var handler = new SlicePullHandler
        {
            ReferrersStatusCode = HttpStatusCode.NotFound,
            TagsBody = """
            {
              "name": "stellaops/demo",
              "tags": ["att-proof", "unrelated"]
            }
            """,
            TaggedManifests =
            {
                ["att-proof"] = new TaggedManifest(
                    """
                    {
                      "schemaVersion": 2,
                      "mediaType": "application/vnd.oci.image.manifest.v1+json",
                      "artifactType": "application/vnd.dsse.envelope.v1+json",
                      "subject": { "digest": "sha256:subject" },
                      "layers": []
                    }
                    """,
                    "sha256:attproof")
            }
        };
        using var client = new HttpClient(handler);
        var service = CreateService(client);
        var reference = OciImageReference.Parse("registry.example/stellaops/demo:latest")!;
        var result = await service.ListReferrersWithCapabilityAsync(reference, "sha256:subject");
        Assert.Equal(OciReferrersCapability.Unsupported, result.Capability);
        Assert.True(result.FallbackUsed);
        Assert.Equal((int)HttpStatusCode.NotFound, result.StatusCode);
        // Only the manifest whose subject matches "sha256:subject" is reported;
        // the "unrelated" tag is excluded.
        Assert.Single(result.Referrers);
        Assert.Equal("sha256:attproof", result.Referrers[0].Digest);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task PullByDigestAsync_WhenDsseVerificationSucceeds_MarksSignatureVerified()
    {
        // Build a slice blob plus a DSSE envelope over it; both are served as manifest layers
        // addressed by their real sha256 digests so the service's digest checks pass.
        var sliceJson = """{"inputs":{"graphDigest":"sha256:g"},"query":{},"subgraph":{"nodes":[],"edges":[]},"verdict":{"status":"unknown","confidence":0.4},"manifest":{"scanId":"s","artifactDigest":"sha256:a","createdAtUtc":"2026-02-26T00:00:00Z","scannerVersion":"1","workerVersion":"1","concelierSnapshot":"","excititorSnapshot":"","latticePolicyHash":""}}""";
        var sliceBytes = Encoding.UTF8.GetBytes(sliceJson);
        var sliceDigest = Sha256Digest(sliceBytes);
        var envelope = new DsseEnvelope(
            PayloadType: "application/vnd.stellaops.slice+json",
            Payload: Convert.ToBase64String(sliceBytes),
            Signatures: [new DsseSignature("key-1", "sig-1")]);
        var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope);
        var envelopeDigest = Sha256Digest(envelopeBytes);
        var manifestBody = $$"""
        {
          "schemaVersion": 2,
          "mediaType": "application/vnd.oci.image.manifest.v1+json",
          "layers": [
            { "mediaType": "{{OciMediaTypes.ReachabilitySlice}}", "digest": "{{sliceDigest}}", "size": {{sliceBytes.Length}} },
            { "mediaType": "{{OciMediaTypes.DsseEnvelope}}", "digest": "{{envelopeDigest}}", "size": {{envelopeBytes.Length}} }
          ]
        }
        """;
        var handler = new SlicePullHandler
        {
            ManifestBody = manifestBody,
            BlobBodies =
            {
                [sliceDigest] = sliceBytes,
                [envelopeDigest] = envelopeBytes
            }
        };
        // Verifier stub reports a valid signature.
        var verifier = new StubDsseSigningService(new DsseVerificationOutcome(true, true, null));
        using var client = new HttpClient(handler);
        var service = CreateService(client, verifier);
        var reference = OciImageReference.Parse("registry.example/stellaops/demo:latest")!;
        var result = await service.PullByDigestAsync(reference, "sha256:subject");
        Assert.True(result.Success);
        Assert.True(result.SignatureVerified);
        Assert.NotNull(result.DsseEnvelope);
        // The service must have actually routed the envelope through the verifier exactly once.
        Assert.Single(verifier.VerifiedEnvelopes);
    }
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task PullByDigestAsync_WhenDsseVerificationFails_ReturnsUnverifiedSlice()
    {
        // Same wiring as the success case, but the verifier rejects the signature:
        // the pull still succeeds, only SignatureVerified flips to false.
        var sliceJson = """{"inputs":{"graphDigest":"sha256:g"},"query":{},"subgraph":{"nodes":[],"edges":[]},"verdict":{"status":"unknown","confidence":0.4},"manifest":{"scanId":"s","artifactDigest":"sha256:a","createdAtUtc":"2026-02-26T00:00:00Z","scannerVersion":"1","workerVersion":"1","concelierSnapshot":"","excititorSnapshot":"","latticePolicyHash":""}}""";
        var sliceBytes = Encoding.UTF8.GetBytes(sliceJson);
        var sliceDigest = Sha256Digest(sliceBytes);
        var envelope = new DsseEnvelope(
            PayloadType: "application/vnd.stellaops.slice+json",
            Payload: Convert.ToBase64String(sliceBytes),
            Signatures: [new DsseSignature("key-1", "sig-1")]);
        var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope);
        var envelopeDigest = Sha256Digest(envelopeBytes);
        var manifestBody = $$"""
        {
          "schemaVersion": 2,
          "mediaType": "application/vnd.oci.image.manifest.v1+json",
          "layers": [
            { "mediaType": "{{OciMediaTypes.ReachabilitySlice}}", "digest": "{{sliceDigest}}", "size": {{sliceBytes.Length}} },
            { "mediaType": "{{OciMediaTypes.DsseEnvelope}}", "digest": "{{envelopeDigest}}", "size": {{envelopeBytes.Length}} }
          ]
        }
        """;
        var handler = new SlicePullHandler
        {
            ManifestBody = manifestBody,
            BlobBodies =
            {
                [sliceDigest] = sliceBytes,
                [envelopeDigest] = envelopeBytes
            }
        };
        var verifier = new StubDsseSigningService(new DsseVerificationOutcome(false, false, "signature_invalid"));
        using var client = new HttpClient(handler);
        var service = CreateService(client, verifier);
        var reference = OciImageReference.Parse("registry.example/stellaops/demo:latest")!;
        var result = await service.PullByDigestAsync(reference, "sha256:subject");
        Assert.True(result.Success);
        Assert.False(result.SignatureVerified);
        Assert.NotNull(result.DsseEnvelope);
    }
    /// <summary>
    /// Builds a <c>SlicePullService</c> wired to the stub HTTP client with signature
    /// verification and referrers fallback enabled.
    /// </summary>
    private static SlicePullService CreateService(HttpClient client, IDsseSigningService? signingService = null)
    {
        return new SlicePullService(
            client,
            OciRegistryAuthorization.FromOptions("registry.example", new OciRegistryAuthOptions()),
            options: new SlicePullOptions
            {
                VerifySignature = true,
                EnableReferrersFallback = true
            },
            dsseSigningService: signingService,
            logger: NullLogger<SlicePullService>.Instance,
            timeProvider: TimeProvider.System);
    }
    /// <summary>Computes the canonical lowercase "sha256:&lt;hex&gt;" digest for a payload.</summary>
    private static string Sha256Digest(byte[] bytes)
        => $"sha256:{Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant()}";
    /// <summary>
    /// Minimal OCI distribution stub: routes referrers, tags/list, manifests and blobs
    /// requests by URL path; anything else returns 404.
    /// </summary>
    private sealed class SlicePullHandler : HttpMessageHandler
    {
        public HttpStatusCode ReferrersStatusCode { get; init; } = HttpStatusCode.OK;
        public string ReferrersBody { get; init; } = """{"schemaVersion":2,"mediaType":"application/vnd.oci.image.index.v1+json","manifests":[]}""";
        public string TagsBody { get; init; } = """{"name":"stellaops/demo","tags":[]}""";
        public string ManifestBody { get; init; } = """{"schemaVersion":2,"mediaType":"application/vnd.oci.image.manifest.v1+json","layers":[]}""";
        public Dictionary<string, byte[]> BlobBodies { get; } = new(StringComparer.Ordinal);
        public Dictionary<string, TaggedManifest> TaggedManifests { get; } = new(StringComparer.Ordinal);
        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            var path = request.RequestUri?.AbsolutePath ?? string.Empty;
            if (path.Contains("/referrers/", StringComparison.Ordinal))
            {
                return Task.FromResult(new HttpResponseMessage(ReferrersStatusCode)
                {
                    Content = new StringContent(ReferrersBody, Encoding.UTF8, "application/json")
                });
            }
            if (path.EndsWith("/tags/list", StringComparison.Ordinal))
            {
                return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
                {
                    Content = new StringContent(TagsBody, Encoding.UTF8, "application/json")
                });
            }
            if (path.Contains("/manifests/", StringComparison.Ordinal))
            {
                // Tag/digest is the last path segment; serve a per-tag manifest with its
                // Docker-Content-Digest header when registered, otherwise the default body.
                var tagOrDigest = Uri.UnescapeDataString(path[(path.LastIndexOf('/') + 1)..]);
                if (TaggedManifests.TryGetValue(tagOrDigest, out var tagged))
                {
                    var response = new HttpResponseMessage(HttpStatusCode.OK)
                    {
                        Content = new StringContent(tagged.Body, Encoding.UTF8, "application/json")
                    };
                    response.Headers.TryAddWithoutValidation("Docker-Content-Digest", tagged.Digest);
                    return Task.FromResult(response);
                }
                return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
                {
                    Content = new StringContent(ManifestBody, Encoding.UTF8, "application/json")
                });
            }
            if (path.Contains("/blobs/", StringComparison.Ordinal))
            {
                var digest = Uri.UnescapeDataString(path[(path.LastIndexOf('/') + 1)..]);
                if (BlobBodies.TryGetValue(digest, out var content))
                {
                    return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
                    {
                        Content = new ByteArrayContent(content)
                    });
                }
                return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
            }
            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
        }
    }
    /// <summary>Manifest body plus the digest to report via Docker-Content-Digest.</summary>
    private sealed record TaggedManifest(string Body, string Digest);
    /// <summary>
    /// DSSE signing stub: signing is unsupported; verification returns a fixed outcome
    /// and records every envelope it was asked to verify.
    /// </summary>
    private sealed class StubDsseSigningService : IDsseSigningService
    {
        private readonly DsseVerificationOutcome _verificationOutcome;
        public StubDsseSigningService(DsseVerificationOutcome verificationOutcome)
        {
            _verificationOutcome = verificationOutcome;
        }
        public List<DsseEnvelope> VerifiedEnvelopes { get; } = new();
        public Task<DsseEnvelope> SignAsync(object payload, string payloadType, ICryptoProfile cryptoProfile, CancellationToken cancellationToken = default)
            => throw new NotSupportedException();
        public Task<DsseVerificationOutcome> VerifyAsync(DsseEnvelope envelope, CancellationToken cancellationToken = default)
        {
            VerifiedEnvelopes.Add(envelope);
            return Task.FromResult(_verificationOutcome);
        }
    }
}

View File

@@ -0,0 +1,183 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.Scanner.Contracts;
using StellaOps.Scanner.ReachabilityDrift;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
[Collection("scanner-postgres")]
public sealed class ReachabilityDriftRepositorySchemaFallbackTests : IAsyncLifetime
{
private const string MissingSchemaName = "drift_missing";
private const string DefaultSchemaName = ScannerStorageDefaults.DefaultSchemaName;
private readonly ScannerPostgresFixture _fixture;
private ScannerDataSource _dataSource = null!;
private PostgresReachabilityDriftResultRepository _repository = null!;
public ReachabilityDriftRepositorySchemaFallbackTests(ScannerPostgresFixture fixture)
{
_fixture = fixture;
}
public async ValueTask InitializeAsync()
{
await _fixture.TruncateAllTablesAsync();
await PrepareDefaultSchemaDriftTablesAsync();
await _fixture.ExecuteSqlAsync($"CREATE SCHEMA IF NOT EXISTS {MissingSchemaName};");
var options = new ScannerStorageOptions
{
Postgres = new PostgresOptions
{
ConnectionString = _fixture.ConnectionString,
SchemaName = MissingSchemaName
}
};
_dataSource = new ScannerDataSource(
Options.Create(options),
NullLogger<ScannerDataSource>.Instance);
_repository = new PostgresReachabilityDriftResultRepository(
_dataSource,
NullLogger<PostgresReachabilityDriftResultRepository>.Instance);
}
public async ValueTask DisposeAsync()
{
if (_dataSource is not null)
{
await _dataSource.DisposeAsync();
}
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task Repository_FallsBackToDefaultSchema_WhenConfiguredSchemaLacksDriftTables()
{
// Arrange
const string tenantId = "tenant-fallback";
var driftId = Guid.NewGuid();
var result = new ReachabilityDriftResult
{
Id = driftId,
BaseScanId = "scan-base-fallback",
HeadScanId = "scan-head-fallback",
Language = "dotnet",
DetectedAt = DateTimeOffset.UtcNow,
ResultDigest = "sha256:drift-fallback",
NewlyReachable =
[
CreateSink(
"sink-reachable",
DriftDirection.BecameReachable,
"Namespace.Service.Entry")
],
NewlyUnreachable =
[
CreateSink(
"sink-unreachable",
DriftDirection.BecameUnreachable,
"Namespace.Service.Legacy")
]
};
// Act
await _repository.StoreAsync(result, tenantId: tenantId);
var latest = await _repository.TryGetLatestForHeadAsync(
result.HeadScanId,
result.Language,
tenantId: tenantId);
var byId = await _repository.TryGetByIdAsync(result.Id, tenantId: tenantId);
var exists = await _repository.ExistsAsync(result.Id, tenantId: tenantId);
var reachableSinks = await _repository.ListSinksAsync(
result.Id,
DriftDirection.BecameReachable,
offset: 0,
limit: 10,
tenantId: tenantId);
// Assert
Assert.NotNull(latest);
Assert.NotNull(byId);
Assert.True(exists);
Assert.Equal(result.ResultDigest, latest!.ResultDigest);
Assert.Equal(result.ResultDigest, byId!.ResultDigest);
Assert.Single(reachableSinks);
Assert.Equal("sink-reachable", reachableSinks[0].SinkNodeId);
var otherTenant = await _repository.TryGetLatestForHeadAsync(
result.HeadScanId,
result.Language,
tenantId: "tenant-other");
Assert.Null(otherTenant);
}
private static DriftedSink CreateSink(string sinkNodeId, DriftDirection direction, string symbol)
{
return new DriftedSink
{
Id = Guid.NewGuid(),
SinkNodeId = sinkNodeId,
Symbol = symbol,
SinkCategory = SinkCategory.SqlInjection,
Direction = direction,
Cause = DriftCause.GuardRemoved(symbol),
Path = new CompressedPath
{
Entrypoint = new PathNode
{
NodeId = "entry-1",
Symbol = "Program.Main"
},
Sink = new PathNode
{
NodeId = sinkNodeId,
Symbol = symbol
},
IntermediateCount = 0,
KeyNodes = ImmutableArray<PathNode>.Empty,
FullPath = ["entry-1", sinkNodeId]
}
};
}
private async Task PrepareDefaultSchemaDriftTablesAsync()
{
var sourceSchema = QuoteIdentifier(_fixture.SchemaName);
var defaultSchema = QuoteIdentifier(DefaultSchemaName);
var sql = $"""
CREATE SCHEMA IF NOT EXISTS {defaultSchema};
DROP TABLE IF EXISTS {defaultSchema}.drifted_sinks CASCADE;
DROP TABLE IF EXISTS {defaultSchema}.reachability_drift_results CASCADE;
CREATE TABLE {defaultSchema}.reachability_drift_results
(LIKE {sourceSchema}.reachability_drift_results INCLUDING ALL);
CREATE TABLE {defaultSchema}.drifted_sinks
(LIKE {sourceSchema}.drifted_sinks INCLUDING ALL);
""";
await _fixture.ExecuteSqlAsync(sql);
}
/// <summary>
/// Renders <paramref name="identifier"/> as a double-quoted SQL identifier,
/// doubling any embedded double-quote characters.
/// </summary>
private static string QuoteIdentifier(string identifier)
{
    // Standard SQL escaping: " inside a quoted identifier becomes "".
    var escapedBody = identifier.Replace("\"", "\"\"", StringComparison.Ordinal);
    return string.Concat("\"", escapedBody, "\"");
}
}

View File

@@ -52,6 +52,7 @@ public sealed class SbomValidationPipelineTests
JsonBytes = Encoding.UTF8.GetBytes("{}"),
JsonSha256 = "abc123",
ContentHash = "abc123",
CanonicalId = "abc123",
JsonMediaType = "application/vnd.cyclonedx+json",
ProtobufBytes = Array.Empty<byte>(),
ProtobufSha256 = "def456",
@@ -67,6 +68,7 @@ public sealed class SbomValidationPipelineTests
JsonBytes = Encoding.UTF8.GetBytes("{}"),
JsonSha256 = "xyz789",
ContentHash = "xyz789",
CanonicalId = "xyz789",
JsonMediaType = "application/vnd.cyclonedx+json",
ProtobufBytes = Array.Empty<byte>(),
ProtobufSha256 = "uvw012",

View File

@@ -1,4 +1,4 @@
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// ActionablesEndpointsTests.cs
// Sprint: SPRINT_4200_0002_0006_delta_compare_api
// Description: Integration tests for actionables engine endpoints.
@@ -8,9 +8,9 @@ using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
@@ -21,135 +21,156 @@ public sealed class ActionablesEndpointsTests
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetDeltaActionables_ValidDeltaId_ReturnsActionables()
[Fact]
public async Task GetDeltaActionables_UnknownDelta_ReturnsNotFound()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
Assert.NotNull(result);
Assert.Equal("delta-12345678", result!.DeltaId);
Assert.NotNull(result.Actionables);
var response = await client.GetAsync("/api/v1/actionables/delta/cmp-missing", TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetDeltaActionables_SortedByPriority()
[Fact]
public async Task GetDeltaActionables_ValidDelta_ReturnsDerivedDeterministicActionables()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678");
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
var delta = await CreateDeltaAsync(client);
Assert.NotNull(result);
if (result!.Actionables.Count > 1)
var first = await GetActionablesAsync(client, delta.ComparisonId);
var second = await GetActionablesAsync(client, delta.ComparisonId);
Assert.Equal(delta.ComparisonId, first.DeltaId);
Assert.NotNull(first.Actionables);
Assert.Equal(first.Actionables.Select(a => a.Id), second.Actionables.Select(a => a.Id));
Assert.All(first.Actionables, actionable => Assert.StartsWith("act-", actionable.Id, StringComparison.Ordinal));
var changedVulnIds = delta.Vulnerabilities!
.Where(v => v.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase)
|| v.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase))
.Select(v => v.VulnId)
.ToHashSet(StringComparer.Ordinal);
foreach (var actionable in first.Actionables.Where(a => a.Type is "upgrade" or "investigate"))
{
var priorities = result.Actionables.Select(GetPriorityOrder).ToList();
Assert.True(priorities.SequenceEqual(priorities.Order()));
Assert.NotNull(actionable.CveIds);
Assert.NotEmpty(actionable.CveIds!);
Assert.Contains(actionable.CveIds!, id => changedVulnIds.Contains(id));
}
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetDeltaActionables_SortedByPriorityThenId()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var delta = await CreateDeltaAsync(client);
var result = await GetActionablesAsync(client, delta.ComparisonId);
var actual = result.Actionables.Select(a => (Priority: GetPriorityOrder(a.Priority), a.Id)).ToList();
var expected = actual.OrderBy(x => x.Priority).ThenBy(x => x.Id, StringComparer.Ordinal).ToList();
Assert.Equal(expected, actual);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetActionablesByPriority_Critical_FiltersCorrectly()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678/by-priority/critical");
var delta = await CreateDeltaAsync(client);
var response = await client.GetAsync(
$"/api/v1/actionables/delta/{delta.ComparisonId}/by-priority/critical",
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions, TestContext.Current.CancellationToken);
Assert.NotNull(result);
Assert.All(result!.Actionables, a => Assert.Equal("critical", a.Priority, StringComparer.OrdinalIgnoreCase));
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetActionablesByPriority_InvalidPriority_ReturnsBadRequest()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678/by-priority/invalid");
var delta = await CreateDeltaAsync(client);
var response = await client.GetAsync(
$"/api/v1/actionables/delta/{delta.ComparisonId}/by-priority/invalid",
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetActionablesByType_Upgrade_FiltersCorrectly()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678/by-type/upgrade");
var delta = await CreateDeltaAsync(client);
var response = await client.GetAsync(
$"/api/v1/actionables/delta/{delta.ComparisonId}/by-type/upgrade",
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions, TestContext.Current.CancellationToken);
Assert.NotNull(result);
Assert.All(result!.Actionables, a => Assert.Equal("upgrade", a.Type, StringComparer.OrdinalIgnoreCase));
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetActionablesByType_Vex_FiltersCorrectly()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678/by-type/vex");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
Assert.NotNull(result);
Assert.All(result!.Actionables, a => Assert.Equal("vex", a.Type, StringComparer.OrdinalIgnoreCase));
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetActionablesByType_InvalidType_ReturnsBadRequest()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678/by-type/invalid");
var delta = await CreateDeltaAsync(client);
var response = await client.GetAsync(
$"/api/v1/actionables/delta/{delta.ComparisonId}/by-type/invalid",
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetDeltaActionables_IncludesEstimatedEffort()
[Fact]
public async Task GetDeltaActionables_IncludesValidEstimatedEffortValues()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/actionables/delta/delta-12345678");
var result = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions);
var delta = await CreateDeltaAsync(client);
var result = await GetActionablesAsync(client, delta.ComparisonId);
Assert.NotNull(result);
foreach (var actionable in result!.Actionables)
foreach (var actionable in result.Actionables)
{
Assert.NotNull(actionable.EstimatedEffort);
Assert.Contains(actionable.EstimatedEffort, new[] { "trivial", "low", "medium", "high" });
}
}
private static int GetPriorityOrder(ActionableDto actionable)
private static int GetPriorityOrder(string priority)
{
return actionable.Priority.ToLowerInvariant() switch
return priority.ToLowerInvariant() switch
{
"critical" => 0,
"high" => 1,
@@ -158,4 +179,33 @@ public sealed class ActionablesEndpointsTests
_ => 4
};
}
private static async Task<DeltaCompareResponseDto> CreateDeltaAsync(HttpClient client)
{
var request = new DeltaCompareRequestDto
{
BaseDigest = "sha256:base-actionables",
TargetDigest = "sha256:target-actionables",
IncludeVulnerabilities = true,
IncludeComponents = true,
IncludePolicyDiff = true
};
var response = await client.PostAsJsonAsync("/api/v1/delta/compare", request, TestContext.Current.CancellationToken);
response.EnsureSuccessStatusCode();
var delta = await response.Content.ReadFromJsonAsync<DeltaCompareResponseDto>(SerializerOptions, TestContext.Current.CancellationToken);
Assert.NotNull(delta);
return delta!;
}
private static async Task<ActionablesResponseDto> GetActionablesAsync(HttpClient client, string deltaId)
{
var response = await client.GetAsync($"/api/v1/actionables/delta/{deltaId}", TestContext.Current.CancellationToken);
response.EnsureSuccessStatusCode();
var actionables = await response.Content.ReadFromJsonAsync<ActionablesResponseDto>(SerializerOptions, TestContext.Current.CancellationToken);
Assert.NotNull(actionables);
return actionables!;
}
}

View File

@@ -1,4 +1,4 @@
// -----------------------------------------------------------------------------
// -----------------------------------------------------------------------------
// DeltaCompareEndpointsTests.cs
// Sprint: SPRINT_4200_0002_0006_delta_compare_api
// Description: Integration tests for delta compare endpoints.
@@ -8,10 +8,9 @@ using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.TestKit;
using Xunit;
using StellaOps.TestKit;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
@@ -22,37 +21,59 @@ public sealed class DeltaCompareEndpointsTests
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task PostCompare_ValidRequest_ReturnsComparisonResult()
[Fact]
public async Task PostCompare_ValidRequest_ComputesDerivedSummaryAndPersistsById()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var request = new DeltaCompareRequestDto
{
BaseDigest = "sha256:base123",
TargetDigest = "sha256:target456",
IncludeVulnerabilities = true,
IncludeComponents = true,
IncludePolicyDiff = true
};
var result = await PostCompareAsync(
client,
new DeltaCompareRequestDto
{
BaseDigest = "sha256:base123",
TargetDigest = "sha256:target456",
IncludeVulnerabilities = true,
IncludeComponents = true,
IncludePolicyDiff = true
});
var response = await client.PostAsJsonAsync("/api/v1/delta/compare", request, TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<DeltaCompareResponseDto>(SerializerOptions, TestContext.Current.CancellationToken);
Assert.NotNull(result);
Assert.NotNull(result!.Base);
Assert.NotNull(result.Target);
Assert.NotNull(result.Summary);
Assert.NotNull(result.Vulnerabilities);
Assert.NotNull(result.Components);
Assert.NotNull(result.PolicyDiff);
Assert.NotEmpty(result.Vulnerabilities);
Assert.NotEmpty(result.ComparisonId);
Assert.StartsWith("cmp-", result.ComparisonId, StringComparison.Ordinal);
Assert.Equal("sha256:base123", result.Base.Digest);
Assert.Equal("sha256:target456", result.Target.Digest);
Assert.Equal(
result.Vulnerabilities.Count(v => v.ChangeType.Equals("Added", StringComparison.OrdinalIgnoreCase)),
result.Summary.Added);
Assert.Equal(
result.Vulnerabilities.Count(v => v.ChangeType.Equals("Removed", StringComparison.OrdinalIgnoreCase)),
result.Summary.Removed);
Assert.Equal(
result.Vulnerabilities.Count(v => v.ChangeType.Equals("Modified", StringComparison.OrdinalIgnoreCase)),
result.Summary.Modified);
Assert.Equal(
result.Vulnerabilities.Count(v => v.ChangeType.Equals("Unchanged", StringComparison.OrdinalIgnoreCase)),
result.Summary.Unchanged);
var persisted = await client.GetFromJsonAsync<DeltaCompareResponseDto>(
$"/api/v1/delta/{result.ComparisonId}",
TestContext.Current.CancellationToken);
Assert.NotNull(persisted);
Assert.Equal(result.ComparisonId, persisted!.ComparisonId);
Assert.Equal(result.Summary.RiskDirection, persisted.Summary.RiskDirection);
Assert.Equal(result.Summary.Added, persisted.Summary.Added);
Assert.Equal(result.Summary.Removed, persisted.Summary.Removed);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task PostCompare_MissingBaseDigest_ReturnsBadRequest()
{
await using var factory = new ScannerApplicationFactory();
@@ -61,7 +82,7 @@ public sealed class DeltaCompareEndpointsTests
var request = new DeltaCompareRequestDto
{
BaseDigest = "",
BaseDigest = string.Empty,
TargetDigest = "sha256:target456"
};
@@ -70,7 +91,7 @@ public sealed class DeltaCompareEndpointsTests
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task PostCompare_MissingTargetDigest_ReturnsBadRequest()
{
await using var factory = new ScannerApplicationFactory();
@@ -80,7 +101,7 @@ public sealed class DeltaCompareEndpointsTests
var request = new DeltaCompareRequestDto
{
BaseDigest = "sha256:base123",
TargetDigest = ""
TargetDigest = string.Empty
};
var response = await client.PostAsJsonAsync("/api/v1/delta/compare", request, TestContext.Current.CancellationToken);
@@ -88,50 +109,69 @@ public sealed class DeltaCompareEndpointsTests
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetQuickDiff_ValidDigests_ReturnsQuickSummary()
[Fact]
public async Task GetQuickDiff_UsesComparisonDerivedValues()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/delta/quick?baseDigest=sha256:base123&targetDigest=sha256:target456");
var compare = await PostCompareAsync(
client,
new DeltaCompareRequestDto
{
BaseDigest = "sha256:base123",
TargetDigest = "sha256:target456",
IncludeVulnerabilities = true,
IncludeComponents = true,
IncludePolicyDiff = true
});
var response = await client.GetAsync(
"/api/v1/delta/quick?baseDigest=sha256:base123&targetDigest=sha256:target456",
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<QuickDiffSummaryDto>(SerializerOptions);
var result = await response.Content.ReadFromJsonAsync<QuickDiffSummaryDto>(
SerializerOptions,
TestContext.Current.CancellationToken);
Assert.NotNull(result);
Assert.Equal("sha256:base123", result!.BaseDigest);
Assert.Equal("sha256:target456", result.TargetDigest);
Assert.NotEmpty(result.RiskDirection);
Assert.Equal(compare.Summary.RiskDirection, result.RiskDirection);
Assert.Equal(compare.Summary.SeverityChanges.CriticalAdded, result.CriticalAdded);
Assert.Equal(compare.Summary.SeverityChanges.CriticalRemoved, result.CriticalRemoved);
Assert.Equal(compare.Summary.SeverityChanges.HighAdded, result.HighAdded);
Assert.Equal(compare.Summary.SeverityChanges.HighRemoved, result.HighRemoved);
Assert.NotEmpty(result.Summary);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetQuickDiff_MissingDigest_ReturnsBadRequest()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/delta/quick?baseDigest=sha256:base123");
var response = await client.GetAsync("/api/v1/delta/quick?baseDigest=sha256:base123", TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task GetComparison_NotFound_ReturnsNotFound()
{
await using var factory = new ScannerApplicationFactory();
await factory.InitializeAsync();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/delta/nonexistent-id");
var response = await client.GetAsync("/api/v1/delta/nonexistent-id", TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public async Task PostCompare_DeterministicComparisonId_SameInputsSameId()
{
await using var factory = new ScannerApplicationFactory();
@@ -144,14 +184,27 @@ public sealed class DeltaCompareEndpointsTests
TargetDigest = "sha256:target456"
};
var response1 = await client.PostAsJsonAsync("/api/v1/delta/compare", request);
var result1 = await response1.Content.ReadFromJsonAsync<DeltaCompareResponseDto>(SerializerOptions);
var result1 = await PostCompareAsync(client, request);
var result2 = await PostCompareAsync(client, request);
var response2 = await client.PostAsJsonAsync("/api/v1/delta/compare", request);
var result2 = await response2.Content.ReadFromJsonAsync<DeltaCompareResponseDto>(SerializerOptions);
Assert.Equal(result1.ComparisonId, result2.ComparisonId);
}
Assert.NotNull(result1);
Assert.NotNull(result2);
Assert.Equal(result1!.ComparisonId, result2!.ComparisonId);
private static async Task<DeltaCompareResponseDto> PostCompareAsync(
HttpClient client,
DeltaCompareRequestDto request)
{
var response = await client.PostAsJsonAsync(
"/api/v1/delta/compare",
request,
TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var result = await response.Content.ReadFromJsonAsync<DeltaCompareResponseDto>(
SerializerOptions,
TestContext.Current.CancellationToken);
Assert.NotNull(result);
return result!;
}
}

View File

@@ -0,0 +1,110 @@
using StellaOps.Policy.Scoring;
using StellaOps.Scanner.WebService.Services;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
/// Exercises <see cref="DeterministicScoringService.ReplayScoreAsync"/>:
/// identical inputs must replay to identical outputs, the freeze timestamp must be
/// folded into the canonical input hash, and the score must equal the rounded,
/// clamped sum of the weighted factors.
/// </summary>
public sealed class DeterministicScoringServiceTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ReplayScoreAsync_SameInputs_ReturnsStableOutputs()
    {
        // Arrange: one ledger per replay so each run records its own proof nodes.
        var sut = new DeterministicScoringService();
        var seedBytes = Convert.FromHexString("00112233445566778899AABBCCDDEEFF");
        var frozenAt = new DateTimeOffset(2026, 03, 04, 12, 00, 00, TimeSpan.Zero);
        var firstLedger = new ProofLedger();
        var secondLedger = new ProofLedger();

        var resultA = await sut.ReplayScoreAsync(
            scanId: "scan-a",
            concelierSnapshotHash: "sha256:concelier-a",
            excititorSnapshotHash: "sha256:excititor-a",
            latticePolicyHash: "sha256:policy-a",
            seed: seedBytes,
            freezeTimestamp: frozenAt,
            ledger: firstLedger,
            cancellationToken: TestContext.Current.CancellationToken);

        var resultB = await sut.ReplayScoreAsync(
            scanId: "scan-a",
            concelierSnapshotHash: "sha256:concelier-a",
            excititorSnapshotHash: "sha256:excititor-a",
            latticePolicyHash: "sha256:policy-a",
            seed: seedBytes,
            freezeTimestamp: frozenAt,
            ledger: secondLedger,
            cancellationToken: TestContext.Current.CancellationToken);

        // Every externally observable output must match across replays.
        Assert.Equal(resultA.Score, resultB.Score);
        Assert.Equal(resultA.CanonicalInputHash, resultB.CanonicalInputHash);
        Assert.Equal(resultA.CanonicalInputPayload, resultB.CanonicalInputPayload);
        Assert.Equal(resultA.SeedHex, resultB.SeedHex);
        Assert.Equal(resultA.FormulaVersion, resultB.FormulaVersion);
        Assert.Equal(
            resultA.Factors.Select(f => (f.Name, f.Weight, f.Raw, f.Weighted, f.Source)),
            resultB.Factors.Select(f => (f.Name, f.Weight, f.Raw, f.Weighted, f.Source)));

        // Each replay appended exactly two nodes to its own ledger.
        Assert.Equal(2, firstLedger.Nodes.Count);
        Assert.Equal(2, secondLedger.Nodes.Count);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ReplayScoreAsync_DifferentFreezeTimestamp_ChangesCanonicalInputHash()
    {
        var sut = new DeterministicScoringService();
        var seedBytes = Convert.FromHexString("00112233445566778899AABBCCDDEEFF");

        var baseline = await sut.ReplayScoreAsync(
            scanId: "scan-a",
            concelierSnapshotHash: "sha256:concelier-a",
            excititorSnapshotHash: "sha256:excititor-a",
            latticePolicyHash: "sha256:policy-a",
            seed: seedBytes,
            freezeTimestamp: new DateTimeOffset(2026, 03, 04, 12, 00, 00, TimeSpan.Zero),
            ledger: new ProofLedger(),
            cancellationToken: TestContext.Current.CancellationToken);

        // Same inputs except the freeze timestamp shifted by one minute.
        var shifted = await sut.ReplayScoreAsync(
            scanId: "scan-a",
            concelierSnapshotHash: "sha256:concelier-a",
            excititorSnapshotHash: "sha256:excititor-a",
            latticePolicyHash: "sha256:policy-a",
            seed: seedBytes,
            freezeTimestamp: new DateTimeOffset(2026, 03, 04, 12, 01, 00, TimeSpan.Zero),
            ledger: new ProofLedger(),
            cancellationToken: TestContext.Current.CancellationToken);

        // The timestamp participates in the canonical input, but not in the score itself.
        Assert.NotEqual(baseline.CanonicalInputHash, shifted.CanonicalInputHash);
        Assert.NotEqual(baseline.CanonicalInputPayload, shifted.CanonicalInputPayload);
        Assert.Equal(baseline.Score, shifted.Score);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ReplayScoreAsync_UsesFactorizedRoundedComposition()
    {
        var sut = new DeterministicScoringService();
        var seedBytes = Convert.FromHexString("00112233445566778899AABBCCDDEEFF");

        var replay = await sut.ReplayScoreAsync(
            scanId: "scan-a",
            concelierSnapshotHash: "sha256:concelier-a",
            excititorSnapshotHash: "sha256:excititor-a",
            latticePolicyHash: "sha256:policy-a",
            seed: seedBytes,
            freezeTimestamp: new DateTimeOffset(2026, 03, 04, 12, 00, 00, TimeSpan.Zero),
            ledger: new ProofLedger(),
            cancellationToken: TestContext.Current.CancellationToken);

        Assert.Equal("v2.factorized", replay.FormulaVersion);
        Assert.Equal(["cvss", "epss", "reachability", "provenance"], replay.Factors.Select(f => f.Name));

        // Score = sum of weighted factors, banker's-rounded to 6 places, clamped to [0, 1].
        var expectedScore = Math.Round(replay.Factors.Sum(f => f.Weighted), 6, MidpointRounding.ToEven);
        expectedScore = Math.Clamp(expectedScore, 0.0, 1.0);
        Assert.Equal(expectedScore, replay.Score);
    }
}

View File

@@ -35,8 +35,8 @@ public sealed class NotifierIngestionTests
Priority = "critical"
};
var orchestratorEvent = CreateTestEvent(metadata);
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var jobEngineEvent = CreateTestEvent(metadata);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
@@ -59,10 +59,10 @@ public sealed class NotifierIngestionTests
[Fact]
public void NotifierMetadata_OmittedWhenNull()
{
var orchestratorEvent = new OrchestratorEvent
var jobEngineEvent = new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerReportReady,
Kind = JobEngineEventKinds.ScannerReportReady,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.UtcNow,
@@ -82,7 +82,7 @@ public sealed class NotifierIngestionTests
Notifier = null // Explicitly null
};
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
@@ -107,10 +107,10 @@ public sealed class NotifierIngestionTests
[Fact]
public void ScanStartedEvent_SerializesForNotifier()
{
var orchestratorEvent = new OrchestratorEvent
var jobEngineEvent = new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerScanStarted,
Kind = JobEngineEventKinds.ScannerScanStarted,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.Parse("2025-12-07T10:00:00Z"),
@@ -137,11 +137,11 @@ public sealed class NotifierIngestionTests
}
};
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
Assert.Equal(OrchestratorEventKinds.ScannerScanStarted, node["kind"]?.GetValue<string>());
Assert.Equal(JobEngineEventKinds.ScannerScanStarted, node["kind"]?.GetValue<string>());
var payload = node["payload"]?.AsObject();
Assert.NotNull(payload);
@@ -157,10 +157,10 @@ public sealed class NotifierIngestionTests
[Fact]
public void ScanFailedEvent_SerializesWithErrorDetails()
{
var orchestratorEvent = new OrchestratorEvent
var jobEngineEvent = new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerScanFailed,
Kind = JobEngineEventKinds.ScannerScanFailed,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.Parse("2025-12-07T10:05:00Z"),
@@ -200,11 +200,11 @@ public sealed class NotifierIngestionTests
}
};
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
Assert.Equal(OrchestratorEventKinds.ScannerScanFailed, node["kind"]?.GetValue<string>());
Assert.Equal(JobEngineEventKinds.ScannerScanFailed, node["kind"]?.GetValue<string>());
var payload = node["payload"]?.AsObject();
Assert.NotNull(payload);
@@ -225,10 +225,10 @@ public sealed class NotifierIngestionTests
[Fact]
public void VulnerabilityDetectedEvent_SerializesForNotifier()
{
var orchestratorEvent = new OrchestratorEvent
var jobEngineEvent = new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerVulnerabilityDetected,
Kind = JobEngineEventKinds.ScannerVulnerabilityDetected,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.Parse("2025-12-07T10:00:00Z"),
@@ -270,11 +270,11 @@ public sealed class NotifierIngestionTests
}
};
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
Assert.Equal(OrchestratorEventKinds.ScannerVulnerabilityDetected, node["kind"]?.GetValue<string>());
Assert.Equal(JobEngineEventKinds.ScannerVulnerabilityDetected, node["kind"]?.GetValue<string>());
var payload = node["payload"]?.AsObject();
Assert.NotNull(payload);
@@ -297,10 +297,10 @@ public sealed class NotifierIngestionTests
[Fact]
public void SbomGeneratedEvent_SerializesForNotifier()
{
var orchestratorEvent = new OrchestratorEvent
var jobEngineEvent = new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerSbomGenerated,
Kind = JobEngineEventKinds.ScannerSbomGenerated,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.Parse("2025-12-07T10:00:00Z"),
@@ -331,11 +331,11 @@ public sealed class NotifierIngestionTests
}
};
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
Assert.Equal(OrchestratorEventKinds.ScannerSbomGenerated, node["kind"]?.GetValue<string>());
Assert.Equal(JobEngineEventKinds.ScannerSbomGenerated, node["kind"]?.GetValue<string>());
var payload = node["payload"]?.AsObject();
Assert.NotNull(payload);
@@ -349,12 +349,12 @@ public sealed class NotifierIngestionTests
[Fact]
public void AllEventKinds_HaveCorrectFormat()
{
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerReportReady);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerScanCompleted);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerScanStarted);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerScanFailed);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerSbomGenerated);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", OrchestratorEventKinds.ScannerVulnerabilityDetected);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerReportReady);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerScanCompleted);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerScanStarted);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerScanFailed);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerSbomGenerated);
Assert.Matches(@"^scanner\.event\.[a-z]+\.[a-z]+$", JobEngineEventKinds.ScannerVulnerabilityDetected);
}
[Trait("Category", TestCategories.Unit)]
@@ -373,8 +373,8 @@ public sealed class NotifierIngestionTests
ImmediateDispatch = false
};
var orchestratorEvent = CreateTestEvent(metadata);
var json = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var jobEngineEvent = CreateTestEvent(metadata);
var json = JobEngineEventSerializer.Serialize(jobEngineEvent);
var node = JsonNode.Parse(json)?.AsObject();
Assert.NotNull(node);
@@ -386,12 +386,12 @@ public sealed class NotifierIngestionTests
}
}
private static OrchestratorEvent CreateTestEvent(NotifierIngestionMetadata? notifier)
private static JobEngineEvent CreateTestEvent(NotifierIngestionMetadata? notifier)
{
return new OrchestratorEvent
return new JobEngineEvent
{
EventId = Guid.NewGuid(),
Kind = OrchestratorEventKinds.ScannerReportReady,
Kind = JobEngineEventKinds.ScannerReportReady,
Version = 1,
Tenant = "test-tenant",
OccurredAt = DateTimeOffset.UtcNow,

View File

@@ -23,26 +23,26 @@ public sealed class PlatformEventSamplesTests
[Trait("Category", TestCategories.Unit)]
[Theory]
[InlineData("scanner.event.report.ready@1.sample.json", OrchestratorEventKinds.ScannerReportReady)]
[InlineData("scanner.event.scan.completed@1.sample.json", OrchestratorEventKinds.ScannerScanCompleted)]
[InlineData("scanner.event.report.ready@1.sample.json", JobEngineEventKinds.ScannerReportReady)]
[InlineData("scanner.event.scan.completed@1.sample.json", JobEngineEventKinds.ScannerScanCompleted)]
public void PlatformEventSamplesStayCanonical(string fileName, string expectedKind)
{
var json = LoadSample(fileName);
var orchestratorEvent = DeserializeOrchestratorEvent(json, expectedKind);
var jobEngineEvent = DeserializeJobEngineEvent(json, expectedKind);
Assert.NotNull(orchestratorEvent);
Assert.Equal(expectedKind, orchestratorEvent.Kind);
Assert.Equal(1, orchestratorEvent.Version);
Assert.NotEqual(Guid.Empty, orchestratorEvent.EventId);
Assert.NotNull(orchestratorEvent.Payload);
Assert.NotNull(jobEngineEvent);
Assert.Equal(expectedKind, jobEngineEvent.Kind);
Assert.Equal(1, jobEngineEvent.Version);
Assert.NotEqual(Guid.Empty, jobEngineEvent.EventId);
Assert.NotNull(jobEngineEvent.Payload);
AssertReportConsistency(orchestratorEvent);
AssertSemanticEquality(json, orchestratorEvent);
AssertReportConsistency(jobEngineEvent);
AssertSemanticEquality(json, jobEngineEvent);
}
private static void AssertSemanticEquality(string originalJson, OrchestratorEvent orchestratorEvent)
private static void AssertSemanticEquality(string originalJson, JobEngineEvent jobEngineEvent)
{
var canonicalJson = OrchestratorEventSerializer.Serialize(orchestratorEvent);
var canonicalJson = JobEngineEventSerializer.Serialize(jobEngineEvent);
var originalNode = JsonNode.Parse(originalJson) ?? throw new InvalidOperationException("Sample JSON must not be null.");
var canonicalNode = JsonNode.Parse(canonicalJson) ?? throw new InvalidOperationException("Canonical JSON must not be null.");
@@ -101,9 +101,9 @@ public sealed class PlatformEventSamplesTests
return a.ToJsonString() == b.ToJsonString();
}
private static void AssertReportConsistency(OrchestratorEvent orchestratorEvent)
private static void AssertReportConsistency(JobEngineEvent jobEngineEvent)
{
switch (orchestratorEvent.Payload)
switch (jobEngineEvent.Payload)
{
case ReportReadyEventPayload ready:
Assert.Equal(ready.ReportId, ready.Report.ReportId);
@@ -143,7 +143,7 @@ public sealed class PlatformEventSamplesTests
}
break;
default:
throw new InvalidOperationException($"Unexpected payload type {orchestratorEvent.Payload.GetType().Name}.");
throw new InvalidOperationException($"Unexpected payload type {jobEngineEvent.Payload.GetType().Name}.");
}
}
@@ -160,7 +160,7 @@ public sealed class PlatformEventSamplesTests
Assert.Equal(report.Verdict, dsseReport.Verdict);
}
private static OrchestratorEvent DeserializeOrchestratorEvent(string json, string expectedKind)
private static JobEngineEvent DeserializeJobEngineEvent(string json, string expectedKind)
{
var root = JsonNode.Parse(json)?.AsObject() ?? throw new InvalidOperationException("Sample JSON must not be null.");
@@ -171,10 +171,10 @@ public sealed class PlatformEventSamplesTests
StringComparer.Ordinal).ToImmutableSortedDictionary(StringComparer.Ordinal)
: null;
OrchestratorEventScope? scope = null;
JobEngineEventScope? scope = null;
if (root["scope"] is JsonObject scopeObj)
{
scope = new OrchestratorEventScope
scope = new JobEngineEventScope
{
Namespace = scopeObj["namespace"]?.GetValue<string>(),
Repo = scopeObj["repo"]?.GetValue<string>() ?? string.Empty,
@@ -185,11 +185,11 @@ public sealed class PlatformEventSamplesTests
}
var payloadNode = root["payload"] ?? throw new InvalidOperationException("Payload node missing.");
OrchestratorEventPayload payload = expectedKind switch
JobEngineEventPayload payload = expectedKind switch
{
OrchestratorEventKinds.ScannerReportReady => payloadNode.Deserialize<ReportReadyEventPayload>(SerializerOptions)
JobEngineEventKinds.ScannerReportReady => payloadNode.Deserialize<ReportReadyEventPayload>(SerializerOptions)
?? throw new InvalidOperationException("Unable to deserialize report ready payload."),
OrchestratorEventKinds.ScannerScanCompleted => payloadNode.Deserialize<ScanCompletedEventPayload>(SerializerOptions)
JobEngineEventKinds.ScannerScanCompleted => payloadNode.Deserialize<ScanCompletedEventPayload>(SerializerOptions)
?? throw new InvalidOperationException("Unable to deserialize scan completed payload."),
_ => throw new InvalidOperationException("Unexpected event kind.")
};
@@ -204,7 +204,7 @@ public sealed class PlatformEventSamplesTests
throw new InvalidOperationException("ReportId was not parsed from scan completed payload.");
}
return new OrchestratorEvent
return new JobEngineEvent
{
EventId = Guid.Parse(root["eventId"]!.GetValue<string>()),
Kind = root["kind"]!.GetValue<string>(),

View File

@@ -133,7 +133,7 @@ public sealed class ReportEventDispatcherTests
Assert.Equal(2, publisher.Events.Count);
var readyEvent = Assert.Single(publisher.Events, evt => evt.Kind == OrchestratorEventKinds.ScannerReportReady);
var readyEvent = Assert.Single(publisher.Events, evt => evt.Kind == JobEngineEventKinds.ScannerReportReady);
Assert.Equal("tenant-alpha", readyEvent.Tenant);
Assert.Equal("scanner.event.report.ready:tenant-alpha:report-abc", readyEvent.IdempotencyKey);
Assert.Equal("api", readyEvent.Scope?.Repo);
@@ -156,7 +156,7 @@ public sealed class ReportEventDispatcherTests
Assert.Equal(envelope.Payload, readyPayload.Dsse?.Payload);
Assert.Equal("blocked", readyPayload.Report.Verdict);
var scanEvent = Assert.Single(publisher.Events, evt => evt.Kind == OrchestratorEventKinds.ScannerScanCompleted);
var scanEvent = Assert.Single(publisher.Events, evt => evt.Kind == JobEngineEventKinds.ScannerScanCompleted);
Assert.Equal("tenant-alpha", scanEvent.Tenant);
Assert.Equal("scanner.event.scan.completed:tenant-alpha:report-abc", scanEvent.IdempotencyKey);
Assert.Equal("sha256:feedface", scanEvent.Scope?.Digest);
@@ -446,7 +446,7 @@ public sealed class ReportEventDispatcherTests
await dispatcher.PublishAsync(request, preview, document, envelope, context, cancellationToken);
var readyEvent = Assert.Single(publisher.Events, evt => evt.Kind == OrchestratorEventKinds.ScannerReportReady);
var readyEvent = Assert.Single(publisher.Events, evt => evt.Kind == JobEngineEventKinds.ScannerReportReady);
var links = Assert.IsType<ReportReadyEventPayload>(readyEvent.Payload).Links;
Assert.Equal("https://scanner.example/console/insights/report-abc", links.Report?.Ui);
@@ -459,9 +459,9 @@ public sealed class ReportEventDispatcherTests
private sealed class RecordingEventPublisher : IPlatformEventPublisher
{
public List<OrchestratorEvent> Events { get; } = new();
public List<JobEngineEvent> Events { get; } = new();
public Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default)
public Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default)
{
Events.Add(@event);
return Task.CompletedTask;

View File

@@ -210,8 +210,8 @@ rules:
response.EnsureSuccessStatusCode();
Assert.Equal(2, recorder.Events.Count);
var ready = recorder.Events.Single(evt => evt.Kind == OrchestratorEventKinds.ScannerReportReady);
var completed = recorder.Events.Single(evt => evt.Kind == OrchestratorEventKinds.ScannerScanCompleted);
var ready = recorder.Events.Single(evt => evt.Kind == JobEngineEventKinds.ScannerReportReady);
var completed = recorder.Events.Single(evt => evt.Kind == JobEngineEventKinds.ScannerScanCompleted);
Assert.Equal("default", ready.Tenant);
Assert.Equal("default", completed.Tenant);
@@ -255,9 +255,9 @@ rules:
private sealed class RecordingPlatformEventPublisher : IPlatformEventPublisher
{
public List<OrchestratorEvent> Events { get; } = new();
public List<JobEngineEvent> Events { get; } = new();
public Task PublishAsync(OrchestratorEvent @event, CancellationToken cancellationToken = default)
public Task PublishAsync(JobEngineEvent @event, CancellationToken cancellationToken = default)
{
Events.Add(@event);
return Task.CompletedTask;

View File

@@ -1,24 +1,17 @@
// =============================================================================
// ScoreReplayEndpointsTests.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-013 - Integration tests for score replay endpoint
// Sprint: SPRINT_20260304_303_Scanner_score_replay_contract_and_formula_alignment
// Description: Integration tests for score replay endpoint contracts and deterministic metadata.
// =============================================================================
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.DependencyInjection;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
/// Integration tests for score replay endpoints.
/// Per Sprint 3401.0002.0001 - Score Replay & Proof Bundle.
/// </summary>
[Trait("Category", "Integration")]
[Trait("Sprint", "3401.0002")]
public sealed class ScoreReplayEndpointsTests : IAsyncLifetime
{
private TestSurfaceSecretsScope _secrets = null!;
@@ -44,287 +37,256 @@ public sealed class ScoreReplayEndpointsTests : IAsyncLifetime
_secrets.Dispose();
}
#region POST /score/{scanId}/replay Tests
[Fact(DisplayName = "POST /score/{scanId}/replay returns 404 for unknown scan")]
public async Task ReplayScore_UnknownScan_Returns404()
[Fact]
public async Task ReplayScore_UnknownScan_PrimaryRoute_Returns404()
{
// Arrange
var unknownScanId = Guid.NewGuid().ToString();
var unknownScanId = Guid.NewGuid().ToString("N");
// Act
var response = await _client.PostAsync($"/api/v1/score/{unknownScanId}/replay", null);
var response = await _client.PostAsync(
$"/api/v1/scans/{unknownScanId}/score/replay",
content: null,
TestContext.Current.CancellationToken);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact(DisplayName = "POST /score/{scanId}/replay returns result for valid scan")]
public async Task ReplayScore_ValidScan_ReturnsResult()
[Fact]
public async Task ReplayScore_PrimaryRoute_ReturnsFactorizedContract()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Act
var response = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var replay = await ReplayAsync(scanId, useLegacyRoute: false);
replay.Score.Should().BeInRange(0.0, 1.0);
replay.RootHash.Should().StartWith("sha256:");
replay.BundleUri.Should().NotBeNullOrWhiteSpace();
replay.ManifestHash.Should().StartWith("sha256:");
replay.ManifestDigest.Should().StartWith("sha256:");
replay.CanonicalInputHash.Should().StartWith("sha256:");
replay.CanonicalInputPayload.Should().NotBeNullOrWhiteSpace();
replay.SeedHex.Should().NotBeNullOrWhiteSpace();
replay.VerificationStatus.Should().Be("verified");
replay.Deterministic.Should().BeTrue();
replay.Factors.Should().NotBeNullOrEmpty();
replay.Factors.Should().OnlyContain(f => !string.IsNullOrWhiteSpace(f.Name));
replay.Factors.Should().OnlyContain(f => f.Weight >= 0 && f.Weight <= 1);
}
[Fact]
public async Task ReplayScore_PrimaryAndLegacyRoutes_AreCompatibleAndDeterministic()
{
var scanId = await CreateTestScanAsync();
var primary = await ReplayAsync(scanId, useLegacyRoute: false);
var legacy = await ReplayAsync(scanId, useLegacyRoute: true);
primary.Score.Should().Be(legacy.Score);
primary.RootHash.Should().Be(legacy.RootHash);
primary.CanonicalInputHash.Should().Be(legacy.CanonicalInputHash);
primary.ManifestDigest.Should().Be(legacy.ManifestDigest);
}
[Fact]
public async Task GetBundle_PrimaryAndLegacyRoutes_ReturnSameBundle()
{
var scanId = await CreateTestScanAsync();
var replay = await ReplayAsync(scanId, useLegacyRoute: false);
var primaryResponse = await _client.GetFromJsonAsync<ScoreBundleResponse>(
$"/api/v1/scans/{scanId}/score/bundle",
TestContext.Current.CancellationToken);
var legacyResponse = await _client.GetFromJsonAsync<ScoreBundleResponse>(
$"/api/v1/score/{scanId}/bundle",
TestContext.Current.CancellationToken);
primaryResponse.Should().NotBeNull();
legacyResponse.Should().NotBeNull();
primaryResponse!.RootHash.Should().Be(replay.RootHash);
primaryResponse.RootHash.Should().Be(legacyResponse!.RootHash);
primaryResponse.ManifestDsseValid.Should().BeTrue();
legacyResponse.ManifestDsseValid.Should().BeTrue();
}
[Fact]
public async Task VerifyBundle_WrongCanonicalHash_ReturnsInvalid()
{
var scanId = await CreateTestScanAsync();
var replay = await ReplayAsync(scanId, useLegacyRoute: false);
var response = await _client.PostAsJsonAsync(
$"/api/v1/scans/{scanId}/score/verify",
new ScoreVerifyRequest(
ExpectedRootHash: replay.RootHash,
ExpectedCanonicalInputHash: "sha256:deadbeef"),
TestContext.Current.CancellationToken);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var verify = await response.Content.ReadFromJsonAsync<ScoreVerifyResponse>(
cancellationToken: TestContext.Current.CancellationToken);
var result = await response.Content.ReadFromJsonAsync<ScoreReplayResponse>();
verify.Should().NotBeNull();
verify!.Valid.Should().BeFalse();
verify.ComputedRootHash.Should().Be(replay.RootHash);
verify.CanonicalInputHashValid.Should().BeFalse();
verify.ExpectedCanonicalInputHash.Should().Be("sha256:deadbeef");
verify.ErrorMessage.Should().NotBeNullOrWhiteSpace();
}
[Fact]
public async Task VerifyBundle_TamperedCanonicalPayload_ReturnsInvalid()
{
var scanId = await CreateTestScanAsync();
var replay = await ReplayAsync(scanId, useLegacyRoute: false);
var response = await _client.PostAsJsonAsync(
$"/api/v1/scans/{scanId}/score/verify",
new ScoreVerifyRequest(
ExpectedRootHash: replay.RootHash,
CanonicalInputPayload: replay.CanonicalInputPayload + " "),
TestContext.Current.CancellationToken);
response.StatusCode.Should().Be(HttpStatusCode.OK);
var verify = await response.Content.ReadFromJsonAsync<ScoreVerifyResponse>(
cancellationToken: TestContext.Current.CancellationToken);
verify.Should().NotBeNull();
verify!.Valid.Should().BeFalse();
verify.CanonicalInputHashValid.Should().BeFalse();
verify.CanonicalInputHash.Should().NotBe(replay.CanonicalInputHash);
verify.ErrorMessage.Should().NotBeNullOrWhiteSpace();
}
[Fact]
public async Task ScoreHistory_PrimaryAndLegacyRoutes_ExposeFactorVectors()
{
var scanId = await CreateTestScanAsync();
var replay1 = await ReplayAsync(scanId, useLegacyRoute: false);
var replay2 = await ReplayAsync(
scanId,
useLegacyRoute: false,
request: new ScoreReplayRequest(FreezeTimestamp: DateTimeOffset.UtcNow.AddMinutes(7)));
replay1.RootHash.Should().NotBe(replay2.RootHash);
var primaryHistory = await _client.GetFromJsonAsync<List<ScoreHistoryResponseItem>>(
$"/api/v1/scans/{scanId}/score/history",
TestContext.Current.CancellationToken);
var legacyHistory = await _client.GetFromJsonAsync<List<ScoreHistoryResponseItem>>(
$"/api/v1/score/{scanId}/history",
TestContext.Current.CancellationToken);
primaryHistory.Should().NotBeNull();
legacyHistory.Should().NotBeNull();
primaryHistory!.Should().Contain(h => h.RootHash == replay1.RootHash);
primaryHistory.Should().Contain(h => h.RootHash == replay2.RootHash);
primaryHistory.Should().OnlyContain(h => h.CanonicalInputHash.StartsWith("sha256:", StringComparison.Ordinal));
primaryHistory.Should().OnlyContain(h => h.ManifestDigest.StartsWith("sha256:", StringComparison.Ordinal));
primaryHistory.Should().OnlyContain(h => h.Factors.Count > 0);
primaryHistory.Should().BeInDescendingOrder(h => h.ReplayedAt);
legacyHistory!.Select(h => h.RootHash).Should().BeEquivalentTo(primaryHistory.Select(h => h.RootHash));
}
private async Task<ScoreReplayResponse> ReplayAsync(
string scanId,
bool useLegacyRoute,
ScoreReplayRequest? request = null)
{
var route = useLegacyRoute
? $"/api/v1/score/{scanId}/replay"
: $"/api/v1/scans/{scanId}/score/replay";
HttpResponseMessage response;
if (request is null)
{
response = await _client.PostAsync(route, null, TestContext.Current.CancellationToken);
}
else
{
response = await _client.PostAsJsonAsync(route, request, TestContext.Current.CancellationToken);
}
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<ScoreReplayResponse>(
cancellationToken: TestContext.Current.CancellationToken);
result.Should().NotBeNull();
result!.Score.Should().BeInRange(0.0, 1.0);
result.RootHash.Should().StartWith("sha256:");
result.BundleUri.Should().NotBeNullOrEmpty();
result.Deterministic.Should().BeTrue();
return result!;
}
[Fact(DisplayName = "POST /score/{scanId}/replay is deterministic")]
public async Task ReplayScore_IsDeterministic()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Act - replay twice
var response1 = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var response2 = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
// Assert
response1.StatusCode.Should().Be(HttpStatusCode.OK);
response2.StatusCode.Should().Be(HttpStatusCode.OK);
var result1 = await response1.Content.ReadFromJsonAsync<ScoreReplayResponse>();
var result2 = await response2.Content.ReadFromJsonAsync<ScoreReplayResponse>();
result1!.Score.Should().Be(result2!.Score, "Score should be deterministic");
result1.RootHash.Should().Be(result2.RootHash, "RootHash should be deterministic");
}
[Fact(DisplayName = "POST /score/{scanId}/replay with specific manifest hash")]
public async Task ReplayScore_WithManifestHash_UsesSpecificManifest()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Get the manifest hash from the first replay
var firstResponse = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var firstResult = await firstResponse.Content.ReadFromJsonAsync<ScoreReplayResponse>();
var manifestHash = firstResult!.ManifestHash;
// Act - replay with specific manifest hash
var response = await _client.PostAsJsonAsync(
$"/api/v1/score/{scanId}/replay",
new { manifestHash });
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<ScoreReplayResponse>();
result!.ManifestHash.Should().Be(manifestHash);
}
#endregion
#region GET /score/{scanId}/bundle Tests
[Fact(DisplayName = "GET /score/{scanId}/bundle returns 404 for unknown scan")]
public async Task GetBundle_UnknownScan_Returns404()
{
// Arrange
var unknownScanId = Guid.NewGuid().ToString();
// Act
var response = await _client.GetAsync($"/api/v1/score/{unknownScanId}/bundle");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact(DisplayName = "GET /score/{scanId}/bundle returns bundle after replay")]
public async Task GetBundle_AfterReplay_ReturnsBundle()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Create a replay first
var replayResponse = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
replayResponse.EnsureSuccessStatusCode();
var replayResult = await replayResponse.Content.ReadFromJsonAsync<ScoreReplayResponse>();
// Act
var response = await _client.GetAsync($"/api/v1/score/{scanId}/bundle");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var bundle = await response.Content.ReadFromJsonAsync<ProofBundleResponse>();
bundle.Should().NotBeNull();
bundle!.RootHash.Should().Be(replayResult!.RootHash);
bundle.ManifestDsseValid.Should().BeTrue();
}
[Fact(DisplayName = "GET /score/{scanId}/bundle with specific rootHash")]
public async Task GetBundle_WithRootHash_ReturnsSpecificBundle()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Create a replay to get a root hash
var replayResponse = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var replayResult = await replayResponse.Content.ReadFromJsonAsync<ScoreReplayResponse>();
var rootHash = replayResult!.RootHash;
// Act
var response = await _client.GetAsync($"/api/v1/score/{scanId}/bundle?rootHash={rootHash}");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var bundle = await response.Content.ReadFromJsonAsync<ProofBundleResponse>();
bundle!.RootHash.Should().Be(rootHash);
}
#endregion
#region POST /score/{scanId}/verify Tests
[Fact(DisplayName = "POST /score/{scanId}/verify returns valid for correct root hash")]
public async Task VerifyBundle_CorrectRootHash_ReturnsValid()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Create a replay
var replayResponse = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var replayResult = await replayResponse.Content.ReadFromJsonAsync<ScoreReplayResponse>();
// Act
var response = await _client.PostAsJsonAsync(
$"/api/v1/score/{scanId}/verify",
new { expectedRootHash = replayResult!.RootHash });
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<BundleVerifyResponse>();
result!.Valid.Should().BeTrue();
result.ComputedRootHash.Should().Be(replayResult.RootHash);
}
[Fact(DisplayName = "POST /score/{scanId}/verify returns invalid for wrong root hash")]
public async Task VerifyBundle_WrongRootHash_ReturnsInvalid()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Create a replay first
await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
// Act
var response = await _client.PostAsJsonAsync(
$"/api/v1/score/{scanId}/verify",
new { expectedRootHash = "sha256:wrong_hash_value" });
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<BundleVerifyResponse>();
result!.Valid.Should().BeFalse();
}
[Fact(DisplayName = "POST /score/{scanId}/verify validates manifest signature")]
public async Task VerifyBundle_ValidatesManifestSignature()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Create a replay
var replayResponse = await _client.PostAsync($"/api/v1/score/{scanId}/replay", null);
var replayResult = await replayResponse.Content.ReadFromJsonAsync<ScoreReplayResponse>();
// Act
var response = await _client.PostAsJsonAsync(
$"/api/v1/score/{scanId}/verify",
new { expectedRootHash = replayResult!.RootHash });
// Assert
var result = await response.Content.ReadFromJsonAsync<BundleVerifyResponse>();
result!.ManifestValid.Should().BeTrue();
}
#endregion
#region Concurrency Tests
[Fact(DisplayName = "Concurrent replays produce same result")]
public async Task ConcurrentReplays_ProduceSameResult()
{
// Arrange
var scanId = await CreateTestScanAsync();
// Act - concurrent replays
var tasks = Enumerable.Range(0, 5)
.Select(_ => _client.PostAsync($"/api/v1/score/{scanId}/replay", null))
.ToList();
var responses = await Task.WhenAll(tasks);
// Assert
var results = new List<ScoreReplayResponse>();
foreach (var response in responses)
{
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<ScoreReplayResponse>();
results.Add(result!);
}
// All results should have the same score and root hash
var firstResult = results[0];
foreach (var result in results.Skip(1))
{
result.Score.Should().Be(firstResult.Score);
result.RootHash.Should().Be(firstResult.RootHash);
}
}
#endregion
#region Helper Methods
private async Task<string> CreateTestScanAsync()
{
var submitResponse = await _client.PostAsJsonAsync("/api/v1/scans", new
{
image = new { digest = "sha256:test_" + Guid.NewGuid().ToString("N")[..8] }
});
image = new { digest = "sha256:test_" + Guid.NewGuid().ToString("N")[..12] }
}, TestContext.Current.CancellationToken);
submitResponse.EnsureSuccessStatusCode();
var submitPayload = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>();
var submitPayload = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>(
cancellationToken: TestContext.Current.CancellationToken);
submitPayload.Should().NotBeNull();
return submitPayload!.ScanId;
}
#endregion
private sealed record ScoreReplayRequest(
string? ManifestHash = null,
DateTimeOffset? FreezeTimestamp = null);
#region Response Models
private sealed record ScoreReplayFactor(
string Name,
double Weight,
double Raw,
double Weighted,
string Source);
private sealed record ScoreReplayResponse(
double Score,
string RootHash,
string BundleUri,
string ManifestHash,
string ManifestDigest,
string CanonicalInputHash,
string CanonicalInputPayload,
string SeedHex,
List<ScoreReplayFactor> Factors,
string VerificationStatus,
DateTimeOffset ReplayedAt,
bool Deterministic);
private sealed record ProofBundleResponse(
private sealed record ScoreBundleResponse(
string ScanId,
string RootHash,
string BundleUri,
bool ManifestDsseValid,
DateTimeOffset CreatedAt);
private sealed record BundleVerifyResponse(
private sealed record ScoreVerifyRequest(
string ExpectedRootHash,
string? BundleUri = null,
string? ExpectedCanonicalInputHash = null,
string? CanonicalInputPayload = null);
private sealed record ScoreVerifyResponse(
bool Valid,
string ComputedRootHash,
string ExpectedRootHash,
bool ManifestValid,
bool LedgerValid,
bool CanonicalInputHashValid,
string? ExpectedCanonicalInputHash,
string? CanonicalInputHash,
DateTimeOffset VerifiedAtUtc,
string? ErrorMessage);
private sealed record ScanSubmitResponse(string ScanId);
private sealed record ScoreHistoryResponseItem(
string RootHash,
DateTimeOffset ReplayedAt,
double Score,
string CanonicalInputHash,
string ManifestDigest,
List<ScoreReplayFactor> Factors);
#endregion
private sealed record ScanSubmitResponse(string ScanId);
}