save progress

This commit is contained in: master
2026-01-09 18:27:36 +02:00
parent e608752924
commit a21d3dbc1f
361 changed files with 63068 additions and 1192 deletions

View File

@@ -9,6 +9,7 @@ using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Chat;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Inference;
using StellaOps.AdvisoryAI.Metrics;
@@ -106,6 +107,17 @@ public static class ServiceCollectionExtensions
services.Replace(ServiceDescriptor.Singleton<IAdvisoryOutputStore, FileSystemAdvisoryOutputStore>());
services.TryAddSingleton<AdvisoryAiMetrics>();
// Chat services (SPRINT_20260107_006_003 CH-005)
services.AddOptions<ConversationOptions>()
.Bind(configuration.GetSection("AdvisoryAI:Chat"))
.ValidateOnStart();
services.TryAddSingleton<IGuidGenerator, DefaultGuidGenerator>();
services.TryAddSingleton<IConversationService, ConversationService>();
services.TryAddSingleton<ChatPromptAssembler>();
services.TryAddSingleton<ChatResponseStreamer>();
services.TryAddSingleton<GroundingValidator>();
services.TryAddSingleton<ActionProposalParser>();
return services;
}
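The ConversationOptions binding above pulls its values from the "AdvisoryAI:Chat" configuration section. As a rough sketch, assuming the key names mirror the ConversationOptions properties exercised in ConversationServiceTests (MaxTurnsPerConversation, ConversationRetention), an equivalent configuration could be built like this:

// Sketch only: key names are assumed to match the ConversationOptions properties
// used in ConversationServiceTests; the shipped appsettings schema may differ.
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["AdvisoryAI:Chat:MaxTurnsPerConversation"] = "50",
        ["AdvisoryAI:Chat:ConversationRetention"] = "7.00:00:00" // bound as TimeSpan.FromDays(7)
    })
    .Build();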

View File

@@ -0,0 +1,398 @@
// <copyright file="ChatContracts.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.AdvisoryAI.Chat;
namespace StellaOps.AdvisoryAI.WebService.Contracts;
/// <summary>
/// Request to create a new conversation.
/// Sprint: SPRINT_20260107_006_003 Task CH-005
/// </summary>
public sealed record CreateConversationRequest
{
/// <summary>
/// Gets the tenant identifier.
/// </summary>
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
/// <summary>
/// Gets the optional initial context for the conversation.
/// </summary>
[JsonPropertyName("context")]
public ConversationContextRequest? Context { get; init; }
/// <summary>
/// Gets optional metadata key-value pairs.
/// </summary>
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Request for conversation context initialization.
/// </summary>
public sealed record ConversationContextRequest
{
/// <summary>
/// Gets the current CVE ID being discussed.
/// </summary>
[JsonPropertyName("currentCveId")]
public string? CurrentCveId { get; init; }
/// <summary>
/// Gets the current component PURL.
/// </summary>
[JsonPropertyName("currentComponent")]
public string? CurrentComponent { get; init; }
/// <summary>
/// Gets the current image digest.
/// </summary>
[JsonPropertyName("currentImageDigest")]
public string? CurrentImageDigest { get; init; }
/// <summary>
/// Gets the scan ID in context.
/// </summary>
[JsonPropertyName("scanId")]
public string? ScanId { get; init; }
/// <summary>
/// Gets the SBOM ID in context.
/// </summary>
[JsonPropertyName("sbomId")]
public string? SbomId { get; init; }
}
/// <summary>
/// Request to add a turn to an existing conversation.
/// </summary>
public sealed record AddTurnRequest
{
/// <summary>
/// Gets the user message content.
/// </summary>
[JsonPropertyName("content")]
public required string Content { get; init; }
/// <summary>
/// Gets optional metadata for this turn.
/// </summary>
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
/// <summary>
/// Gets whether to stream the response as Server-Sent Events.
/// </summary>
[JsonPropertyName("stream")]
public bool Stream { get; init; } = false;
}
/// <summary>
/// Response for a created conversation.
/// </summary>
public sealed record ConversationResponse
{
/// <summary>
/// Gets the conversation ID.
/// </summary>
[JsonPropertyName("conversationId")]
public required string ConversationId { get; init; }
/// <summary>
/// Gets the tenant ID.
/// </summary>
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
/// <summary>
/// Gets the user ID.
/// </summary>
[JsonPropertyName("userId")]
public required string UserId { get; init; }
/// <summary>
/// Gets the creation timestamp.
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Gets the last update timestamp.
/// </summary>
[JsonPropertyName("updatedAt")]
public required DateTimeOffset UpdatedAt { get; init; }
/// <summary>
/// Gets the conversation turns.
/// </summary>
[JsonPropertyName("turns")]
public required IReadOnlyList<ConversationTurnResponse> Turns { get; init; }
/// <summary>
/// Creates a response from a conversation.
/// </summary>
public static ConversationResponse FromConversation(Conversation conversation) => new()
{
ConversationId = conversation.ConversationId,
TenantId = conversation.TenantId,
UserId = conversation.UserId,
CreatedAt = conversation.CreatedAt,
UpdatedAt = conversation.UpdatedAt,
Turns = conversation.Turns.Select(ConversationTurnResponse.FromTurn).ToList()
};
}
/// <summary>
/// Response for a conversation turn.
/// </summary>
public sealed record ConversationTurnResponse
{
/// <summary>
/// Gets the turn ID.
/// </summary>
[JsonPropertyName("turnId")]
public required string TurnId { get; init; }
/// <summary>
/// Gets the role (user, assistant, system).
/// </summary>
[JsonPropertyName("role")]
public required string Role { get; init; }
/// <summary>
/// Gets the message content.
/// </summary>
[JsonPropertyName("content")]
public required string Content { get; init; }
/// <summary>
/// Gets the timestamp.
/// </summary>
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Gets the evidence links in this turn.
/// </summary>
[JsonPropertyName("evidenceLinks")]
public IReadOnlyList<EvidenceLinkResponse>? EvidenceLinks { get; init; }
/// <summary>
/// Gets the proposed actions in this turn.
/// </summary>
[JsonPropertyName("proposedActions")]
public IReadOnlyList<ProposedActionResponse>? ProposedActions { get; init; }
/// <summary>
/// Creates a response from a turn.
/// </summary>
public static ConversationTurnResponse FromTurn(ConversationTurn turn) => new()
{
TurnId = turn.TurnId,
Role = turn.Role.ToString().ToLowerInvariant(),
Content = turn.Content,
Timestamp = turn.Timestamp,
EvidenceLinks = turn.EvidenceLinks.IsEmpty
? null
: turn.EvidenceLinks.Select(EvidenceLinkResponse.FromLink).ToList(),
ProposedActions = turn.ProposedActions.IsEmpty
? null
: turn.ProposedActions.Select(ProposedActionResponse.FromAction).ToList()
};
}
/// <summary>
/// Response for an evidence link.
/// </summary>
public sealed record EvidenceLinkResponse
{
/// <summary>
/// Gets the link type (sbom, dsse, callGraph, reachability, etc.).
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Gets the URI.
/// </summary>
[JsonPropertyName("uri")]
public required string Uri { get; init; }
/// <summary>
/// Gets the display label.
/// </summary>
[JsonPropertyName("label")]
public string? Label { get; init; }
/// <summary>
/// Gets the confidence score.
/// </summary>
[JsonPropertyName("confidence")]
public double? Confidence { get; init; }
/// <summary>
/// Creates a response from an evidence link.
/// </summary>
public static EvidenceLinkResponse FromLink(EvidenceLink link) => new()
{
Type = link.Type.ToString(),
Uri = link.Uri,
Label = link.Label,
Confidence = link.Confidence
};
}
/// <summary>
/// Response for a proposed action.
/// </summary>
public sealed record ProposedActionResponse
{
/// <summary>
/// Gets the action type (approve, quarantine, defer, generate_manifest, create_vex).
/// </summary>
[JsonPropertyName("actionType")]
public required string ActionType { get; init; }
/// <summary>
/// Gets the action label.
/// </summary>
[JsonPropertyName("label")]
public required string Label { get; init; }
/// <summary>
/// Gets the policy gate for this action.
/// </summary>
[JsonPropertyName("policyGate")]
public string? PolicyGate { get; init; }
/// <summary>
/// Gets whether this action requires confirmation.
/// </summary>
[JsonPropertyName("requiresConfirmation")]
public bool RequiresConfirmation { get; init; }
/// <summary>
/// Creates a response from a proposed action.
/// </summary>
public static ProposedActionResponse FromAction(ProposedAction action) => new()
{
ActionType = action.ActionType,
Label = action.Label,
PolicyGate = action.PolicyGate,
RequiresConfirmation = action.RequiresConfirmation
};
}
/// <summary>
/// Response for the assistant's turn (non-streaming).
/// </summary>
public sealed record AssistantTurnResponse
{
/// <summary>
/// Gets the turn ID.
/// </summary>
[JsonPropertyName("turnId")]
public required string TurnId { get; init; }
/// <summary>
/// Gets the assistant's response content.
/// </summary>
[JsonPropertyName("content")]
public required string Content { get; init; }
/// <summary>
/// Gets the timestamp.
/// </summary>
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Gets evidence links found in the response.
/// </summary>
[JsonPropertyName("evidenceLinks")]
public IReadOnlyList<EvidenceLinkResponse>? EvidenceLinks { get; init; }
/// <summary>
/// Gets proposed actions in the response.
/// </summary>
[JsonPropertyName("proposedActions")]
public IReadOnlyList<ProposedActionResponse>? ProposedActions { get; init; }
/// <summary>
/// Gets the grounding score (0.0-1.0).
/// </summary>
[JsonPropertyName("groundingScore")]
public double GroundingScore { get; init; }
/// <summary>
/// Gets the token count.
/// </summary>
[JsonPropertyName("tokenCount")]
public int TokenCount { get; init; }
/// <summary>
/// Gets the processing duration in milliseconds.
/// </summary>
[JsonPropertyName("durationMs")]
public long DurationMs { get; init; }
}
/// <summary>
/// Response for listing conversations.
/// </summary>
public sealed record ConversationListResponse
{
/// <summary>
/// Gets the conversations.
/// </summary>
[JsonPropertyName("conversations")]
public required IReadOnlyList<ConversationSummary> Conversations { get; init; }
/// <summary>
/// Gets the total count.
/// </summary>
[JsonPropertyName("totalCount")]
public int TotalCount { get; init; }
}
/// <summary>
/// Summary of a conversation for listing.
/// </summary>
public sealed record ConversationSummary
{
/// <summary>
/// Gets the conversation ID.
/// </summary>
[JsonPropertyName("conversationId")]
public required string ConversationId { get; init; }
/// <summary>
/// Gets the creation timestamp.
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Gets the last update timestamp.
/// </summary>
[JsonPropertyName("updatedAt")]
public required DateTimeOffset UpdatedAt { get; init; }
/// <summary>
/// Gets the turn count.
/// </summary>
[JsonPropertyName("turnCount")]
public int TurnCount { get; init; }
/// <summary>
/// Gets a preview of the first user message.
/// </summary>
[JsonPropertyName("preview")]
public string? Preview { get; init; }
}
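For orientation, a minimal sketch of the wire shape the [JsonPropertyName] attributes above produce when one of these contracts is serialized with System.Text.Json (the values are illustrative only):

using System.Text.Json;

var request = new CreateConversationRequest
{
    TenantId = "tenant-1",
    Context = new ConversationContextRequest { CurrentCveId = "CVE-2023-44487" }
};

// Keys follow the attributes above (camelCase), e.g.:
// {"tenantId":"tenant-1","context":{"currentCveId":"CVE-2023-44487", ...},"metadata":null}
var json = JsonSerializer.Serialize(request);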

View File

@@ -1,3 +1,4 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Net;
@@ -10,6 +11,7 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Chat;
using StellaOps.AdvisoryAI.Diagnostics;
using StellaOps.AdvisoryAI.Explanation;
using StellaOps.AdvisoryAI.Hosting;
@@ -161,6 +163,22 @@ app.MapPost("/v1/advisory-ai/remediate", HandleRemediate)
app.MapGet("/v1/advisory-ai/rate-limits", HandleGetRateLimits)
.RequireRateLimiting("advisory-ai");
// Chat endpoints (SPRINT_20260107_006_003 CH-005)
app.MapPost("/v1/advisory-ai/conversations", HandleCreateConversation)
.RequireRateLimiting("advisory-ai");
app.MapGet("/v1/advisory-ai/conversations/{conversationId}", HandleGetConversation)
.RequireRateLimiting("advisory-ai");
app.MapPost("/v1/advisory-ai/conversations/{conversationId}/turns", HandleAddTurn)
.RequireRateLimiting("advisory-ai");
app.MapDelete("/v1/advisory-ai/conversations/{conversationId}", HandleDeleteConversation)
.RequireRateLimiting("advisory-ai");
app.MapGet("/v1/advisory-ai/conversations", HandleListConversations)
.RequireRateLimiting("advisory-ai");
// Refresh Router endpoint cache
app.TryRefreshStellaRouterEndpoints(routerOptions);
@@ -926,6 +944,245 @@ static Task<IResult> HandleGetRateLimits(
return Task.FromResult(Results.Ok(response));
}
// Chat endpoint handlers (SPRINT_20260107_006_003 CH-005)
static async Task<IResult> HandleCreateConversation(
HttpContext httpContext,
StellaOps.AdvisoryAI.WebService.Contracts.CreateConversationRequest request,
IConversationService conversationService,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.create_conversation", ActivityKind.Server);
activity?.SetTag("advisory.tenant_id", request.TenantId);
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
// Get user ID from header
var userId = httpContext.Request.Headers.TryGetValue("X-StellaOps-User", out var userHeader)
? userHeader.ToString()
: "anonymous";
var conversationRequest = new ConversationRequest
{
TenantId = request.TenantId,
UserId = userId,
InitialContext = request.Context is not null
? new ConversationContext
{
CurrentCveId = request.Context.CurrentCveId,
CurrentComponent = request.Context.CurrentComponent,
CurrentImageDigest = request.Context.CurrentImageDigest,
ScanId = request.Context.ScanId,
SbomId = request.Context.SbomId
}
: null,
Metadata = request.Metadata?.ToImmutableDictionary()
};
var conversation = await conversationService.CreateAsync(conversationRequest, cancellationToken).ConfigureAwait(false);
activity?.SetTag("advisory.conversation_id", conversation.ConversationId);
return Results.Created(
$"/v1/advisory-ai/conversations/{conversation.ConversationId}",
StellaOps.AdvisoryAI.WebService.Contracts.ConversationResponse.FromConversation(conversation));
}
static async Task<IResult> HandleGetConversation(
HttpContext httpContext,
string conversationId,
IConversationService conversationService,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.get_conversation", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
var conversation = await conversationService.GetAsync(conversationId, cancellationToken).ConfigureAwait(false);
if (conversation is null)
{
return Results.NotFound(new { error = $"Conversation '{conversationId}' not found" });
}
return Results.Ok(StellaOps.AdvisoryAI.WebService.Contracts.ConversationResponse.FromConversation(conversation));
}
static async Task<IResult> HandleAddTurn(
HttpContext httpContext,
string conversationId,
StellaOps.AdvisoryAI.WebService.Contracts.AddTurnRequest request,
IConversationService conversationService,
ChatPromptAssembler? promptAssembler,
ChatResponseStreamer? responseStreamer,
GroundingValidator? groundingValidator,
ActionProposalParser? actionParser,
TimeProvider timeProvider,
ILogger<Program> logger,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.add_turn", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
activity?.SetTag("advisory.stream", request.Stream);
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
var startTime = timeProvider.GetUtcNow();
// Add user turn
try
{
var userTurnRequest = new TurnRequest
{
Role = TurnRole.User,
Content = request.Content,
Metadata = request.Metadata?.ToImmutableDictionary()
};
var userTurn = await conversationService.AddTurnAsync(conversationId, userTurnRequest, cancellationToken)
.ConfigureAwait(false);
activity?.SetTag("advisory.user_turn_id", userTurn.TurnId);
// For now, return a placeholder response since the full LLM pipeline is not wired up yet.
// In a complete implementation, this would call the prompt assembler, the LLM, and the grounding/action validators (see the sketch after this handler).
var assistantContent = GeneratePlaceholderResponse(request.Content);
var assistantTurnRequest = new TurnRequest
{
Role = TurnRole.Assistant,
Content = assistantContent
};
var assistantTurn = await conversationService.AddTurnAsync(conversationId, assistantTurnRequest, cancellationToken)
.ConfigureAwait(false);
var elapsed = timeProvider.GetUtcNow() - startTime;
var response = new StellaOps.AdvisoryAI.WebService.Contracts.AssistantTurnResponse
{
TurnId = assistantTurn.TurnId,
Content = assistantTurn.Content,
Timestamp = assistantTurn.Timestamp,
EvidenceLinks = assistantTurn.EvidenceLinks.IsEmpty
? null
: assistantTurn.EvidenceLinks.Select(StellaOps.AdvisoryAI.WebService.Contracts.EvidenceLinkResponse.FromLink).ToList(),
ProposedActions = assistantTurn.ProposedActions.IsEmpty
? null
: assistantTurn.ProposedActions.Select(StellaOps.AdvisoryAI.WebService.Contracts.ProposedActionResponse.FromAction).ToList(),
GroundingScore = 1.0, // Placeholder
TokenCount = assistantContent.Split(' ').Length, // Rough estimate
DurationMs = (long)elapsed.TotalMilliseconds
};
return Results.Ok(response);
}
catch (ConversationNotFoundException)
{
return Results.NotFound(new { error = $"Conversation '{conversationId}' not found" });
}
}
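As noted in the handler, the assistant turn is currently a placeholder. A hedged sketch of how the injected chat services might later be wired in, using only the APIs visible elsewhere in this commit (ChatPromptAssembler.Assemble, ActionProposalParser.Parse and StripActionMarkers); the LLM call and the permission lookup are hypothetical placeholders:

// Sketch, not the shipped handler. CallLlmAsync and ResolvePermissions are assumptions;
// GroundingValidator and ChatResponseStreamer are omitted because their APIs are not
// visible in this diff.
var conversation = await conversationService.GetAsync(conversationId, cancellationToken).ConfigureAwait(false);
if (conversation is null)
{
    return Results.NotFound(new { error = $"Conversation '{conversationId}' not found" });
}

var prompt = promptAssembler!.Assemble(conversation, request.Content);   // usage mirrors ChatPromptAssemblerTests
var modelOutput = await CallLlmAsync(prompt, cancellationToken);         // hypothetical LLM client call
var permissions = ResolvePermissions(httpContext);                       // hypothetical scope-to-role mapping
var parsed = actionParser!.Parse(modelOutput, permissions);              // usage mirrors ActionProposalParserTests
var assistantContent = actionParser.StripActionMarkers(modelOutput);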
static async Task<IResult> HandleDeleteConversation(
HttpContext httpContext,
string conversationId,
IConversationService conversationService,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.delete_conversation", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
var deleted = await conversationService.DeleteAsync(conversationId, cancellationToken).ConfigureAwait(false);
if (!deleted)
{
return Results.NotFound(new { error = $"Conversation '{conversationId}' not found" });
}
return Results.NoContent();
}
static async Task<IResult> HandleListConversations(
HttpContext httpContext,
string? tenantId,
int? limit,
IConversationService conversationService,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.list_conversations", ActivityKind.Server);
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
// Get tenant from header if not provided
var effectiveTenantId = tenantId
?? (httpContext.Request.Headers.TryGetValue("X-StellaOps-Tenant", out var tenantHeader)
? tenantHeader.ToString()
: "default");
// Get user from header for filtering
var userId = httpContext.Request.Headers.TryGetValue("X-StellaOps-User", out var userHeader)
? userHeader.ToString()
: null;
var conversations = await conversationService.ListAsync(effectiveTenantId, userId, limit, cancellationToken)
.ConfigureAwait(false);
var summaries = conversations.Select(c => new StellaOps.AdvisoryAI.WebService.Contracts.ConversationSummary
{
ConversationId = c.ConversationId,
CreatedAt = c.CreatedAt,
UpdatedAt = c.UpdatedAt,
TurnCount = c.Turns.Length,
Preview = c.Turns.FirstOrDefault(t => t.Role == TurnRole.User)?.Content is { } content
? content.Length > 100 ? content[..100] + "..." : content
: null
}).ToList();
return Results.Ok(new StellaOps.AdvisoryAI.WebService.Contracts.ConversationListResponse
{
Conversations = summaries,
TotalCount = summaries.Count
});
}
static bool EnsureChatAuthorized(HttpContext context)
{
if (!context.Request.Headers.TryGetValue("X-StellaOps-Scopes", out var scopes))
{
return false;
}
var allowed = scopes
.SelectMany(value => value?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) ?? [])
.ToHashSet(StringComparer.OrdinalIgnoreCase);
return allowed.Contains("advisory:run") || allowed.Contains("advisory:chat");
}
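Pulling the header conventions of these handlers together, a client calling the chat endpoints would send headers along these lines (names taken from the handlers above; the scopes value must include advisory:chat or advisory:run):

// Sketch of the request headers the handlers above read; the base address is a placeholder.
using var client = new HttpClient { BaseAddress = new Uri("https://advisory-ai.example.internal") };
client.DefaultRequestHeaders.Add("X-StellaOps-Scopes", "advisory:chat"); // checked by EnsureChatAuthorized
client.DefaultRequestHeaders.Add("X-StellaOps-User", "analyst-1");       // read by HandleCreateConversation / HandleListConversations
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-1");      // fallback tenant for HandleListConversations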
static string GeneratePlaceholderResponse(string userMessage)
{
// Placeholder implementation - in production this would call the LLM
return $"I received your message: \"{userMessage}\". This is a placeholder response. " +
"The full chat functionality with grounded responses will be implemented when the LLM pipeline is connected.";
}
internal sealed record PipelinePlanRequest(
AdvisoryTaskType? TaskType,
string AdvisoryKey,

View File

@@ -0,0 +1,314 @@
// <copyright file="ActionProposalParserTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.AdvisoryAI.Chat;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests.Chat;
/// <summary>
/// Unit tests for <see cref="ActionProposalParser"/>.
/// Sprint: SPRINT_20260107_006_003 Task CH-014
/// </summary>
[Trait("Category", "Unit")]
public sealed class ActionProposalParserTests
{
private readonly ActionProposalParser _parser = new();
[Fact]
public void Parse_ButtonFormat_ExtractsAction()
{
// Arrange
var modelOutput = "You can approve this risk: [Accept Risk]{action:approve,cve_id=CVE-2023-1234}";
var permissions = ImmutableArray.Create("approver");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("approve");
result.Proposals[0].Label.Should().Be("Accept Risk");
result.Proposals[0].Parameters.Should().ContainKey("cve_id");
result.Proposals[0].Parameters["cve_id"].Should().Be("CVE-2023-1234");
result.Proposals[0].IsAllowed.Should().BeTrue();
result.Warnings.Should().BeEmpty();
}
[Fact]
public void Parse_MultipleActions_ExtractsAll()
{
// Arrange
var modelOutput = """
You have options:
[Accept Risk]{action:approve,cve_id=CVE-2023-1234}
[Block Image]{action:quarantine,image_digest=sha256:abc123}
""";
var permissions = ImmutableArray.Create("approver", "operator");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(2);
result.Proposals.Select(p => p.ActionType).Should().BeEquivalentTo("approve", "quarantine");
}
[Fact]
public void Parse_InlineActionFormat_ExtractsAction()
{
// Arrange
var modelOutput = "This vulnerability should be deferred. <!-- ACTION: defer cve_id=CVE-2023-5678 -->";
var permissions = ImmutableArray.Create("triage");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("defer");
}
[Fact]
public void Parse_MissingPermission_MarksAsBlocked()
{
// Arrange
var modelOutput = "[Accept Risk]{action:approve,cve_id=CVE-2023-1234}";
var permissions = ImmutableArray.Create("viewer"); // No approver permission
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].IsAllowed.Should().BeFalse();
result.Proposals[0].BlockedReason.Should().Contain("approver");
result.HasBlockedActions.Should().BeTrue();
}
[Fact]
public void Parse_MissingRequiredParameter_ReturnsWarning()
{
// Arrange
var modelOutput = "[Accept Risk]{action:approve}"; // Missing cve_id
var permissions = ImmutableArray.Create("approver");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().BeEmpty();
result.Warnings.Should().Contain(w => w.Contains("cve_id"));
}
[Fact]
public void Parse_UnknownActionType_ReturnsWarning()
{
// Arrange
var modelOutput = "[Do Something]{action:unknown_action,param=value}";
var permissions = ImmutableArray.Create("admin");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().BeEmpty();
result.Warnings.Should().Contain(w => w.Contains("Unknown action type"));
}
[Fact]
public void Parse_InvalidActionFormat_ReturnsWarning()
{
// Arrange - uses a valid button format but invalid action spec (missing action: prefix)
var modelOutput = "[Label]{someaction,param=value}";
var permissions = ImmutableArray.Create("admin");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert - regex doesn't match so no proposals are extracted
// This test verifies the parser gracefully handles non-matching patterns
result.Proposals.Should().BeEmpty();
// No warnings since the regex pattern doesn't match at all
}
[Fact]
public void Parse_QuarantineAction_RequiresOperatorRole()
{
// Arrange
var modelOutput = "[Block Image]{action:quarantine,image_digest=sha256:abc123}";
var permissions = ImmutableArray.Create("operator");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("quarantine");
result.Proposals[0].IsAllowed.Should().BeTrue();
result.Proposals[0].RequiredRole.Should().Be("operator");
}
[Fact]
public void Parse_CreateVexAction_RequiresIssuerRole()
{
// Arrange
var modelOutput = "[Create VEX]{action:create_vex,product=myapp,vulnerability=CVE-2023-1234}";
var permissions = ImmutableArray.Create("issuer");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("create_vex");
result.Proposals[0].IsAllowed.Should().BeTrue();
result.Proposals[0].Description.Should().Contain("VEX");
}
[Fact]
public void Parse_GenerateManifestAction_RequiresAdminRole()
{
// Arrange
var modelOutput = "[Generate Manifest]{action:generate_manifest,integration_type=gitlab}";
var permissions = ImmutableArray.Create("admin");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("generate_manifest");
result.Proposals[0].IsAllowed.Should().BeTrue();
}
[Fact]
public void Parse_NoActions_ReturnsEmptyResult()
{
// Arrange
var modelOutput = "This is a response with no action proposals.";
var permissions = ImmutableArray.Create("admin");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().BeEmpty();
result.Warnings.Should().BeEmpty();
result.HasBlockedActions.Should().BeFalse();
}
[Fact]
public void Parse_OptionalParameters_Included()
{
// Arrange
var modelOutput = "[Accept Risk]{action:approve,cve_id=CVE-2023-1234,rationale=tested,expiry=2024-12-31}";
var permissions = ImmutableArray.Create("approver");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].Parameters.Should().ContainKey("rationale");
result.Proposals[0].Parameters.Should().ContainKey("expiry");
result.Proposals[0].Parameters["rationale"].Should().Be("tested");
}
[Fact]
public void StripActionMarkers_RemovesButtonsKeepsLabel()
{
// Arrange
var modelOutput = "Click here: [Accept Risk]{action:approve,cve_id=CVE-2023-1234} to proceed.";
// Act
var stripped = _parser.StripActionMarkers(modelOutput);
// Assert
stripped.Should().Contain("Accept Risk");
stripped.Should().NotContain("{action:");
stripped.Should().NotContain("}");
}
[Fact]
public void StripActionMarkers_RemovesInlineActions()
{
// Arrange
var modelOutput = "Defer this. <!-- ACTION: defer cve_id=CVE-2023-5678 --> Continue.";
// Act
var stripped = _parser.StripActionMarkers(modelOutput);
// Assert
stripped.Should().NotContain("ACTION:");
stripped.Should().NotContain("<!--");
}
[Fact]
public void AllowedProposals_FiltersBlockedActions()
{
// Arrange
var modelOutput = """
[Accept]{action:approve,cve_id=CVE-1}
[Block]{action:quarantine,image_digest=sha256:abc}
""";
var permissions = ImmutableArray.Create("approver"); // Has approver but not operator
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(2);
result.AllowedProposals.Should().HaveCount(1);
result.AllowedProposals[0].ActionType.Should().Be("approve");
}
[Fact]
public void Parse_DeferAction_RequiresTriageRole()
{
// Arrange
var modelOutput = "[Defer Review]{action:defer,cve_id=CVE-2023-9999,assignee=security-team}";
var permissions = ImmutableArray.Create("triage");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("defer");
result.Proposals[0].IsAllowed.Should().BeTrue();
result.Proposals[0].Parameters.Should().ContainKey("assignee");
}
[Fact]
public void Parse_CaseInsensitiveActionType()
{
// Arrange
var modelOutput = "[Accept]{action:APPROVE,cve_id=CVE-2023-1234}";
var permissions = ImmutableArray.Create("approver");
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals.Should().HaveCount(1);
result.Proposals[0].ActionType.Should().Be("approve");
}
[Fact]
public void Parse_CaseInsensitiveRoleCheck()
{
// Arrange
var modelOutput = "[Accept]{action:approve,cve_id=CVE-2023-1234}";
var permissions = ImmutableArray.Create("APPROVER"); // Uppercase
// Act
var result = _parser.Parse(modelOutput, permissions);
// Assert
result.Proposals[0].IsAllowed.Should().BeTrue();
}
}
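These tests exercise two marker formats: a button form such as [Accept Risk]{action:approve,cve_id=CVE-2023-1234} and an inline comment form such as <!-- ACTION: defer cve_id=CVE-2023-5678 -->. A sketch of regular expressions consistent with those inputs (the real ActionProposalParser implementation is not part of this diff and may differ):

// Sketch only: patterns inferred from the test inputs above, not taken from the parser.
using System.Text.RegularExpressions;

internal static class ActionMarkerPatterns
{
    // [Label]{action:type,key=value,...}
    public static readonly Regex Button = new(
        @"\[(?<label>[^\]]+)\]\{action:(?<type>[A-Za-z_]+)(?:,(?<params>[^}]*))?\}",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);

    // <!-- ACTION: type key=value ... -->
    public static readonly Regex Inline = new(
        @"<!--\s*ACTION:\s*(?<type>[A-Za-z_]+)\s+(?<params>[^>]*?)\s*-->",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);
}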

View File

@@ -0,0 +1,449 @@
// <copyright file="ChatIntegrationTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using FluentAssertions;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.AdvisoryAI.Chat;
using StellaOps.AdvisoryAI.Storage;
using StellaOps.AdvisoryAI.WebService.Contracts;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests.Chat;
/// <summary>
/// Integration tests for Chat API endpoints.
/// Sprint: SPRINT_20260107_006_003 Task CH-015
/// </summary>
[Trait("Category", TestCategories.Integration)]
public sealed class ChatIntegrationTests : IClassFixture<WebApplicationFactory<Program>>
{
private readonly WebApplicationFactory<Program> _factory;
private readonly HttpClient _client;
public ChatIntegrationTests(WebApplicationFactory<Program> factory)
{
_factory = factory.WithWebHostBuilder(builder =>
{
builder.ConfigureServices(services =>
{
// Use in-memory conversation store for tests
services.AddSingleton<IConversationStore, InMemoryConversationStore>();
});
});
_client = _factory.CreateClient();
_client.DefaultRequestHeaders.Add("X-StellaOps-User", "test-user");
_client.DefaultRequestHeaders.Add("X-StellaOps-Client", "test-client");
_client.DefaultRequestHeaders.Add("X-StellaOps-Roles", "chat:user");
}
#region Create Conversation Tests
[Fact]
public async Task CreateConversation_ValidRequest_Returns201Created()
{
// Arrange
var request = new CreateConversationRequest
{
TenantId = "test-tenant-001"
};
// Act
var response = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", request);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.Created);
response.Headers.Location.Should().NotBeNull();
var result = await response.Content.ReadFromJsonAsync<ConversationResponse>();
result.Should().NotBeNull();
result!.ConversationId.Should().NotBeNullOrEmpty();
result.TenantId.Should().Be("test-tenant-001");
result.UserId.Should().Be("test-user");
result.Turns.Should().BeEmpty();
}
[Fact]
public async Task CreateConversation_WithContext_ContextPreserved()
{
// Arrange
var request = new CreateConversationRequest
{
TenantId = "test-tenant-002",
Context = new ConversationContextRequest
{
CurrentCveId = "CVE-2023-44487",
CurrentComponent = "pkg:npm/http2@1.0.0",
CurrentImageDigest = "sha256:abc123"
}
};
// Act
var response = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", request);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.Created);
var result = await response.Content.ReadFromJsonAsync<ConversationResponse>();
result.Should().NotBeNull();
result!.ConversationId.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task CreateConversation_Unauthorized_Returns403()
{
// Arrange
var client = _factory.CreateClient();
// No auth headers
var request = new CreateConversationRequest { TenantId = "test-tenant" };
// Act
var response = await client.PostAsJsonAsync("/v1/advisory-ai/conversations", request);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.Forbidden);
}
#endregion
#region Get Conversation Tests
[Fact]
public async Task GetConversation_ExistingConversation_Returns200()
{
// Arrange - Create conversation first
var createRequest = new CreateConversationRequest { TenantId = "test-tenant-get" };
var createResponse = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", createRequest);
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
// Act
var response = await _client.GetAsync($"/v1/advisory-ai/conversations/{created!.ConversationId}");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<ConversationResponse>();
result.Should().NotBeNull();
result!.ConversationId.Should().Be(created.ConversationId);
}
[Fact]
public async Task GetConversation_NonExistent_Returns404()
{
// Act
var response = await _client.GetAsync("/v1/advisory-ai/conversations/non-existent-id");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
#endregion
#region Delete Conversation Tests
[Fact]
public async Task DeleteConversation_ExistingConversation_Returns204()
{
// Arrange - Create conversation first
var createRequest = new CreateConversationRequest { TenantId = "test-tenant-delete" };
var createResponse = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", createRequest);
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
// Act
var response = await _client.DeleteAsync($"/v1/advisory-ai/conversations/{created!.ConversationId}");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NoContent);
// Verify deleted
var getResponse = await _client.GetAsync($"/v1/advisory-ai/conversations/{created.ConversationId}");
getResponse.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact]
public async Task DeleteConversation_NonExistent_Returns404()
{
// Act
var response = await _client.DeleteAsync("/v1/advisory-ai/conversations/non-existent-id");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
#endregion
#region List Conversations Tests
[Fact]
public async Task ListConversations_WithTenant_ReturnsFilteredList()
{
// Arrange - Create multiple conversations
var tenantId = $"test-tenant-list-{Guid.NewGuid():N}";
await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", new CreateConversationRequest { TenantId = tenantId });
await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", new CreateConversationRequest { TenantId = tenantId });
await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", new CreateConversationRequest { TenantId = "other-tenant" });
// Act
var response = await _client.GetAsync($"/v1/advisory-ai/conversations?tenantId={tenantId}");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<ConversationListResponse>();
result.Should().NotBeNull();
result!.Conversations.Should().HaveCountGreaterThanOrEqualTo(2);
}
[Fact]
public async Task ListConversations_WithPagination_ReturnsPagedResults()
{
// Arrange - Create multiple conversations
var tenantId = $"test-tenant-page-{Guid.NewGuid():N}";
for (int i = 0; i < 5; i++)
{
await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", new CreateConversationRequest { TenantId = tenantId });
}
// Act
var response = await _client.GetAsync($"/v1/advisory-ai/conversations?tenantId={tenantId}&limit=2");
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<ConversationListResponse>();
result.Should().NotBeNull();
result!.Conversations.Should().HaveCount(2);
}
#endregion
#region Add Turn Tests
[Fact]
public async Task AddTurn_ValidMessage_Returns200WithResponse()
{
// Arrange - Create conversation first
var createRequest = new CreateConversationRequest
{
TenantId = "test-tenant-turn",
Context = new ConversationContextRequest { CurrentCveId = "CVE-2023-44487" }
};
var createResponse = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", createRequest);
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
var turnRequest = new AddTurnRequest
{
Content = "What is the severity of this vulnerability?",
Stream = false
};
// Act
var response = await _client.PostAsJsonAsync(
$"/v1/advisory-ai/conversations/{created!.ConversationId}/turns",
turnRequest);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<AssistantTurnResponse>();
result.Should().NotBeNull();
result!.TurnId.Should().NotBeNullOrEmpty();
result.Content.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task AddTurn_NonExistentConversation_Returns404()
{
// Arrange
var turnRequest = new AddTurnRequest
{
Content = "Test message",
Stream = false
};
// Act
var response = await _client.PostAsJsonAsync(
"/v1/advisory-ai/conversations/non-existent-id/turns",
turnRequest);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact]
public async Task AddTurn_MultipleMessages_BuildsConversationHistory()
{
// Arrange - Create conversation
var createResponse = await _client.PostAsJsonAsync(
"/v1/advisory-ai/conversations",
new CreateConversationRequest { TenantId = "test-tenant-multi" });
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
var conversationId = created!.ConversationId;
// Act - Send multiple messages
await _client.PostAsJsonAsync(
$"/v1/advisory-ai/conversations/{conversationId}/turns",
new AddTurnRequest { Content = "First question", Stream = false });
await _client.PostAsJsonAsync(
$"/v1/advisory-ai/conversations/{conversationId}/turns",
new AddTurnRequest { Content = "Follow-up question", Stream = false });
// Assert - Check conversation has all turns
var getResponse = await _client.GetAsync($"/v1/advisory-ai/conversations/{conversationId}");
var conversation = await getResponse.Content.ReadFromJsonAsync<ConversationResponse>();
conversation!.Turns.Should().HaveCountGreaterThanOrEqualTo(4); // 2 user + 2 assistant
}
#endregion
#region Streaming Tests
[Fact]
public async Task AddTurn_WithStreaming_ReturnsSSEStream()
{
// Arrange - Create conversation
var createResponse = await _client.PostAsJsonAsync(
"/v1/advisory-ai/conversations",
new CreateConversationRequest { TenantId = "test-tenant-stream" });
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
var turnRequest = new AddTurnRequest
{
Content = "Explain this CVE",
Stream = true
};
// Act
var request = new HttpRequestMessage(
HttpMethod.Post,
$"/v1/advisory-ai/conversations/{created!.ConversationId}/turns");
request.Content = JsonContent.Create(turnRequest);
request.Headers.Accept.Clear();
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("text/event-stream"));
var response = await _client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
response.Content.Headers.ContentType?.MediaType.Should().Be("text/event-stream");
}
#endregion
#region Action Gating Tests
[Fact]
public async Task AddTurn_WithProposedAction_ActionRequiresConfirmation()
{
// This test verifies that action proposals are returned in the response
// but not executed without explicit confirmation
// Arrange - Create conversation with CVE context
var createResponse = await _client.PostAsJsonAsync(
"/v1/advisory-ai/conversations",
new CreateConversationRequest
{
TenantId = "test-tenant-action",
Context = new ConversationContextRequest { CurrentCveId = "CVE-2023-44487" }
});
var created = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();
var turnRequest = new AddTurnRequest
{
Content = "Please quarantine this component",
Stream = false
};
// Act
var response = await _client.PostAsJsonAsync(
$"/v1/advisory-ai/conversations/{created!.ConversationId}/turns",
turnRequest);
// Assert
response.StatusCode.Should().Be(HttpStatusCode.OK);
var result = await response.Content.ReadFromJsonAsync<AssistantTurnResponse>();
result.Should().NotBeNull();
// If action was proposed, it should be in the response but not executed
// The response should indicate that user confirmation is needed
if (result!.ProposedActions?.Any() == true)
{
result.ProposedActions.Should().AllSatisfy(a =>
{
a.ActionType.Should().NotBeNullOrEmpty();
a.RequiresConfirmation.Should().BeTrue();
});
}
}
#endregion
}
/// <summary>
/// In-memory conversation store for testing.
/// </summary>
internal sealed class InMemoryConversationStore : IConversationStore
{
private readonly Dictionary<string, Conversation> _conversations = new();
public Task<Conversation> CreateAsync(Conversation conversation, CancellationToken cancellationToken = default)
{
_conversations[conversation.ConversationId] = conversation;
return Task.FromResult(conversation);
}
public Task<Conversation?> GetByIdAsync(string conversationId, CancellationToken cancellationToken = default)
{
_conversations.TryGetValue(conversationId, out var conversation);
return Task.FromResult(conversation);
}
public Task<IReadOnlyList<Conversation>> GetByUserAsync(string tenantId, string userId, int limit = 20, CancellationToken cancellationToken = default)
{
var result = _conversations.Values
.Where(c => c.TenantId == tenantId && c.UserId == userId)
.OrderByDescending(c => c.CreatedAt)
.Take(limit)
.ToList();
return Task.FromResult<IReadOnlyList<Conversation>>(result);
}
public Task<Conversation> AddTurnAsync(string conversationId, ConversationTurn turn, CancellationToken cancellationToken = default)
{
if (!_conversations.TryGetValue(conversationId, out var conversation))
{
throw new InvalidOperationException($"Conversation {conversationId} not found");
}
var updatedTurns = conversation.Turns.Add(turn);
var updated = conversation with { Turns = updatedTurns, UpdatedAt = DateTimeOffset.UtcNow };
_conversations[conversationId] = updated;
return Task.FromResult(updated);
}
public Task<bool> DeleteAsync(string conversationId, CancellationToken cancellationToken = default)
{
return Task.FromResult(_conversations.Remove(conversationId));
}
public Task CleanupExpiredAsync(TimeSpan maxAge, CancellationToken cancellationToken = default)
{
var cutoff = DateTimeOffset.UtcNow - maxAge;
var expired = _conversations.Where(kvp => kvp.Value.CreatedAt < cutoff).Select(kvp => kvp.Key).ToList();
foreach (var key in expired)
{
_conversations.Remove(key);
}
return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,324 @@
// <copyright file="ChatPromptAssemblerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Chat;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests.Chat;
/// <summary>
/// Unit tests for <see cref="ChatPromptAssembler"/>.
/// Sprint: SPRINT_20260107_006_003 Task CH-014
/// </summary>
[Trait("Category", "Unit")]
public sealed class ChatPromptAssemblerTests
{
private readonly ChatPromptAssembler _assembler;
private readonly ChatPromptOptions _options;
public ChatPromptAssemblerTests()
{
_options = new ChatPromptOptions
{
BaseSystemPrompt = "You are AdvisoryAI.",
MaxContextTokens = 4000,
SystemPromptVersion = "v1.0.0"
};
var contextBuilder = new ConversationContextBuilder();
_assembler = new ChatPromptAssembler(Options.Create(_options), contextBuilder);
}
[Fact]
public void Assemble_EmptyConversation_IncludesSystemAndUserMessage()
{
// Arrange
var conversation = CreateConversation();
var userMessage = "What is CVE-2023-1234?";
// Act
var result = _assembler.Assemble(conversation, userMessage);
// Assert
result.Messages.Should().HaveCountGreaterThanOrEqualTo(2);
result.Messages[0].Role.Should().Be(ChatMessageRole.System);
result.Messages[^1].Role.Should().Be(ChatMessageRole.User);
result.Messages[^1].Content.Should().Be(userMessage);
}
[Fact]
public void Assemble_SystemPrompt_ContainsGroundingRules()
{
// Arrange
var conversation = CreateConversation();
// Act
var result = _assembler.Assemble(conversation, "Hello");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("GROUNDING RULES");
systemMessage.Content.Should().Contain("cite");
}
[Fact]
public void Assemble_SystemPrompt_ContainsObjectLinkFormats()
{
// Arrange
var conversation = CreateConversation();
// Act
var result = _assembler.Assemble(conversation, "Hello");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("OBJECT LINK FORMATS");
systemMessage.Content.Should().Contain("[sbom:");
systemMessage.Content.Should().Contain("[reach:");
systemMessage.Content.Should().Contain("[vex:");
}
[Fact]
public void Assemble_SystemPrompt_ContainsActionProposalFormat()
{
// Arrange
var conversation = CreateConversation();
// Act
var result = _assembler.Assemble(conversation, "Hello");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("ACTION PROPOSALS");
systemMessage.Content.Should().Contain("approve");
systemMessage.Content.Should().Contain("quarantine");
}
[Fact]
public void Assemble_WithHistory_IncludesPriorTurns()
{
// Arrange
var turns = ImmutableArray.Create(
new ConversationTurn
{
TurnId = "t1",
Role = TurnRole.User,
Content = "Previous question",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-5)
},
new ConversationTurn
{
TurnId = "t2",
Role = TurnRole.Assistant,
Content = "Previous answer",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-4)
});
var conversation = CreateConversation(turns: turns);
// Act
var result = _assembler.Assemble(conversation, "New question");
// Assert
result.Messages.Should().HaveCountGreaterThan(3); // System + 2 history + user
result.Messages.Should().Contain(m => m.Content == "Previous question");
result.Messages.Should().Contain(m => m.Content == "Previous answer");
}
[Fact]
public void Assemble_WithCveContext_IncludesFocusInSystemPrompt()
{
// Arrange
var context = new ConversationContext { CurrentCveId = "CVE-2023-44487" };
var conversation = CreateConversation(context: context);
// Act
var result = _assembler.Assemble(conversation, "Tell me more");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("CVE-2023-44487");
systemMessage.Content.Should().Contain("CURRENT FOCUS");
}
[Fact]
public void Assemble_WithPolicyContext_IncludesPermissions()
{
// Arrange
var policy = new PolicyContext
{
Permissions = ImmutableArray.Create("approver", "viewer"),
AutomationAllowed = true
};
var context = new ConversationContext { Policy = policy };
var conversation = CreateConversation(context: context);
// Act
var result = _assembler.Assemble(conversation, "What can I do?");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("USER PERMISSIONS");
systemMessage.Content.Should().Contain("Automation is ALLOWED");
}
[Fact]
public void Assemble_AutomationDisabled_IndicatesInSystemPrompt()
{
// Arrange
var policy = new PolicyContext { AutomationAllowed = false };
var context = new ConversationContext { Policy = policy };
var conversation = CreateConversation(context: context);
// Act
var result = _assembler.Assemble(conversation, "Execute action");
// Assert
var systemMessage = result.Messages.First(m => m.Role == ChatMessageRole.System);
systemMessage.Content.Should().Contain("Automation is DISABLED");
}
[Fact]
public void Assemble_EstimatesTokenCount()
{
// Arrange
var conversation = CreateConversation();
// Act
var result = _assembler.Assemble(conversation, "A short message");
// Assert
result.EstimatedTokens.Should().BePositive();
}
[Fact]
public void Assemble_IncludesSystemPromptVersion()
{
// Arrange
var conversation = CreateConversation();
// Act
var result = _assembler.Assemble(conversation, "Hello");
// Assert
result.SystemPromptVersion.Should().Be("v1.0.0");
}
[Fact]
public void Assemble_AssistantTurnWithEvidenceLinks_AppendsFootnotes()
{
// Arrange
var evidenceLinks = ImmutableArray.Create(
new EvidenceLink
{
Type = EvidenceLinkType.Sbom,
Uri = "sbom:abc123",
Label = "Component SBOM"
});
var turns = ImmutableArray.Create(
new ConversationTurn
{
TurnId = "t1",
Role = TurnRole.User,
Content = "What's in the SBOM?",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-2)
},
new ConversationTurn
{
TurnId = "t2",
Role = TurnRole.Assistant,
Content = "The SBOM contains lodash.",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-1),
EvidenceLinks = evidenceLinks
});
var conversation = CreateConversation(turns: turns);
// Act
var result = _assembler.Assemble(conversation, "Anything else?");
// Assert
var assistantMessage = result.Messages.FirstOrDefault(m =>
m.Role == ChatMessageRole.Assistant && m.Content.Contains("lodash"));
assistantMessage.Should().NotBeNull();
assistantMessage!.Content.Should().Contain("Evidence:");
assistantMessage.Content.Should().Contain("Component SBOM");
}
[Fact]
public void Assemble_MessageRolesCorrectlyMapped()
{
// Arrange
var turns = ImmutableArray.Create(
new ConversationTurn
{
TurnId = "t1",
Role = TurnRole.User,
Content = "User message",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-2)
},
new ConversationTurn
{
TurnId = "t2",
Role = TurnRole.Assistant,
Content = "Assistant message",
Timestamp = DateTimeOffset.UtcNow.AddMinutes(-1)
},
new ConversationTurn
{
TurnId = "t3",
Role = TurnRole.System,
Content = "System note",
Timestamp = DateTimeOffset.UtcNow
});
var conversation = CreateConversation(turns: turns);
// Act
var result = _assembler.Assemble(conversation, "New message");
// Assert
result.Messages.Should().Contain(m => m.Role == ChatMessageRole.User && m.Content == "User message");
result.Messages.Should().Contain(m => m.Role == ChatMessageRole.Assistant && m.Content.Contains("Assistant message"));
result.Messages.Should().Contain(m => m.Role == ChatMessageRole.System && m.Content == "System note");
}
[Fact]
public void Assemble_ReturnsBuiltContext()
{
// Arrange
var context = new ConversationContext
{
CurrentCveId = "CVE-2023-1234",
CurrentComponent = "pkg:npm/lodash@4.17.21"
};
var conversation = CreateConversation(context: context);
// Act
var result = _assembler.Assemble(conversation, "Analyze this");
// Assert
result.Context.Should().NotBeNull();
}
private static Conversation CreateConversation(
ConversationContext? context = null,
ImmutableArray<ConversationTurn>? turns = null)
{
return new Conversation
{
ConversationId = "conv-1",
TenantId = "tenant-1",
UserId = "user-1",
CreatedAt = DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow,
Context = context ?? new ConversationContext(),
Turns = turns ?? ImmutableArray<ConversationTurn>.Empty
};
}
}
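The assertions above collectively pin down the section markers the assembled system prompt must contain. A heavily hedged sketch built only from those asserted substrings (the actual template lives in ChatPromptAssembler and is not part of this diff):

// Sketch only: wording and ordering beyond the asserted substrings are invented.
const string ExampleSystemPrompt = """
    You are AdvisoryAI.

    GROUNDING RULES
    - Always cite evidence for factual claims.

    OBJECT LINK FORMATS
    - [sbom:<digest>], [reach:<id>], [vex:<id>]

    ACTION PROPOSALS
    - approve, quarantine (rendered as [Label]{action:<type>,<params>})

    CURRENT FOCUS
    - e.g. CVE-2023-44487

    USER PERMISSIONS
    - e.g. approver, viewer; "Automation is ALLOWED" or "Automation is DISABLED"
    """;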

View File

@@ -0,0 +1,444 @@
// <copyright file="ConversationServiceTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Chat;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests.Chat;
/// <summary>
/// Unit tests for <see cref="ConversationService"/>.
/// Sprint: SPRINT_20260107_006_003 Task CH-014
/// </summary>
[Trait("Category", "Unit")]
public sealed class ConversationServiceTests
{
private readonly ConversationService _service;
private readonly TestGuidGenerator _guidGenerator;
private readonly TestTimeProvider _timeProvider;
public ConversationServiceTests()
{
_guidGenerator = new TestGuidGenerator();
_timeProvider = new TestTimeProvider(new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero));
var options = Options.Create(new ConversationOptions
{
MaxTurnsPerConversation = 50,
ConversationRetention = TimeSpan.FromDays(7)
});
_service = new ConversationService(
options,
_timeProvider,
_guidGenerator,
NullLogger<ConversationService>.Instance);
}
[Fact]
public async Task CreateAsync_CreatesConversation()
{
// Arrange
var request = new ConversationRequest
{
TenantId = "tenant-1",
UserId = "user-1"
};
// Act
var conversation = await _service.CreateAsync(request);
// Assert
conversation.Should().NotBeNull();
conversation.ConversationId.Should().NotBeNullOrEmpty();
conversation.TenantId.Should().Be("tenant-1");
conversation.UserId.Should().Be("user-1");
conversation.CreatedAt.Should().Be(_timeProvider.GetUtcNow());
conversation.UpdatedAt.Should().Be(_timeProvider.GetUtcNow());
conversation.Turns.Should().BeEmpty();
}
[Fact]
public async Task CreateAsync_WithInitialContext_SetsContext()
{
// Arrange
var context = new ConversationContext
{
CurrentCveId = "CVE-2023-1234",
CurrentComponent = "pkg:npm/lodash@4.17.21"
};
var request = new ConversationRequest
{
TenantId = "tenant-1",
UserId = "user-1",
InitialContext = context
};
// Act
var conversation = await _service.CreateAsync(request);
// Assert
conversation.Context.CurrentCveId.Should().Be("CVE-2023-1234");
conversation.Context.CurrentComponent.Should().Be("pkg:npm/lodash@4.17.21");
}
[Fact]
public async Task CreateAsync_WithMetadata_StoresMetadata()
{
// Arrange
var request = new ConversationRequest
{
TenantId = "tenant-1",
UserId = "user-1",
Metadata = ImmutableDictionary.CreateRange(new[]
{
KeyValuePair.Create("source", "ui"),
KeyValuePair.Create("version", "1.0")
})
};
// Act
var conversation = await _service.CreateAsync(request);
// Assert
conversation.Metadata.Should().ContainKey("source");
conversation.Metadata["source"].Should().Be("ui");
}
[Fact]
public async Task GetAsync_ExistingConversation_ReturnsConversation()
{
// Arrange
var request = new ConversationRequest
{
TenantId = "tenant-1",
UserId = "user-1"
};
var created = await _service.CreateAsync(request);
// Act
var retrieved = await _service.GetAsync(created.ConversationId);
// Assert
retrieved.Should().NotBeNull();
retrieved!.ConversationId.Should().Be(created.ConversationId);
}
[Fact]
public async Task GetAsync_NonExistentConversation_ReturnsNull()
{
// Act
var result = await _service.GetAsync("non-existent-id");
// Assert
result.Should().BeNull();
}
[Fact]
public async Task AddTurnAsync_AddsUserTurn()
{
// Arrange
var conversation = await CreateTestConversation();
var turnRequest = new TurnRequest
{
Role = TurnRole.User,
Content = "What is CVE-2023-1234?"
};
// Act
var turn = await _service.AddTurnAsync(conversation.ConversationId, turnRequest);
// Assert
turn.Should().NotBeNull();
turn.Role.Should().Be(TurnRole.User);
turn.Content.Should().Be("What is CVE-2023-1234?");
turn.TurnId.Should().NotBeNullOrEmpty();
turn.Timestamp.Should().Be(_timeProvider.GetUtcNow());
}
[Fact]
public async Task AddTurnAsync_AddsAssistantTurn()
{
// Arrange
var conversation = await CreateTestConversation();
var turnRequest = new TurnRequest
{
Role = TurnRole.Assistant,
Content = "CVE-2023-1234 is a critical vulnerability..."
};
// Act
var turn = await _service.AddTurnAsync(conversation.ConversationId, turnRequest);
// Assert
turn.Role.Should().Be(TurnRole.Assistant);
turn.Content.Should().Contain("CVE-2023-1234");
}
[Fact]
public async Task AddTurnAsync_WithEvidenceLinks_StoresLinks()
{
// Arrange
var conversation = await CreateTestConversation();
var links = ImmutableArray.Create(
new EvidenceLink
{
Type = EvidenceLinkType.Sbom,
Uri = "sbom:abc123",
Label = "SBOM Reference"
});
var turnRequest = new TurnRequest
{
Role = TurnRole.Assistant,
Content = "Found in SBOM [sbom:abc123]",
EvidenceLinks = links
};
// Act
var turn = await _service.AddTurnAsync(conversation.ConversationId, turnRequest);
// Assert
turn.EvidenceLinks.Should().HaveCount(1);
turn.EvidenceLinks[0].Type.Should().Be(EvidenceLinkType.Sbom);
}
[Fact]
public async Task AddTurnAsync_WithProposedActions_StoresActions()
{
// Arrange
var conversation = await CreateTestConversation();
var actions = ImmutableArray.Create(
new ProposedAction
{
ActionType = "approve",
Label = "Accept Risk",
RequiresConfirmation = true
});
var turnRequest = new TurnRequest
{
Role = TurnRole.Assistant,
Content = "You may want to approve this risk.",
ProposedActions = actions
};
// Act
var turn = await _service.AddTurnAsync(conversation.ConversationId, turnRequest);
// Assert
turn.ProposedActions.Should().HaveCount(1);
turn.ProposedActions[0].ActionType.Should().Be("approve");
}
[Fact]
public async Task AddTurnAsync_NonExistentConversation_ThrowsException()
{
// Arrange
var turnRequest = new TurnRequest
{
Role = TurnRole.User,
Content = "Hello"
};
// Act
Func<Task> act = () => _service.AddTurnAsync("non-existent", turnRequest);
// Assert
await act.Should().ThrowAsync<ConversationNotFoundException>();
}
[Fact]
public async Task AddTurnAsync_UpdatesConversationTimestamp()
{
// Arrange
var conversation = await CreateTestConversation();
var originalUpdatedAt = conversation.UpdatedAt;
// Advance time
_timeProvider.Advance(TimeSpan.FromMinutes(5));
var turnRequest = new TurnRequest
{
Role = TurnRole.User,
Content = "New message"
};
// Act
await _service.AddTurnAsync(conversation.ConversationId, turnRequest);
var updated = await _service.GetAsync(conversation.ConversationId);
// Assert
updated!.UpdatedAt.Should().BeAfter(originalUpdatedAt);
}
[Fact]
public async Task DeleteAsync_ExistingConversation_ReturnsTrue()
{
// Arrange
var conversation = await CreateTestConversation();
// Act
var result = await _service.DeleteAsync(conversation.ConversationId);
// Assert
result.Should().BeTrue();
(await _service.GetAsync(conversation.ConversationId)).Should().BeNull();
}
[Fact]
public async Task DeleteAsync_NonExistentConversation_ReturnsFalse()
{
// Act
var result = await _service.DeleteAsync("non-existent");
// Assert
result.Should().BeFalse();
}
[Fact]
public async Task ListAsync_ByTenant_ReturnsMatchingConversations()
{
// Arrange
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-1" });
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-2" });
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-2", UserId = "user-1" });
// Act
var result = await _service.ListAsync("tenant-1");
// Assert
result.Should().HaveCount(2);
result.All(c => c.TenantId == "tenant-1").Should().BeTrue();
}
[Fact]
public async Task ListAsync_ByTenantAndUser_ReturnsMatchingConversations()
{
// Arrange
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-1" });
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-2" });
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-1" });
// Act
var result = await _service.ListAsync("tenant-1", "user-1");
// Assert
result.Should().HaveCount(2);
result.All(c => c.UserId == "user-1").Should().BeTrue();
}
[Fact]
public async Task ListAsync_WithLimit_ReturnsLimitedResults()
{
// Arrange
for (int i = 0; i < 5; i++)
{
await _service.CreateAsync(new ConversationRequest { TenantId = "tenant-1", UserId = "user-1" });
}
// Act
var result = await _service.ListAsync("tenant-1", limit: 3);
// Assert
result.Should().HaveCount(3);
}
[Fact]
public async Task UpdateContextAsync_UpdatesContext()
{
// Arrange
var conversation = await CreateTestConversation();
var newContext = new ConversationContext
{
CurrentCveId = "CVE-2023-5678",
ScanId = "scan-123"
};
// Act
var updated = await _service.UpdateContextAsync(conversation.ConversationId, newContext);
// Assert
updated.Should().NotBeNull();
updated!.Context.CurrentCveId.Should().Be("CVE-2023-5678");
updated.Context.ScanId.Should().Be("scan-123");
}
[Fact]
public async Task UpdateContextAsync_NonExistentConversation_ReturnsNull()
{
// Arrange
var context = new ConversationContext { CurrentCveId = "CVE-2023-1234" };
// Act
var result = await _service.UpdateContextAsync("non-existent", context);
// Assert
result.Should().BeNull();
}
[Fact]
public async Task TurnCount_ReflectsNumberOfTurns()
{
// Arrange
var conversation = await CreateTestConversation();
await _service.AddTurnAsync(conversation.ConversationId, new TurnRequest { Role = TurnRole.User, Content = "Q1" });
await _service.AddTurnAsync(conversation.ConversationId, new TurnRequest { Role = TurnRole.Assistant, Content = "A1" });
await _service.AddTurnAsync(conversation.ConversationId, new TurnRequest { Role = TurnRole.User, Content = "Q2" });
// Act
var updated = await _service.GetAsync(conversation.ConversationId);
// Assert
updated!.TurnCount.Should().Be(3);
updated.Turns.Should().HaveCount(3);
}
private async Task<Conversation> CreateTestConversation()
{
return await _service.CreateAsync(new ConversationRequest
{
TenantId = "test-tenant",
UserId = "test-user"
});
}
private sealed class TestGuidGenerator : IGuidGenerator
{
private int _counter;
public Guid NewGuid()
{
return new Guid(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (byte)Interlocked.Increment(ref _counter));
}
}
private sealed class TestTimeProvider : TimeProvider
{
private DateTimeOffset _utcNow;
public TestTimeProvider(DateTimeOffset initialTime)
{
_utcNow = initialTime;
}
public override DateTimeOffset GetUtcNow() => _utcNow;
public void Advance(TimeSpan duration)
{
_utcNow = _utcNow.Add(duration);
}
}
}
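The fixture above exercises the full conversation lifecycle: create, add turns, list, update context, delete. A minimal consumer sketch under the same assumptions — the service is taken to be injectable as IConversationService, and every member used below is one the tests call — might look like this:

// Sketch only: mirrors the conversation-service surface exercised by the tests above;
// IConversationService is assumed to be the injectable abstraction behind _service.
using System.Threading.Tasks;
using StellaOps.AdvisoryAI.Chat;

public sealed class TriageConversationStarter
{
    private readonly IConversationService _conversations;

    public TriageConversationStarter(IConversationService conversations)
        => _conversations = conversations;

    public async Task<Conversation> StartAsync(string tenantId, string userId, string cveId)
    {
        // Seed the conversation with the CVE under discussion.
        var conversation = await _conversations.CreateAsync(new ConversationRequest
        {
            TenantId = tenantId,
            UserId = userId,
            InitialContext = new ConversationContext { CurrentCveId = cveId }
        });

        // Opening user turn; the service assigns TurnId and the timestamp.
        await _conversations.AddTurnAsync(conversation.ConversationId, new TurnRequest
        {
            Role = TurnRole.User,
            Content = $"Summarise {cveId} for the current image."
        });

        return conversation;
    }
}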

View File

@@ -0,0 +1,413 @@
// <copyright file="GroundingValidatorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.AdvisoryAI.Chat;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests.Chat;
/// <summary>
/// Unit tests for <see cref="GroundingValidator"/>.
/// Sprint: SPRINT_20260107_006_003 Task CH-014
/// </summary>
[Trait("Category", "Unit")]
public sealed class GroundingValidatorTests
{
private readonly MockObjectLinkResolver _resolver;
private readonly GroundingValidator _validator;
private readonly GroundingOptions _options;
public GroundingValidatorTests()
{
_resolver = new MockObjectLinkResolver();
_options = new GroundingOptions
{
MinGroundingScore = 0.5,
MaxLinkDistance = 200
};
_validator = new GroundingValidator(
_resolver,
NullLogger<GroundingValidator>.Instance,
_options);
}
[Fact]
public async Task ValidateAsync_WellGroundedResponse_ReturnsAcceptable()
{
// Arrange
_resolver.AddResolution("sbom", "abc123", exists: true);
var response = "The component is affected [sbom:abc123] as shown in the SBOM.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.IsAcceptable.Should().BeTrue();
result.GroundingScore.Should().BeGreaterThan(0);
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].IsValid.Should().BeTrue();
}
[Fact]
public async Task ValidateAsync_NoLinks_LowScore()
{
// Arrange
var response = "The component is vulnerable but I have no evidence.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().BeEmpty();
result.GroundingScore.Should().BeLessThan(1.0);
}
[Fact]
public async Task ValidateAsync_InvalidLink_AddsIssue()
{
// Arrange
_resolver.AddResolution("sbom", "nonexistent", exists: false);
var response = "Check this SBOM [sbom:nonexistent] for details.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].IsValid.Should().BeFalse();
result.Issues.Should().Contain(i => i.Type == GroundingIssueType.InvalidLink);
}
[Fact]
public async Task ValidateAsync_ExtractsMultipleLinkTypes()
{
// Arrange
_resolver.AddResolution("sbom", "abc", exists: true);
_resolver.AddResolution("vex", "issuer:digest", exists: true);
_resolver.AddResolution("reach", "api:func", exists: true);
var response = "Found in SBOM [sbom:abc], VEX [vex:issuer:digest], and reachability [reach:api:func].";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(3);
result.ValidatedLinks.Should().Contain(l => l.Type == "sbom");
result.ValidatedLinks.Should().Contain(l => l.Type == "vex");
result.ValidatedLinks.Should().Contain(l => l.Type == "reach");
}
[Fact]
public async Task ValidateAsync_DetectsAffectedClaim()
{
// Arrange
var response = "This component is affected by the vulnerability.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.TotalClaims.Should().BeGreaterThan(0);
}
[Fact]
public async Task ValidateAsync_DetectsNotAffectedClaim()
{
// Arrange
var response = "The service is not affected by this CVE.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.TotalClaims.Should().BeGreaterThan(0);
}
[Fact]
public async Task ValidateAsync_DetectsFixedClaim()
{
// Arrange
var response = "The vulnerability has been fixed in version 2.0.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.TotalClaims.Should().BeGreaterThan(0);
}
[Fact]
public async Task ValidateAsync_DetectsSeverityClaim()
{
// Arrange
var response = "The CVSS score is 9.8, making this critical.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.TotalClaims.Should().BeGreaterThan(0);
}
[Fact]
public async Task ValidateAsync_UngroundedClaimNearLink_IsGrounded()
{
// Arrange
_resolver.AddResolution("sbom", "abc123", exists: true);
var response = "The component [sbom:abc123] is affected by this vulnerability.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.GroundedClaims.Should().BeGreaterThan(0);
}
[Fact]
public async Task ValidateAsync_ClaimFarFromLink_IsUngrounded()
{
// Arrange
_resolver.AddResolution("sbom", "abc123", exists: true);
// Put the link far from the claim
var response = "[sbom:abc123]\n\n" + new string(' ', 300) + "\n\nThe component is affected.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.UngroundedClaims.Should().NotBeEmpty();
}
[Fact]
public async Task ValidateAsync_BelowThreshold_AddsIssue()
{
// Arrange - use a high threshold
var strictValidator = new GroundingValidator(
_resolver,
NullLogger<GroundingValidator>.Instance,
new GroundingOptions { MinGroundingScore = 0.95 });
var response = "This is affected. No evidence provided.";
var context = CreateContext();
// Act
var result = await strictValidator.ValidateAsync(response, context);
// Assert
result.IsAcceptable.Should().BeFalse();
result.Issues.Should().Contain(i => i.Type == GroundingIssueType.BelowThreshold);
}
[Fact]
public void RejectResponse_CreatesRejectionWithReason()
{
// Arrange
var validation = new GroundingValidationResult
{
GroundingScore = 0.3,
IsAcceptable = false,
Issues = ImmutableArray.Create(
new GroundingIssue
{
Type = GroundingIssueType.BelowThreshold,
Message = "Score too low",
Severity = IssueSeverity.Critical
})
};
// Act
var rejection = _validator.RejectResponse(validation);
// Assert
rejection.Reason.Should().Contain("rejected");
rejection.GroundingScore.Should().Be(0.3);
rejection.RequiredScore.Should().Be(_options.MinGroundingScore);
}
[Fact]
public void SuggestImprovements_ForUngroundedClaims_SuggestsAddCitations()
{
// Arrange
var validation = new GroundingValidationResult
{
UngroundedClaims = ImmutableArray.Create(
new UngroundedClaim { Text = "is affected", Position = 10 })
};
// Act
var suggestions = _validator.SuggestImprovements(validation);
// Assert
suggestions.Should().Contain(s => s.Type == SuggestionType.AddCitations);
}
[Fact]
public void SuggestImprovements_ForInvalidLinks_SuggestsFixLinks()
{
// Arrange
var validation = new GroundingValidationResult
{
ValidatedLinks = ImmutableArray.Create(
new ValidatedLink { Type = "sbom", Path = "bad", IsValid = false })
};
// Act
var suggestions = _validator.SuggestImprovements(validation);
// Assert
suggestions.Should().Contain(s => s.Type == SuggestionType.FixLinks);
}
[Fact]
public void SuggestImprovements_NoLinksWithClaims_SuggestsAddEvidence()
{
// Arrange
var validation = new GroundingValidationResult
{
ValidatedLinks = ImmutableArray<ValidatedLink>.Empty,
TotalClaims = 3
};
// Act
var suggestions = _validator.SuggestImprovements(validation);
// Assert
suggestions.Should().Contain(s => s.Type == SuggestionType.AddEvidence);
suggestions.First(s => s.Type == SuggestionType.AddEvidence)
.Examples.Should().Contain(e => e.Contains("[sbom:"));
}
[Fact]
public async Task ValidateAsync_RuntimeLink_ExtractsCorrectly()
{
// Arrange
_resolver.AddResolution("runtime", "api-gateway:traces", exists: true);
var response = "Check runtime traces [runtime:api-gateway:traces] for execution data.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].Type.Should().Be("runtime");
result.ValidatedLinks[0].Path.Should().Be("api-gateway:traces");
}
[Fact]
public async Task ValidateAsync_AttestLink_ExtractsCorrectly()
{
// Arrange
_resolver.AddResolution("attest", "dsse:sha256:xyz", exists: true);
var response = "See attestation [attest:dsse:sha256:xyz] for provenance.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].Type.Should().Be("attest");
}
[Fact]
public async Task ValidateAsync_AuthLink_ExtractsCorrectly()
{
// Arrange
_resolver.AddResolution("auth", "keys/gitlab-oidc", exists: true);
var response = "Verify with authority key [auth:keys/gitlab-oidc].";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].Type.Should().Be("auth");
}
[Fact]
public async Task ValidateAsync_DocsLink_ExtractsCorrectly()
{
// Arrange
_resolver.AddResolution("docs", "scopes/ci-webhook", exists: true);
var response = "Read the documentation [docs:scopes/ci-webhook] for details.";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(1);
result.ValidatedLinks[0].Type.Should().Be("docs");
}
[Fact]
public async Task ValidateAsync_MixedValidAndInvalid_CalculatesCorrectScore()
{
// Arrange
_resolver.AddResolution("sbom", "good", exists: true);
_resolver.AddResolution("sbom", "bad", exists: false);
var response = "Found in [sbom:good] but not in [sbom:bad].";
var context = CreateContext();
// Act
var result = await _validator.ValidateAsync(response, context);
// Assert
result.ValidatedLinks.Should().HaveCount(2);
result.ValidatedLinks.Count(l => l.IsValid).Should().Be(1);
result.ValidatedLinks.Count(l => !l.IsValid).Should().Be(1);
}
private static ConversationContext CreateContext()
{
return new ConversationContext
{
TenantId = "test-tenant"
};
}
private sealed class MockObjectLinkResolver : IObjectLinkResolver
{
private readonly Dictionary<string, LinkResolution> _resolutions = new();
public void AddResolution(string type, string path, bool exists, string? uri = null)
{
_resolutions[$"{type}:{path}"] = new LinkResolution
{
Exists = exists,
Uri = uri ?? $"{type}://{path}",
ObjectType = type
};
}
public Task<LinkResolution> ResolveAsync(
string type, string path, string? tenantId, CancellationToken cancellationToken)
{
var key = $"{type}:{path}";
if (_resolutions.TryGetValue(key, out var resolution))
{
return Task.FromResult(resolution);
}
return Task.FromResult(new LinkResolution { Exists = false });
}
}
}
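The link grammar exercised above is [type:path] with sbom, vex, reach, runtime, attest, auth and docs types, resolved through IObjectLinkResolver and scored against GroundingOptions.MinGroundingScore. A hedged sketch of how a caller might gate an assistant reply on that validator — member names are taken from the tests; the production pipeline may wire this differently:

// Sketch only: uses the GroundingValidator members exercised by the tests above.
using System.Linq;
using System.Threading.Tasks;
using StellaOps.AdvisoryAI.Chat;

public static class GroundedReplyGate
{
    public static async Task<string> GateAsync(
        GroundingValidator validator,
        string candidateReply,
        ConversationContext context)
    {
        var validation = await validator.ValidateAsync(candidateReply, context);
        if (validation.IsAcceptable)
        {
            return candidateReply;
        }

        // Below the configured MinGroundingScore: surface the rejection and the number of
        // improvement suggestions instead of an ungrounded answer.
        var rejection = validator.RejectResponse(validation);
        var suggestionCount = validator.SuggestImprovements(validation).Count();
        return $"{rejection.Reason} ({suggestionCount} grounding suggestion(s) available)";
    }
}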

View File

@@ -9,6 +9,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" />
<PackageReference Include="Microsoft.Extensions.Configuration" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
@@ -16,6 +17,7 @@
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
<ProjectReference Include="..\..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
<ProjectReference Include="..\..\StellaOps.AdvisoryAI.WebService\StellaOps.AdvisoryAI.WebService.csproj" />
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />

View File

@@ -23,6 +23,7 @@ using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure;
using StellaOps.Attestor.Spdx3;
using StellaOps.Attestor.WebService.Options;
using StellaOps.Configuration;
using StellaOps.Cryptography.DependencyInjection;
@@ -129,6 +130,9 @@ internal static class AttestorWebServiceComposition
builder.Services.AddScoped<Services.IProofChainQueryService, Services.ProofChainQueryService>();
builder.Services.AddScoped<Services.IProofVerificationService, Services.ProofVerificationService>();
// SPDX 3.0.1 Build profile support (BP-007)
builder.Services.AddSingleton<IBuildAttestationMapper, BuildAttestationMapper>();
builder.Services.AddSingleton<StellaOps.Attestor.StandardPredicates.IStandardPredicateRegistry>(sp =>
{
var registry = new StellaOps.Attestor.StandardPredicates.StandardPredicateRegistry();

View File

@@ -11,6 +11,7 @@ using StellaOps.Attestor.Core.Signing;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Spdx3;
using StellaOps.Attestor.WebService.Contracts;
namespace StellaOps.Attestor.WebService;
@@ -394,6 +395,125 @@ internal static class AttestorWebServiceEndpoints
return Results.Ok(BulkVerificationContracts.MapJob(job));
}).RequireAuthorization("attestor:write");
// SPDX 3.0.1 Build Profile export endpoint (BP-007)
app.MapPost("/api/v1/attestations:export-build", (
Spdx3BuildExportRequestDto? requestDto,
HttpContext httpContext,
IBuildAttestationMapper mapper) =>
{
if (requestDto is null)
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Request body is required.");
}
if (!IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
if (string.IsNullOrWhiteSpace(requestDto.BuildType))
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "buildType is required.");
}
// Build the attestation payload from the request
var configSource = (!string.IsNullOrWhiteSpace(requestDto.ConfigSourceUri) ||
requestDto.ConfigSourceDigest?.Count > 0 ||
!string.IsNullOrWhiteSpace(requestDto.ConfigEntryPoint))
? new ConfigSource
{
Uri = requestDto.ConfigSourceUri,
Digest = requestDto.ConfigSourceDigest ?? new Dictionary<string, string>(),
EntryPoint = requestDto.ConfigEntryPoint
}
: null;
var materials = requestDto.Materials?.Select(m => new BuildMaterial
{
Uri = m.Uri,
Digest = m.Digest ?? new Dictionary<string, string>()
}).ToList() ?? new List<BuildMaterial>();
var attestationPayload = new BuildAttestationPayload
{
BuildType = requestDto.BuildType,
Builder = !string.IsNullOrWhiteSpace(requestDto.BuilderId)
? new BuilderInfo
{
Id = requestDto.BuilderId,
Version = requestDto.BuilderVersion
}
: null,
Invocation = new BuildInvocation
{
ConfigSource = configSource,
Environment = requestDto.Environment ?? new Dictionary<string, string>(),
Parameters = requestDto.Parameters ?? new Dictionary<string, string>()
},
Metadata = new BuildMetadata
{
BuildInvocationId = requestDto.BuildId,
BuildStartedOn = requestDto.BuildStartTime,
BuildFinishedOn = requestDto.BuildEndTime
},
Materials = materials
};
// Check if the payload can be mapped
if (!mapper.CanMapToSpdx3(attestationPayload))
{
return Results.Problem(
statusCode: StatusCodes.Status400BadRequest,
title: "Cannot map attestation to SPDX 3.0.1",
detail: "The provided attestation payload is missing required fields for SPDX 3.0.1 Build profile.");
}
// Map to SPDX 3.0.1 Build element
var spdx3Build = mapper.MapToSpdx3(attestationPayload, requestDto.SpdxIdPrefix);
// Build response based on requested format
var response = new Spdx3BuildExportResponseDto
{
Format = requestDto.Format,
BuildSpdxId = spdx3Build.SpdxId,
Spdx3Document = requestDto.Format is BuildAttestationFormat.Spdx3 or BuildAttestationFormat.Both
? new
{
spdxVersion = "SPDX-3.0.1",
conformsTo = new[] { "https://spdx.org/rdf/v3/Build" },
spdxId = $"{requestDto.SpdxIdPrefix}/document",
elements = new object[]
{
new
{
type = spdx3Build.Type,
spdxId = spdx3Build.SpdxId,
name = spdx3Build.Name,
build_buildType = spdx3Build.BuildType,
build_buildId = spdx3Build.BuildId,
build_buildStartTime = spdx3Build.BuildStartTime?.ToString("O", CultureInfo.InvariantCulture),
build_buildEndTime = spdx3Build.BuildEndTime?.ToString("O", CultureInfo.InvariantCulture),
build_configSourceUri = spdx3Build.ConfigSourceUri.IsEmpty ? null : spdx3Build.ConfigSourceUri.ToArray(),
build_configSourceDigest = spdx3Build.ConfigSourceDigest.IsEmpty ? null : spdx3Build.ConfigSourceDigest.Select(h => new { algorithm = h.Algorithm, hashValue = h.HashValue }).ToArray(),
build_configSourceEntrypoint = spdx3Build.ConfigSourceEntrypoint.IsEmpty ? null : spdx3Build.ConfigSourceEntrypoint.ToArray(),
build_environment = spdx3Build.Environment.Count > 0 ? spdx3Build.Environment : null,
build_parameter = spdx3Build.Parameter.Count > 0 ? spdx3Build.Parameter : null
}
}
}
: null,
// DSSE envelope generation requires signing-service integration that is not wired up yet,
// so DsseEnvelope and Signing are always null for now.
DsseEnvelope = null,
Signing = null
};
return Results.Ok(response);
})
.RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-submissions")
.Produces<Spdx3BuildExportResponseDto>(StatusCodes.Status200OK);
}
private static async Task<IResult> GetAttestationDetailResultAsync(

View File

@@ -0,0 +1,221 @@
// -----------------------------------------------------------------------------
// Spdx3BuildProfileContracts.cs
// Sprint: SPRINT_20260107_004_003_BE
// Task: BP-007 - Attestor WebService Integration for SPDX 3.0.1 Build Profile
// Description: DTOs for SPDX 3.0.1 Build profile export endpoint
// -----------------------------------------------------------------------------
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.WebService.Contracts;
/// <summary>
/// Supported export formats for build attestations.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum BuildAttestationFormat
{
/// <summary>
/// DSSE (Dead Simple Signing Envelope) format - default.
/// </summary>
Dsse = 0,
/// <summary>
/// SPDX 3.0.1 Build profile format.
/// </summary>
Spdx3 = 1,
/// <summary>
/// Both DSSE and SPDX 3.0.1 formats combined.
/// </summary>
Both = 2
}
/// <summary>
/// Request to export a build attestation in SPDX 3.0.1 format.
/// </summary>
public sealed record Spdx3BuildExportRequestDto
{
/// <summary>
/// Gets or sets the build type URI (e.g., "https://slsa.dev/provenance/v1").
/// </summary>
[Required]
public required string BuildType { get; init; }
/// <summary>
/// Gets or sets the builder ID URI.
/// </summary>
public string? BuilderId { get; init; }
/// <summary>
/// Gets or sets the builder version.
/// </summary>
public string? BuilderVersion { get; init; }
/// <summary>
/// Gets or sets the build invocation ID.
/// </summary>
public string? BuildId { get; init; }
/// <summary>
/// Gets or sets when the build started.
/// </summary>
public DateTimeOffset? BuildStartTime { get; init; }
/// <summary>
/// Gets or sets when the build finished.
/// </summary>
public DateTimeOffset? BuildEndTime { get; init; }
/// <summary>
/// Gets or sets the configuration source URI.
/// </summary>
public string? ConfigSourceUri { get; init; }
/// <summary>
/// Gets or sets the configuration source digest (algorithm:value).
/// </summary>
public Dictionary<string, string>? ConfigSourceDigest { get; init; }
/// <summary>
/// Gets or sets the configuration entry point.
/// </summary>
public string? ConfigEntryPoint { get; init; }
/// <summary>
/// Gets or sets the build environment variables.
/// </summary>
public Dictionary<string, string>? Environment { get; init; }
/// <summary>
/// Gets or sets the build parameters.
/// </summary>
public Dictionary<string, string>? Parameters { get; init; }
/// <summary>
/// Gets or sets the build materials (source inputs).
/// </summary>
public List<BuildMaterialDto>? Materials { get; init; }
/// <summary>
/// Gets or sets the output format.
/// </summary>
public BuildAttestationFormat Format { get; init; } = BuildAttestationFormat.Dsse;
/// <summary>
/// Gets or sets whether to sign the SPDX 3.0.1 document with DSSE.
/// </summary>
public bool Sign { get; init; } = true;
/// <summary>
/// Gets or sets the SPDX ID prefix for generated elements.
/// </summary>
public string SpdxIdPrefix { get; init; } = "urn:stellaops";
}
/// <summary>
/// Build material (input) DTO.
/// </summary>
public sealed record BuildMaterialDto
{
/// <summary>
/// Gets or sets the material URI.
/// </summary>
[Required]
public required string Uri { get; init; }
/// <summary>
/// Gets or sets the material digest (algorithm:value).
/// </summary>
public Dictionary<string, string>? Digest { get; init; }
}
/// <summary>
/// Response containing SPDX 3.0.1 Build profile export result.
/// </summary>
public sealed record Spdx3BuildExportResponseDto
{
/// <summary>
/// Gets or sets the format of the response.
/// </summary>
public required BuildAttestationFormat Format { get; init; }
/// <summary>
/// Gets or sets the SPDX 3.0.1 document (JSON-LD) when format is Spdx3 or Both.
/// </summary>
public object? Spdx3Document { get; init; }
/// <summary>
/// Gets or sets the DSSE envelope when format is Dsse or Both.
/// </summary>
public DsseEnvelopeDto? DsseEnvelope { get; init; }
/// <summary>
/// Gets or sets the SPDX ID of the generated Build element.
/// </summary>
public string? BuildSpdxId { get; init; }
/// <summary>
/// Gets or sets the signing information.
/// </summary>
public BuildSigningInfoDto? Signing { get; init; }
}
/// <summary>
/// DSSE envelope DTO.
/// </summary>
public sealed record DsseEnvelopeDto
{
/// <summary>
/// Gets or sets the payload type.
/// </summary>
public required string PayloadType { get; init; }
/// <summary>
/// Gets or sets the base64-encoded payload.
/// </summary>
public required string PayloadBase64 { get; init; }
/// <summary>
/// Gets or sets the signatures.
/// </summary>
public required List<DsseSignatureDto> Signatures { get; init; }
}
/// <summary>
/// DSSE signature DTO.
/// </summary>
public sealed record DsseSignatureDto
{
/// <summary>
/// Gets or sets the key ID.
/// </summary>
public required string KeyId { get; init; }
/// <summary>
/// Gets or sets the base64-encoded signature.
/// </summary>
public required string Sig { get; init; }
}
/// <summary>
/// Build signing information DTO.
/// </summary>
public sealed record BuildSigningInfoDto
{
/// <summary>
/// Gets or sets the key ID used for signing.
/// </summary>
public required string KeyId { get; init; }
/// <summary>
/// Gets or sets the signing algorithm.
/// </summary>
public required string Algorithm { get; init; }
/// <summary>
/// Gets or sets when the document was signed.
/// </summary>
public required string SignedAt { get; init; }
}
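For reference, a request body the /api/v1/attestations:export-build endpoint would accept can be sketched directly from these contracts; every value below is illustrative rather than taken from a real build:

// Illustrative values only; the shape comes from Spdx3BuildExportRequestDto above.
using System;
using System.Collections.Generic;
using StellaOps.Attestor.WebService.Contracts;

var exportRequest = new Spdx3BuildExportRequestDto
{
    BuildType = "https://slsa.dev/provenance/v1",
    BuilderId = "https://ci.example.invalid/runner",              // hypothetical builder
    BuildId = "build-42",                                         // hypothetical invocation id
    BuildStartTime = new DateTimeOffset(2026, 1, 9, 10, 0, 0, TimeSpan.Zero),
    BuildEndTime = new DateTimeOffset(2026, 1, 9, 10, 15, 0, TimeSpan.Zero),
    ConfigSourceUri = "https://git.example.invalid/repo.git",     // hypothetical source
    ConfigSourceDigest = new Dictionary<string, string> { ["sha256"] = "feedfacecafe" },
    Materials = new List<BuildMaterialDto>
    {
        new() { Uri = "pkg:oci/alpine@sha256:def789", Digest = new Dictionary<string, string> { ["sha256"] = "def789" } }
    },
    Format = BuildAttestationFormat.Spdx3
};
// POSTed as JSON with the attestor:write scope, this yields a Spdx3BuildExportResponseDto
// whose Spdx3Document carries the generated Build element.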

View File

@@ -29,5 +29,6 @@
<ProjectReference Include="../../__Libraries/StellaOps.Attestor.StandardPredicates/StellaOps.Attestor.StandardPredicates.csproj" />
<ProjectReference Include="../../../Router/__Libraries/StellaOps.Router.AspNet/StellaOps.Router.AspNet.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.Bundling\StellaOps.Attestor.Bundling.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.Spdx3\StellaOps.Attestor.Spdx3.csproj" />
</ItemGroup>
</Project>

View File

@@ -34,7 +34,7 @@ public sealed class BuildAttestationMapper : IBuildAttestationMapper
ArgumentException.ThrowIfNullOrWhiteSpace(spdxIdPrefix);
var configSourceUris = ImmutableArray<string>.Empty;
var configSourceDigests = ImmutableArray<Spdx3Hash>.Empty;
var configSourceDigests = ImmutableArray<Spdx3BuildHash>.Empty;
var configSourceEntrypoints = ImmutableArray<string>.Empty;
if (attestation.Invocation?.ConfigSource is { } configSource)
@@ -47,7 +47,7 @@ public sealed class BuildAttestationMapper : IBuildAttestationMapper
if (configSource.Digest.Count > 0)
{
configSourceDigests = configSource.Digest
.Select(kvp => new Spdx3Hash { Algorithm = kvp.Key, HashValue = kvp.Value })
.Select(kvp => new Spdx3BuildHash { Algorithm = kvp.Key, HashValue = kvp.Value })
.ToImmutableArray();
}

View File

@@ -35,7 +35,7 @@ public sealed class BuildRelationshipBuilder
public BuildRelationshipBuilder AddBuildToolOf(string toolSpdxId, string artifactSpdxId)
{
_relationships.Add(CreateRelationship(
"BUILD_TOOL_OF",
Spdx3RelationshipType.BuildToolOf,
toolSpdxId,
artifactSpdxId));
return this;
@@ -49,7 +49,7 @@ public sealed class BuildRelationshipBuilder
public BuildRelationshipBuilder AddGenerates(string buildSpdxId, string artifactSpdxId)
{
_relationships.Add(CreateRelationship(
"GENERATES",
Spdx3RelationshipType.Generates,
buildSpdxId,
artifactSpdxId));
return this;
@@ -63,7 +63,7 @@ public sealed class BuildRelationshipBuilder
public BuildRelationshipBuilder AddGeneratedFrom(string artifactSpdxId, string sourceSpdxId)
{
_relationships.Add(CreateRelationship(
"GENERATED_FROM",
Spdx3RelationshipType.GeneratedFrom,
artifactSpdxId,
sourceSpdxId));
return this;
@@ -77,7 +77,7 @@ public sealed class BuildRelationshipBuilder
public BuildRelationshipBuilder AddHasPrerequisite(string buildSpdxId, string prerequisiteSpdxId)
{
_relationships.Add(CreateRelationship(
"HAS_PREREQUISITE",
Spdx3RelationshipType.HasPrerequisite,
buildSpdxId,
prerequisiteSpdxId));
return this;
@@ -133,11 +133,11 @@ public sealed class BuildRelationshipBuilder
}
private Spdx3Relationship CreateRelationship(
string relationshipType,
Spdx3RelationshipType relationshipType,
string fromSpdxId,
string toSpdxId)
{
var relId = $"{_spdxIdPrefix}/relationship/{relationshipType.ToLowerInvariant()}/{_relationships.Count + 1}";
var relId = $"{_spdxIdPrefix}/relationship/{relationshipType.ToString().ToLowerInvariant()}/{_relationships.Count + 1}";
return new Spdx3Relationship
{

View File

@@ -11,7 +11,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Spdx3\StellaOps.Spdx3.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Spdx3\StellaOps.Spdx3.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,387 @@
// -----------------------------------------------------------------------------
// BuildProfileIntegrationTests.cs
// Sprint: SPRINT_20260107_004_003_BE_spdx3_build_profile
// Task: BP-011 - Integration tests for SPDX 3.0.1 Build profile
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
using Xunit;
namespace StellaOps.Attestor.Spdx3.Tests.Integration;
/// <summary>
/// Integration tests for SPDX 3.0.1 Build profile end-to-end flows.
/// These tests verify the complete attestation-to-SPDX 3.0.1 pipeline.
/// </summary>
[Trait("Category", "Integration")]
public sealed class BuildProfileIntegrationTests
{
private static readonly DateTimeOffset FixedTimestamp =
new(2026, 1, 9, 12, 0, 0, TimeSpan.Zero);
[Fact]
public void EndToEnd_AttestationToSpdx3_ProducesValidBuildProfile()
{
// Arrange: Create a realistic build attestation payload
var attestation = new BuildAttestationPayload
{
Type = "https://in-toto.io/Statement/v1",
PredicateType = "https://slsa.dev/provenance/v1",
Subject = ImmutableArray.Create(new AttestationSubject
{
Name = "pkg:oci/myapp@sha256:abc123",
Digest = new Dictionary<string, string>
{
["sha256"] = "abc123def456"
}.ToImmutableDictionary()
}),
Predicate = new BuildPredicate
{
BuildDefinition = new BuildDefinitionInfo
{
BuildType = "https://stellaops.org/build/container-scan/v1",
ExternalParameters = new Dictionary<string, object>
{
["imageReference"] = "registry.io/myapp:latest"
}.ToImmutableDictionary(),
InternalParameters = ImmutableDictionary<string, object>.Empty,
ResolvedDependencies = ImmutableArray.Create(new ResourceDescriptor
{
Name = "base-image",
Uri = "pkg:oci/alpine@sha256:def789",
Digest = new Dictionary<string, string>
{
["sha256"] = "def789"
}.ToImmutableDictionary()
})
},
RunDetails = new RunDetailsInfo
{
Builder = new BuilderInfo
{
Id = "https://stellaops.org/scanner/v1.0.0",
Version = new Dictionary<string, string>
{
["stellaops"] = "1.0.0"
}.ToImmutableDictionary()
},
Metadata = new BuildMetadata
{
InvocationId = "scan-12345",
StartedOn = FixedTimestamp.AddMinutes(-5),
FinishedOn = FixedTimestamp
}
}
}
};
var mapper = new BuildAttestationMapper();
// Act: Map attestation to SPDX 3.0.1 Build element
var buildElement = mapper.MapToSpdx3(attestation);
// Assert: Verify all fields are correctly mapped
buildElement.Should().NotBeNull();
buildElement.BuildType.Should().Be("https://stellaops.org/build/container-scan/v1");
buildElement.BuildId.Should().Be("scan-12345");
buildElement.BuildStartTime.Should().Be(FixedTimestamp.AddMinutes(-5));
buildElement.BuildEndTime.Should().Be(FixedTimestamp);
buildElement.ConfigSourceUri.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task SignatureVerification_ValidSignedDocument_Succeeds()
{
// Arrange: Create document and sign it
var timeProvider = new FakeTimeProvider(FixedTimestamp);
var serializer = new Spdx3JsonSerializer();
var signingProvider = new TestDsseSigningProvider();
var signer = new DsseSpdx3Signer(serializer, signingProvider, timeProvider);
var document = CreateTestSpdx3Document();
var options = new DsseSpdx3SigningOptions { PrimaryKeyId = "test-key-1" };
// Act: Sign the document
var envelope = await signer.SignAsync(document, options);
// Assert: Signature should be present
envelope.Should().NotBeNull();
envelope.Signatures.Should().HaveCount(1);
envelope.PayloadType.Should().Be("application/vnd.spdx3+json");
// Verify: Extract and verify the document
var verifier = new DsseSpdx3Verifier(serializer, signingProvider);
var verificationResult = await verifier.VerifyAsync(envelope, CancellationToken.None);
verificationResult.IsValid.Should().BeTrue();
verificationResult.ExtractedDocument.Should().NotBeNull();
}
[Fact]
public void ImportExternalBuildProfile_ValidDocument_ParsesCorrectly()
{
// Arrange: External SPDX 3.0.1 Build profile JSON
var externalJson = """
{
"@context": "https://spdx.org/rdf/3.0.1/terms/",
"@graph": [
{
"@type": "Build",
"spdxId": "urn:external:build:ext-build-001",
"build_buildType": "https://example.com/build/maven/v1",
"build_buildId": "maven-build-789",
"build_buildStartTime": "2026-01-09T10:00:00Z",
"build_buildEndTime": "2026-01-09T10:15:00Z",
"build_configSourceUri": ["https://github.com/example/repo"],
"build_configSourceDigest": [
{
"algorithm": "sha256",
"hashValue": "feedfacecafe"
}
],
"build_environment": {
"JAVA_VERSION": "21",
"MAVEN_VERSION": "3.9.6"
}
}
]
}
""";
// Act: Parse the external document
var parser = new Spdx3Parser();
var parseResult = parser.Parse(externalJson);
// Assert: Build element should be correctly parsed
parseResult.IsSuccess.Should().BeTrue();
parseResult.Document.Should().NotBeNull();
var buildElements = parseResult.Document!.Elements
.OfType<Spdx3Build>()
.ToList();
buildElements.Should().HaveCount(1);
var build = buildElements[0];
build.SpdxId.Should().Be("urn:external:build:ext-build-001");
build.BuildType.Should().Be("https://example.com/build/maven/v1");
build.BuildId.Should().Be("maven-build-789");
build.BuildStartTime.Should().Be(new DateTimeOffset(2026, 1, 9, 10, 0, 0, TimeSpan.Zero));
build.BuildEndTime.Should().Be(new DateTimeOffset(2026, 1, 9, 10, 15, 0, TimeSpan.Zero));
build.ConfigSourceUri.Should().Contain("https://github.com/example/repo");
build.Environment.Should().ContainKey("JAVA_VERSION");
build.Environment!["JAVA_VERSION"].Should().Be("21");
}
[Fact]
public void CombinedDocument_SoftwareAndBuildProfiles_MergesCorrectly()
{
// Arrange: Create Software profile SBOM
var sbomDocument = new Spdx3Document
{
SpdxId = "urn:stellaops:sbom:sbom-001",
Name = "MyApp SBOM",
Namespaces = ImmutableArray.Create("https://stellaops.org/spdx/"),
ProfileConformance = ImmutableArray.Create(Spdx3Profile.Software),
Elements = ImmutableArray.Create<Spdx3Element>(
new Spdx3Package
{
SpdxId = "urn:stellaops:pkg:myapp-1.0.0",
Name = "MyApp",
PackageVersion = "1.0.0",
PackageUrl = "pkg:npm/myapp@1.0.0"
}
)
};
// Arrange: Create Build profile element
var buildElement = new Spdx3Build
{
SpdxId = "urn:stellaops:build:build-001",
BuildType = "https://stellaops.org/build/scanner/v1",
BuildId = "scan-12345",
BuildStartTime = FixedTimestamp.AddMinutes(-5),
BuildEndTime = FixedTimestamp
};
// Act: Combine using CombinedDocumentBuilder
var builder = new CombinedDocumentBuilder();
var combinedDoc = builder
.WithSoftwareDocument(sbomDocument)
.WithBuildProvenance(buildElement)
.Build();
// Assert: Combined document has both profiles
combinedDoc.Should().NotBeNull();
combinedDoc.ProfileConformance.Should().Contain(Spdx3Profile.Software);
combinedDoc.ProfileConformance.Should().Contain(Spdx3Profile.Build);
// Assert: Contains both package and build elements
combinedDoc.Elements.OfType<Spdx3Package>().Should().HaveCount(1);
combinedDoc.Elements.OfType<Spdx3Build>().Should().HaveCount(1);
// Assert: GENERATES relationship exists
var relationships = combinedDoc.Elements.OfType<Spdx3Relationship>().ToList();
var generatesRel = relationships.FirstOrDefault(r =>
r.RelationshipType == Spdx3RelationshipType.Generates);
generatesRel.Should().NotBeNull();
generatesRel!.From.Should().Be(buildElement.SpdxId);
}
[Fact]
public async Task RoundTrip_SignedCombinedDocument_PreservesAllData()
{
// Arrange: Create combined document
var timeProvider = new FakeTimeProvider(FixedTimestamp);
var serializer = new Spdx3JsonSerializer();
var signingProvider = new TestDsseSigningProvider();
var signer = new DsseSpdx3Signer(serializer, signingProvider, timeProvider);
var verifier = new DsseSpdx3Verifier(serializer, signingProvider);
var originalDoc = CreateCombinedTestDocument();
var options = new DsseSpdx3SigningOptions { PrimaryKeyId = "test-key-1" };
// Act: Sign, then verify and extract
var envelope = await signer.SignAsync(originalDoc, options);
var verifyResult = await verifier.VerifyAsync(envelope, CancellationToken.None);
// Assert: Extracted document matches original
verifyResult.IsValid.Should().BeTrue();
var extractedDoc = verifyResult.ExtractedDocument;
extractedDoc.Should().NotBeNull();
extractedDoc!.SpdxId.Should().Be(originalDoc.SpdxId);
extractedDoc.Name.Should().Be(originalDoc.Name);
extractedDoc.ProfileConformance.Should().BeEquivalentTo(originalDoc.ProfileConformance);
// Verify elements preserved
extractedDoc.Elements.OfType<Spdx3Package>().Count()
.Should().Be(originalDoc.Elements.OfType<Spdx3Package>().Count());
extractedDoc.Elements.OfType<Spdx3Build>().Count()
.Should().Be(originalDoc.Elements.OfType<Spdx3Build>().Count());
}
#region Test Helpers
private static Spdx3Document CreateTestSpdx3Document()
{
return new Spdx3Document
{
SpdxId = "urn:stellaops:sbom:test-001",
Name = "Test SBOM",
Namespaces = ImmutableArray.Create("https://stellaops.org/spdx/"),
ProfileConformance = ImmutableArray.Create(Spdx3Profile.Software),
Elements = ImmutableArray.Create<Spdx3Element>(
new Spdx3Package
{
SpdxId = "urn:stellaops:pkg:test-pkg",
Name = "TestPackage",
PackageVersion = "1.0.0"
}
)
};
}
private static Spdx3Document CreateCombinedTestDocument()
{
return new Spdx3Document
{
SpdxId = "urn:stellaops:combined:test-001",
Name = "Combined Test Document",
Namespaces = ImmutableArray.Create("https://stellaops.org/spdx/"),
ProfileConformance = ImmutableArray.Create(Spdx3Profile.Software, Spdx3Profile.Build),
Elements = ImmutableArray.Create<Spdx3Element>(
new Spdx3Package
{
SpdxId = "urn:stellaops:pkg:combined-pkg",
Name = "CombinedPackage",
PackageVersion = "2.0.0"
},
new Spdx3Build
{
SpdxId = "urn:stellaops:build:combined-build",
BuildType = "https://stellaops.org/build/test/v1",
BuildId = "combined-build-001",
BuildStartTime = FixedTimestamp.AddMinutes(-10),
BuildEndTime = FixedTimestamp
},
new Spdx3Relationship
{
SpdxId = "urn:stellaops:rel:generates-001",
RelationshipType = Spdx3RelationshipType.Generates,
From = "urn:stellaops:build:combined-build",
To = ImmutableArray.Create("urn:stellaops:pkg:combined-pkg")
}
)
};
}
/// <summary>
/// Test signing provider that signs with a fixed HMAC-SHA256 key; intended for unit tests only.
/// </summary>
private sealed class TestDsseSigningProvider : IDsseSigningProvider
{
private static readonly byte[] TestKey = Encoding.UTF8.GetBytes("test-signing-key-32-bytes-long!!");
public Task<DsseSignatureResult> SignAsync(
byte[] payload,
string keyId,
string? algorithm,
CancellationToken cancellationToken)
{
using var hmac = new System.Security.Cryptography.HMACSHA256(TestKey);
var signature = hmac.ComputeHash(payload);
return Task.FromResult(new DsseSignatureResult
{
KeyId = keyId,
Algorithm = algorithm ?? "HMAC-SHA256",
SignatureBytes = signature
});
}
public Task<bool> VerifyAsync(
byte[] payload,
byte[] signature,
string keyId,
CancellationToken cancellationToken)
{
using var hmac = new System.Security.Cryptography.HMACSHA256(TestKey);
var expectedSignature = hmac.ComputeHash(payload);
return Task.FromResult(signature.SequenceEqual(expectedSignature));
}
}
#endregion
}
/// <summary>
/// Simple JSON serializer for SPDX 3.0.1 documents (test implementation).
/// </summary>
file sealed class Spdx3JsonSerializer : ISpdx3Serializer
{
private static readonly JsonSerializerOptions Options = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
public byte[] SerializeToBytes(Spdx3Document document)
{
return JsonSerializer.SerializeToUtf8Bytes(document, Options);
}
public Spdx3Document? DeserializeFromBytes(byte[] bytes)
{
return JsonSerializer.Deserialize<Spdx3Document>(bytes, Options);
}
}

View File

@@ -59,7 +59,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
public async Task AdvisoryReadOperations_CompatibleWithPreviousSchema()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestReadBackwardCompatibilityAsync(
@@ -76,7 +76,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
return exists is true or 1 or (long)1;
},
result => result,
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
results.Should().AllSatisfy(r => r.IsCompatible.Should().BeTrue(
@@ -90,7 +90,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
public async Task AdvisoryWriteOperations_CompatibleWithPreviousSchema()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestWriteForwardCompatibilityAsync(
@@ -106,7 +106,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
results.Should().AllSatisfy(r => r.IsCompatible.Should().BeTrue(
@@ -120,7 +120,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
public async Task VexStorageOperations_CompatibleAcrossVersions()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var result = await TestAgainstPreviousSchemaAsync(
@@ -132,7 +132,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
result.IsCompatible.Should().BeTrue(
@@ -146,7 +146,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
public async Task FeedSourceOperations_CompatibleAcrossVersions()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var result = await TestAgainstPreviousSchemaAsync(
@@ -160,7 +160,7 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
result.IsCompatible.Should().BeTrue();
@@ -173,12 +173,12 @@ public class ConcelierSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
public async Task MigrationRollbacks_ExecuteSuccessfully()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestMigrationRollbacksAsync(
migrationsToTest: 3,
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert - relaxed assertion since migrations may not have down scripts
results.Should().NotBeNull();
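These hunks (and the matching EvidenceLocker changes below) swap CancellationToken.None for TestContext.Current.CancellationToken, xUnit v3's per-test token, so an aborted or timed-out test cancels its in-flight database work instead of leaving it running. A minimal sketch of the pattern in isolation — Npgsql and the connection string are illustrative stand-ins, not what the schema-evolution test base actually uses:

// Pattern sketch only: demonstrates threading the per-test token through ADO.NET calls.
using FluentAssertions;
using Npgsql;
using Xunit;

public sealed class PerTestCancellationExample
{
    [Fact]
    public async Task Query_UsesPerTestCancellationToken()
    {
        var ct = TestContext.Current.CancellationToken;

        await using var connection = new NpgsqlConnection("Host=localhost;Database=postgres");
        await connection.OpenAsync(ct);                     // aborts promptly if the test is cancelled

        await using var command = connection.CreateCommand();
        command.CommandText = "SELECT 1";
        var value = await command.ExecuteScalarAsync(ct);   // same token threaded into the query
        value.Should().Be(1);
    }
}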

View File

@@ -44,7 +44,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeFalse();
@@ -64,7 +64,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -86,7 +86,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -118,7 +118,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -146,7 +146,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -170,7 +170,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -198,7 +198,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -222,7 +222,7 @@ public class TarGzBundleExporterTests
using var stream = new MemoryStream();
// Act
var result = await _exporter.ExportToStreamAsync(request, stream);
var result = await _exporter.ExportToStreamAsync(request, stream, TestContext.Current.CancellationToken);
// Assert
result.Success.Should().BeTrue();

View File

@@ -59,7 +59,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task EvidenceReadOperations_CompatibleWithPreviousSchema()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestReadBackwardCompatibilityAsync(
@@ -76,7 +76,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
return exists is true or 1 or (long)1;
},
result => result,
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
results.Should().AllSatisfy(r => r.IsCompatible.Should().BeTrue(
@@ -90,7 +90,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task EvidenceWriteOperations_CompatibleWithPreviousSchema()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestWriteForwardCompatibilityAsync(
@@ -106,7 +106,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
results.Should().AllSatisfy(r => r.IsCompatible.Should().BeTrue(
@@ -120,7 +120,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task AttestationStorageOperations_CompatibleAcrossVersions()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var result = await TestAgainstPreviousSchemaAsync(
@@ -132,7 +132,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
result.IsCompatible.Should().BeTrue(
@@ -146,7 +146,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task BundleExportOperations_CompatibleAcrossVersions()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var result = await TestAgainstPreviousSchemaAsync(
@@ -160,7 +160,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
result.IsCompatible.Should().BeTrue();
@@ -173,7 +173,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task SealedEvidenceOperations_CompatibleAcrossVersions()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var result = await TestAgainstPreviousSchemaAsync(
@@ -188,7 +188,7 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
await cmd.ExecuteScalarAsync();
},
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert
result.IsCompatible.Should().BeTrue();
@@ -201,12 +201,12 @@ public class EvidenceLockerSchemaEvolutionTests : PostgresSchemaEvolutionTestBas
public async Task MigrationRollbacks_ExecuteSuccessfully()
{
// Arrange
await InitializeAsync();
await InitializeAsync(TestContext.Current.CancellationToken);
// Act
var results = await TestMigrationRollbacksAsync(
migrationsToTest: 3,
CancellationToken.None);
TestContext.Current.CancellationToken);
// Assert - relaxed assertion since migrations may not have down scripts
results.Should().NotBeNull();

View File

@@ -35,7 +35,7 @@ public sealed class ExportNotificationEmitterTests
Assert.True(result.Success);
Assert.Equal(1, result.AttemptCount);
Assert.Single(_sink);
Assert.Equal(1, _sink.Count);
}
[Trait("Category", TestCategories.Unit)]
@@ -84,7 +84,7 @@ public sealed class ExportNotificationEmitterTests
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.False(result.Success);
Assert.Single(_dlq);
Assert.Equal(1, _dlq.Count);
}
[Trait("Category", TestCategories.Unit)]
@@ -132,7 +132,7 @@ public sealed class ExportNotificationEmitterTests
Assert.True(result.Success);
Assert.Equal(3, result.AttemptCount);
Assert.Empty(_dlq);
Assert.Equal(0, _dlq.Count);
}
[Trait("Category", TestCategories.Unit)]
@@ -221,7 +221,7 @@ public sealed class ExportNotificationEmitterTests
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.False(result.Success);
Assert.Single(_dlq);
Assert.Equal(1, _dlq.Count);
}
[Trait("Category", TestCategories.Unit)]
@@ -451,7 +451,7 @@ public sealed class InMemoryExportNotificationSinkTests
await sink.PublishAsync("test.channel", "{\"test\":true}");
Assert.Single(sink);
Assert.Equal(1, sink.Count);
}
[Trait("Category", TestCategories.Unit)]
@@ -481,7 +481,7 @@ public sealed class InMemoryExportNotificationSinkTests
await sink.PublishAsync("test", "message2");
sink.Clear();
Assert.Empty(sink);
Assert.Equal(0, sink.Count);
}
}
@@ -496,7 +496,7 @@ public sealed class InMemoryExportNotificationDlqTests
await dlq.EnqueueAsync(entry);
Assert.Single(dlq);
Assert.Equal(1, dlq.Count);
}
[Trait("Category", TestCategories.Unit)]

View File

@@ -175,11 +175,12 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
var entries = new List<ExceptionReportEntry>();
var processedCount = 0;
var referenceTime = job.StartedAt ?? _timeProvider.GetUtcNow();
foreach (var exception in exceptions)
{
var entry = new ExceptionReportEntry
{
Exception = ToReportException(exception)
Exception = ToReportException(exception, referenceTime)
};
if (job.Request.IncludeHistory)
@@ -307,7 +308,7 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
}
}
private static ExceptionReportScope ToReportException(ExceptionObject exc) => new()
private static ExceptionReportScope ToReportException(ExceptionObject exc, DateTimeOffset referenceTime) => new()
{
ExceptionId = exc.ExceptionId,
Version = exc.Version,
@@ -331,7 +332,7 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
UpdatedAt = exc.UpdatedAt,
ApprovedAt = exc.ApprovedAt,
ExpiresAt = exc.ExpiresAt,
IsEffective = exc.IsEffective,
IsEffective = exc.IsEffectiveAt(referenceTime),
EvidenceRefs = exc.EvidenceRefs.ToList(),
CompensatingControls = exc.CompensatingControls.ToList()
};
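The change above evaluates exception effectiveness at a fixed reference time — the job's StartedAt, falling back to the injected clock — so regenerating the same report produces the same IsEffective verdicts instead of drifting with wall-clock time. The actual ExceptionObject.IsEffectiveAt implementation is not part of this diff; the standalone sketch below, offered purely as an assumption, shows the kind of approval/expiry window check such a method would perform:

// Assumption only: illustrates a time-windowed effectiveness check; ExceptionObject's real
// IsEffectiveAt may apply additional rules (status, revocation, etc.).
using System;

public sealed record EffectiveWindow(DateTimeOffset? ApprovedAt, DateTimeOffset? ExpiresAt)
{
    public bool IsEffectiveAt(DateTimeOffset at)
        => ApprovedAt is { } approved
           && approved <= at
           && (ExpiresAt is null || at < ExpiresAt);
}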

View File

@@ -0,0 +1,364 @@
// -----------------------------------------------------------------------------
// BackportContracts.cs
// Sprint: SPRINT_20260107_006_002_FE_diff_runtime_tabs
// Task: DR-014 — API contracts for backport evidence endpoints
// -----------------------------------------------------------------------------
namespace StellaOps.Findings.Ledger.WebService.Contracts;
/// <summary>
/// Response containing backport verification evidence for a finding.
/// </summary>
public sealed record BackportEvidenceResponse
{
/// <summary>
/// Finding this evidence is for.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// Backport verification verdict.
/// </summary>
public required BackportVerdict Verdict { get; init; }
/// <summary>
/// Diff content for each patch.
/// </summary>
public IReadOnlyList<DiffContent>? Diffs { get; init; }
}
/// <summary>
/// Backport verification verdict from Feedser.
/// </summary>
public sealed record BackportVerdict
{
/// <summary>
/// Verification status.
/// </summary>
public required BackportVerdictStatus Status { get; init; }
/// <summary>
/// Confidence score (0.0 to 1.0).
/// </summary>
public required double Confidence { get; init; }
/// <summary>
/// Evidence tier (1-5).
/// </summary>
public required EvidenceTier Tier { get; init; }
/// <summary>
/// Human-readable tier description.
/// </summary>
public required string TierDescription { get; init; }
/// <summary>
/// Upstream package information.
/// </summary>
public required UpstreamInfo Upstream { get; init; }
/// <summary>
/// Distro package information.
/// </summary>
public required DistroInfo Distro { get; init; }
/// <summary>
/// Patch signatures that verify the backport.
/// </summary>
public required IReadOnlyList<PatchSignature> Patches { get; init; }
}
/// <summary>
/// Verification status enumeration.
/// </summary>
public enum BackportVerdictStatus
{
/// <summary>
/// Backport verified with high confidence.
/// </summary>
Verified,
/// <summary>
/// Backport could not be verified.
/// </summary>
Unverified,
/// <summary>
/// Unable to determine backport status.
/// </summary>
Unknown,
/// <summary>
/// Partially verified (some patches confirmed).
/// </summary>
Partial
}
/// <summary>
/// Evidence tier classification.
/// </summary>
public enum EvidenceTier
{
/// <summary>
/// Tier 1: Confirmed by distro advisory (95-100% confidence).
/// </summary>
DistroAdvisory = 1,
/// <summary>
/// Tier 2: Confirmed by changelog (80-94% confidence).
/// </summary>
Changelog = 2,
/// <summary>
/// Tier 3: Patch header match (65-79% confidence).
/// </summary>
PatchHeader = 3,
/// <summary>
/// Tier 4: Binary fingerprint match (40-64% confidence).
/// </summary>
BinaryFingerprint = 4,
/// <summary>
/// Tier 5: NVD heuristic match (20-39% confidence).
/// </summary>
NvdHeuristic = 5
}
/// <summary>
/// Upstream package information.
/// </summary>
public sealed record UpstreamInfo
{
/// <summary>
/// Package URL (purl).
/// </summary>
public required string Purl { get; init; }
/// <summary>
/// Commit SHA that fixes the vulnerability.
/// </summary>
public string? CommitSha { get; init; }
/// <summary>
/// URL to the commit.
/// </summary>
public string? CommitUrl { get; init; }
/// <summary>
/// CVEs resolved by this upstream version.
/// </summary>
public IReadOnlyList<string>? Resolves { get; init; }
}
/// <summary>
/// Distribution package information.
/// </summary>
public sealed record DistroInfo
{
/// <summary>
/// Package URL (purl).
/// </summary>
public required string Purl { get; init; }
/// <summary>
/// Advisory ID (e.g., DSA-5678).
/// </summary>
public string? AdvisoryId { get; init; }
/// <summary>
/// Advisory URL.
/// </summary>
public string? AdvisoryUrl { get; init; }
}
/// <summary>
/// Patch signature information.
/// </summary>
public sealed record PatchSignature
{
/// <summary>
/// Signature identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Type of patch.
/// </summary>
public required string Type { get; init; }
/// <summary>
/// File path being patched.
/// </summary>
public required string FilePath { get; init; }
/// <summary>
/// Hunk signature (content-addressed).
/// </summary>
public required string HunkSignature { get; init; }
/// <summary>
/// CVEs resolved by this patch.
/// </summary>
public IReadOnlyList<string>? Resolves { get; init; }
/// <summary>
/// Whether this is the primary patch.
/// </summary>
public bool IsPrimary { get; init; }
/// <summary>
/// URL to fetch diff content.
/// </summary>
public string? DiffUrl { get; init; }
}
/// <summary>
/// Diff content for a patch.
/// </summary>
public sealed record DiffContent
{
/// <summary>
/// Signature ID this diff is for.
/// </summary>
public required string SignatureId { get; init; }
/// <summary>
/// Original file path.
/// </summary>
public required string OldPath { get; init; }
/// <summary>
/// New file path.
/// </summary>
public required string NewPath { get; init; }
/// <summary>
/// Raw unified diff content.
/// </summary>
public string? RawDiff { get; init; }
/// <summary>
/// Parsed hunks.
/// </summary>
public IReadOnlyList<DiffHunk>? Hunks { get; init; }
/// <summary>
/// Number of additions.
/// </summary>
public int Additions { get; init; }
/// <summary>
/// Number of deletions.
/// </summary>
public int Deletions { get; init; }
}
/// <summary>
/// A hunk in a unified diff.
/// </summary>
public sealed record DiffHunk
{
/// <summary>
/// Hunk index.
/// </summary>
public required int Index { get; init; }
/// <summary>
/// Start line in old file.
/// </summary>
public required int OldStart { get; init; }
/// <summary>
/// Line count in old file.
/// </summary>
public required int OldCount { get; init; }
/// <summary>
/// Start line in new file.
/// </summary>
public required int NewStart { get; init; }
/// <summary>
/// Line count in new file.
/// </summary>
public required int NewCount { get; init; }
/// <summary>
/// Hunk header (@@...@@).
/// </summary>
public required string Header { get; init; }
/// <summary>
/// Function context.
/// </summary>
public string? FunctionContext { get; init; }
/// <summary>
/// Lines in this hunk.
/// </summary>
public required IReadOnlyList<DiffLine> Lines { get; init; }
}
/// <summary>
/// A line in a diff hunk.
/// </summary>
public sealed record DiffLine
{
/// <summary>
/// Line type.
/// </summary>
public required DiffLineType Type { get; init; }
/// <summary>
/// Line content.
/// </summary>
public required string Content { get; init; }
/// <summary>
/// Line number in old file.
/// </summary>
public int? OldLineNumber { get; init; }
/// <summary>
/// Line number in new file.
/// </summary>
public int? NewLineNumber { get; init; }
}
/// <summary>
/// Type of diff line.
/// </summary>
public enum DiffLineType
{
/// <summary>
/// Context line (unchanged).
/// </summary>
Context,
/// <summary>
/// Added line.
/// </summary>
Addition,
/// <summary>
/// Deleted line.
/// </summary>
Deletion
}
/// <summary>
/// Response containing patches for a finding.
/// </summary>
public sealed record PatchesResponse
{
/// <summary>
/// Finding ID.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// Patches that affect this finding.
/// </summary>
public required IReadOnlyList<PatchSignature> Patches { get; init; }
}
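A hypothetical sample showing how these contracts compose; every identifier, purl, and hash below is invented for illustration and does not come from Feedser.
// Invented sample data: demonstrates required vs optional members across the contracts.
internal static class BackportContractsSample
{
    public static BackportEvidenceResponse Build() => new()
    {
        FindingId = Guid.Parse("6f1b2c3d-0000-0000-0000-000000000001"),
        Verdict = new BackportVerdict
        {
            Status = BackportVerdictStatus.Verified,
            Confidence = 0.97,
            Tier = EvidenceTier.DistroAdvisory,
            TierDescription = "Confirmed by distro advisory",
            Upstream = new UpstreamInfo
            {
                Purl = "pkg:generic/openssl@3.0.15",
                CommitSha = "0123456789abcdef0123456789abcdef01234567",
                Resolves = ["CVE-2024-0001"]
            },
            Distro = new DistroInfo
            {
                Purl = "pkg:deb/debian/openssl@3.0.11-1~deb12u2",
                AdvisoryId = "DSA-0000-1"
            },
            Patches =
            [
                new PatchSignature
                {
                    Id = "patch-1",
                    Type = "unified-diff",
                    FilePath = "crypto/evp/evp_enc.c",
                    HunkSignature = "sha256:feedfacefeedface",
                    IsPrimary = true,
                    Resolves = ["CVE-2024-0001"]
                }
            ]
        }
    };
}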

View File

@@ -0,0 +1,255 @@
// -----------------------------------------------------------------------------
// RuntimeTracesContracts.cs
// Sprint: SPRINT_20260107_006_002_FE_diff_runtime_tabs
// Task: DR-014 — API contracts for runtime traces endpoints
// -----------------------------------------------------------------------------
namespace StellaOps.Findings.Ledger.WebService.Contracts;
/// <summary>
/// Response containing runtime traces for a finding.
/// </summary>
public sealed record RuntimeTracesResponse
{
/// <summary>
/// Finding this evidence is for.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// Whether collection is currently active.
/// </summary>
public required bool CollectionActive { get; init; }
/// <summary>
/// When collection started.
/// </summary>
public DateTimeOffset? CollectionStarted { get; init; }
/// <summary>
/// Summary of observations.
/// </summary>
public required ObservationSummary Summary { get; init; }
/// <summary>
/// Function traces.
/// </summary>
public required IReadOnlyList<FunctionTrace> Traces { get; init; }
}
/// <summary>
/// Summary of runtime observations.
/// </summary>
public sealed record ObservationSummary
{
/// <summary>
/// Total hit count across all traces.
/// </summary>
public required long TotalHits { get; init; }
/// <summary>
/// Number of unique call paths.
/// </summary>
public required int UniquePaths { get; init; }
/// <summary>
/// Observation posture.
/// </summary>
public required RuntimePosture Posture { get; init; }
/// <summary>
/// Last hit timestamp.
/// </summary>
public DateTimeOffset? LastHit { get; init; }
/// <summary>
/// Whether a direct path to vulnerable function was observed.
/// Whether a direct path to the vulnerable function was observed.
/// </summary>
public required bool DirectPathObserved { get; init; }
/// <summary>
/// Whether production traffic was observed.
/// </summary>
public required bool ProductionTraffic { get; init; }
/// <summary>
/// Number of containers with observations.
/// </summary>
public required int ContainerCount { get; init; }
}
/// <summary>
/// Runtime observation posture.
/// </summary>
public enum RuntimePosture
{
/// <summary>
/// No runtime observation configured.
/// </summary>
None = 0,
/// <summary>
/// Passive observation (logs only).
/// </summary>
Passive = 1,
/// <summary>
/// Active tracing (syscalls/ETW).
/// </summary>
ActiveTracing = 2,
/// <summary>
/// eBPF deep probes active.
/// </summary>
EbpfDeep = 3,
/// <summary>
/// Full instrumentation coverage.
/// </summary>
FullInstrumentation = 4
}
/// <summary>
/// A function trace showing a call path to a vulnerable function.
/// </summary>
public sealed record FunctionTrace
{
/// <summary>
/// Trace identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Vulnerable function symbol.
/// </summary>
public required string VulnerableFunction { get; init; }
/// <summary>
/// Whether this is a direct path.
/// </summary>
public required bool IsDirectPath { get; init; }
/// <summary>
/// Number of times this path was hit.
/// </summary>
public required long HitCount { get; init; }
/// <summary>
/// First observation timestamp.
/// </summary>
public required DateTimeOffset FirstSeen { get; init; }
/// <summary>
/// Last observation timestamp.
/// </summary>
public required DateTimeOffset LastSeen { get; init; }
/// <summary>
/// Container ID where observed.
/// </summary>
public string? ContainerId { get; init; }
/// <summary>
/// Container name.
/// </summary>
public string? ContainerName { get; init; }
/// <summary>
/// Call path (stack frames).
/// </summary>
public required IReadOnlyList<StackFrame> CallPath { get; init; }
}
/// <summary>
/// A stack frame in a call path.
/// </summary>
public sealed record StackFrame
{
/// <summary>
/// Function/method symbol.
/// </summary>
public required string Symbol { get; init; }
/// <summary>
/// Source file path.
/// </summary>
public string? File { get; init; }
/// <summary>
/// Line number.
/// </summary>
public int? Line { get; init; }
/// <summary>
/// Whether this is an entry point.
/// </summary>
public bool IsEntryPoint { get; init; }
/// <summary>
/// Whether this is the vulnerable function.
/// </summary>
public bool IsVulnerableFunction { get; init; }
/// <summary>
/// Confidence score for this frame.
/// </summary>
public double? Confidence { get; init; }
}
/// <summary>
/// Response containing RTS score for a finding.
/// </summary>
public sealed record RtsScoreResponse
{
/// <summary>
/// Finding ID.
/// </summary>
public required Guid FindingId { get; init; }
/// <summary>
/// RTS score.
/// </summary>
public required RtsScore Score { get; init; }
}
/// <summary>
/// Runtime Trustworthiness Score.
/// </summary>
public sealed record RtsScore
{
/// <summary>
/// Aggregate score (0.0 to 1.0).
/// </summary>
public required double Score { get; init; }
/// <summary>
/// Score breakdown.
/// </summary>
public required RtsBreakdown Breakdown { get; init; }
/// <summary>
/// When the score was computed.
/// </summary>
public required DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// Breakdown of RTS score components.
/// </summary>
public sealed record RtsBreakdown
{
/// <summary>
/// Score based on observation quality (0.0 to 1.0).
/// </summary>
public required double ObservationScore { get; init; }
/// <summary>
/// Factor based on recency of observations (0.0 to 1.0).
/// </summary>
public required double RecencyFactor { get; init; }
/// <summary>
/// Factor based on data quality (0.0 to 1.0).
/// </summary>
public required double QualityFactor { get; init; }
}
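One plausible way the breakdown factors could roll up into the aggregate score is a simple multiplicative blend, sketched below; the actual RTS computation is owned by the runtime evidence pipeline and may weight the factors differently.
// Illustrative only: multiplies the three factors and clamps to [0, 1]. The shipped
// scorer may use different weights or a different aggregation entirely.
internal static class RtsScoreSample
{
    public static RtsScore Combine(RtsBreakdown breakdown, TimeProvider timeProvider) => new()
    {
        Score = Math.Clamp(
            breakdown.ObservationScore * breakdown.RecencyFactor * breakdown.QualityFactor,
            0.0,
            1.0),
        Breakdown = breakdown,
        ComputedAt = timeProvider.GetUtcNow()
    };
}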

View File

@@ -0,0 +1,92 @@
// -----------------------------------------------------------------------------
// BackportEndpoints.cs
// Sprint: SPRINT_20260107_006_002_FE_diff_runtime_tabs
// Task: DR-014 — Backport evidence API endpoints
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http.HttpResults;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Endpoints;
/// <summary>
/// API endpoints for backport verification evidence.
/// </summary>
public static class BackportEndpoints
{
/// <summary>
/// Maps backport endpoints to the application.
/// </summary>
public static void MapBackportEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1/findings")
.WithTags("Backport Evidence")
.RequireAuthorization();
// GET /api/v1/findings/{findingId}/backport
group.MapGet("/{findingId:guid}/backport", GetBackportEvidence)
.WithName("GetBackportEvidence")
.WithDescription("Get backport verification evidence for a finding")
.Produces<BackportEvidenceResponse>(200)
.Produces(404);
// GET /api/v1/findings/{findingId}/patches
group.MapGet("/{findingId:guid}/patches", GetPatches)
.WithName("GetPatches")
.WithDescription("Get patch signatures for a finding")
.Produces<PatchesResponse>(200)
.Produces(404);
}
/// <summary>
/// Gets backport verification evidence for a finding.
/// </summary>
private static async Task<Results<Ok<BackportEvidenceResponse>, NotFound>> GetBackportEvidence(
Guid findingId,
IBackportEvidenceService service,
CancellationToken ct)
{
var evidence = await service.GetBackportEvidenceAsync(findingId, ct);
return evidence is not null
? TypedResults.Ok(evidence)
: TypedResults.NotFound();
}
/// <summary>
/// Gets patch signatures for a finding.
/// </summary>
private static async Task<Results<Ok<PatchesResponse>, NotFound>> GetPatches(
Guid findingId,
IBackportEvidenceService service,
CancellationToken ct)
{
var patches = await service.GetPatchesAsync(findingId, ct);
return patches is not null
? TypedResults.Ok(patches)
: TypedResults.NotFound();
}
}
/// <summary>
/// Service for retrieving backport evidence from Feedser.
/// </summary>
public interface IBackportEvidenceService
{
/// <summary>
/// Gets backport verification evidence for a finding.
/// </summary>
/// <param name="findingId">Finding identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Backport evidence response or null if not found.</returns>
Task<BackportEvidenceResponse?> GetBackportEvidenceAsync(Guid findingId, CancellationToken ct);
/// <summary>
/// Gets patch signatures for a finding.
/// </summary>
/// <param name="findingId">Finding identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Patches response or null if not found.</returns>
Task<PatchesResponse?> GetPatchesAsync(Guid findingId, CancellationToken ct);
}
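Neither route resolves without an IBackportEvidenceService registration. The placeholder below is a hypothetical no-op wiring sketch; the real implementation is expected to call Feedser and may live in a different project.
// Hypothetical placeholder: answers "not found" for everything so the routes can be
// exercised before the Feedser-backed service lands.
internal sealed class NullBackportEvidenceService : IBackportEvidenceService
{
    public Task<BackportEvidenceResponse?> GetBackportEvidenceAsync(Guid findingId, CancellationToken ct)
        => Task.FromResult<BackportEvidenceResponse?>(null);

    public Task<PatchesResponse?> GetPatchesAsync(Guid findingId, CancellationToken ct)
        => Task.FromResult<PatchesResponse?>(null);
}

// Registration sketch for Program.cs (assumption: no other implementation is registered):
// builder.Services.TryAddSingleton<IBackportEvidenceService, NullBackportEvidenceService>();
// app.MapBackportEndpoints();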

View File

@@ -0,0 +1,121 @@
// -----------------------------------------------------------------------------
// RuntimeTracesEndpoints.cs
// Sprint: SPRINT_20260107_006_002_FE_diff_runtime_tabs
// Task: DR-014 — Runtime traces API endpoints
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Endpoints;
/// <summary>
/// API endpoints for runtime traces evidence.
/// </summary>
public static class RuntimeTracesEndpoints
{
/// <summary>
/// Maps runtime traces endpoints to the application.
/// </summary>
public static void MapRuntimeTracesEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1/findings")
.WithTags("Runtime Evidence")
.RequireAuthorization();
// GET /api/v1/findings/{findingId}/runtime/traces
group.MapGet("/{findingId:guid}/runtime/traces", GetRuntimeTraces)
.WithName("GetRuntimeTraces")
.WithDescription("Get runtime function traces for a finding")
.Produces<RuntimeTracesResponse>(200)
.Produces(404);
// GET /api/v1/findings/{findingId}/runtime/score
group.MapGet("/{findingId:guid}/runtime/score", GetRtsScore)
.WithName("GetRtsScore")
.WithDescription("Get Runtime Trustworthiness Score for a finding")
.Produces<RtsScoreResponse>(200)
.Produces(404);
}
/// <summary>
/// Gets runtime function traces for a finding.
/// </summary>
private static async Task<Results<Ok<RuntimeTracesResponse>, NotFound>> GetRuntimeTraces(
Guid findingId,
IRuntimeTracesService service,
CancellationToken ct,
[FromQuery] int? limit = null,
[FromQuery] string? sortBy = null)
{
var options = new RuntimeTracesQueryOptions
{
Limit = limit ?? 50,
SortBy = sortBy ?? "hits"
};
var traces = await service.GetTracesAsync(findingId, options, ct);
return traces is not null
? TypedResults.Ok(traces)
: TypedResults.NotFound();
}
/// <summary>
/// Gets the RTS score for a finding.
/// </summary>
private static async Task<Results<Ok<RtsScoreResponse>, NotFound>> GetRtsScore(
Guid findingId,
IRuntimeTracesService service,
CancellationToken ct)
{
var score = await service.GetRtsScoreAsync(findingId, ct);
return score is not null
? TypedResults.Ok(score)
: TypedResults.NotFound();
}
}
/// <summary>
/// Query options for runtime traces.
/// </summary>
public sealed record RuntimeTracesQueryOptions
{
/// <summary>
/// Maximum number of traces to return.
/// </summary>
public int Limit { get; init; } = 50;
/// <summary>
/// Sort by field (hits, recent).
/// </summary>
public string SortBy { get; init; } = "hits";
}
/// <summary>
/// Service for retrieving runtime traces.
/// </summary>
public interface IRuntimeTracesService
{
/// <summary>
/// Gets runtime traces for a finding.
/// </summary>
/// <param name="findingId">Finding identifier.</param>
/// <param name="options">Query options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Runtime traces response or null if not found.</returns>
Task<RuntimeTracesResponse?> GetTracesAsync(
Guid findingId,
RuntimeTracesQueryOptions options,
CancellationToken ct);
/// <summary>
/// Gets RTS score for a finding.
/// </summary>
/// <param name="findingId">Finding identifier.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>RTS score response or null if not found.</returns>
Task<RtsScoreResponse?> GetRtsScoreAsync(Guid findingId, CancellationToken ct);
}
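A consumption sketch for the routes above, written against HttpClient; the host base address, authentication, and System.Net.Http.Json usage are assumptions about the caller's environment.
// Hedged caller-side sketch; requires using System.Net.Http.Json for ReadFromJsonAsync.
internal static class RuntimeTracesApiSample
{
    public static async Task<RuntimeTracesResponse?> FetchTopTracesAsync(
        HttpClient client, Guid findingId, CancellationToken ct)
    {
        // Mirrors the RuntimeTracesQueryOptions defaults: top 50 traces ordered by hit count.
        var url = $"/api/v1/findings/{findingId}/runtime/traces?limit=50&sortBy=hits";
        using var response = await client.GetAsync(url, ct);
        if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            return null;
        }

        response.EnsureSuccessStatusCode();
        return await response.Content.ReadFromJsonAsync<RuntimeTracesResponse>(cancellationToken: ct);
    }
}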

View File

@@ -1926,6 +1926,10 @@ app.MapEvidenceGraphEndpoints();
app.MapReachabilityMapEndpoints();
app.MapRuntimeTimelineEndpoints();
// Backport and runtime traces endpoints (SPRINT_20260107_006_002_FE)
app.MapBackportEndpoints();
app.MapRuntimeTracesEndpoints();
// Map EWS scoring and webhook endpoints (SPRINT_8200.0012.0004)
app.MapScoringEndpoints();
app.MapWebhookEndpoints();

View File

@@ -0,0 +1,124 @@
// <copyright file="AlertFilter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Filter for querying code scanning alerts.
/// Sprint: SPRINT_20260109_010_002 Task: Implement models
/// </summary>
public sealed record AlertFilter
{
/// <summary>
/// Alert state filter (open, closed, dismissed, fixed).
/// </summary>
public string? State { get; init; }
/// <summary>
/// Severity filter (critical, high, medium, low, warning, note, error).
/// </summary>
public string? Severity { get; init; }
/// <summary>
/// Tool name filter.
/// </summary>
public string? Tool { get; init; }
/// <summary>
/// Git ref filter (e.g., refs/heads/main).
/// </summary>
public string? Ref { get; init; }
/// <summary>
/// Results per page (max 100).
/// </summary>
public int? PerPage { get; init; }
/// <summary>
/// Page number for pagination.
/// </summary>
public int? Page { get; init; }
/// <summary>
/// Sort field (created, updated).
/// </summary>
public string? Sort { get; init; }
/// <summary>
/// Sort direction (asc, desc).
/// </summary>
public string? Direction { get; init; }
/// <summary>
/// Builds query string for the filter.
/// </summary>
public string ToQueryString()
{
var parameters = new List<string>();
if (!string.IsNullOrEmpty(State))
parameters.Add($"state={Uri.EscapeDataString(State)}");
if (!string.IsNullOrEmpty(Severity))
parameters.Add($"severity={Uri.EscapeDataString(Severity)}");
if (!string.IsNullOrEmpty(Tool))
parameters.Add($"tool_name={Uri.EscapeDataString(Tool)}");
if (!string.IsNullOrEmpty(Ref))
parameters.Add($"ref={Uri.EscapeDataString(Ref)}");
if (PerPage.HasValue)
parameters.Add($"per_page={Math.Min(PerPage.Value, 100)}");
if (Page.HasValue)
parameters.Add($"page={Page.Value}");
if (!string.IsNullOrEmpty(Sort))
parameters.Add($"sort={Uri.EscapeDataString(Sort)}");
if (!string.IsNullOrEmpty(Direction))
parameters.Add($"direction={Uri.EscapeDataString(Direction)}");
return parameters.Count > 0 ? "?" + string.Join("&", parameters) : "";
}
}
/// <summary>
/// Update request for an alert.
/// </summary>
public sealed record AlertUpdate
{
/// <summary>
/// New state (dismissed, open).
/// </summary>
public required string State { get; init; }
/// <summary>
/// Reason for dismissal (false_positive, won't_fix, used_in_tests).
/// </summary>
public string? DismissedReason { get; init; }
/// <summary>
/// Comment for dismissal.
/// </summary>
public string? DismissedComment { get; init; }
/// <summary>
/// Validates the update request.
/// </summary>
public void Validate()
{
var validStates = new[] { "dismissed", "open" };
if (!validStates.Contains(State, StringComparer.OrdinalIgnoreCase))
throw new ArgumentException($"State must be one of: {string.Join(", ", validStates)}", nameof(State));
if (State.Equals("dismissed", StringComparison.OrdinalIgnoreCase) && string.IsNullOrEmpty(DismissedReason))
throw new ArgumentException("DismissedReason is required when dismissing an alert", nameof(DismissedReason));
var validReasons = new[] { "false_positive", "won't_fix", "used_in_tests" };
if (!string.IsNullOrEmpty(DismissedReason) && !validReasons.Contains(DismissedReason, StringComparer.OrdinalIgnoreCase))
throw new ArgumentException($"DismissedReason must be one of: {string.Join(", ", validReasons)}", nameof(DismissedReason));
}
}
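GitHub returns alerts one page at a time, so exhaustive listings have to loop. The sketch below pairs the filter with the client interface defined later in this change and stops when a short page comes back; the 100-item page size matches the cap enforced in ToQueryString.
// Paging sketch (hedged): pulls every open alert by walking pages of 100 until a
// short page signals the end. Rate-limit handling is intentionally out of scope here.
internal static class AlertPagingSample
{
    public static async Task<List<CodeScanningAlert>> ListAllOpenAsync(
        IGitHubCodeScanningClient client, string owner, string repo, CancellationToken ct)
    {
        var all = new List<CodeScanningAlert>();
        for (var page = 1; ; page++)
        {
            var filter = new AlertFilter { State = "open", PerPage = 100, Page = page };
            var batch = await client.ListAlertsAsync(owner, repo, filter, ct);
            all.AddRange(batch);
            if (batch.Count < 100)
            {
                return all;
            }
        }
    }
}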

View File

@@ -0,0 +1,280 @@
// <copyright file="CodeScanningAlert.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json.Serialization;
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Code scanning alert from GitHub.
/// Sprint: SPRINT_20260109_010_002 Task: Implement models
/// </summary>
public sealed record CodeScanningAlert
{
/// <summary>
/// Alert number.
/// </summary>
[JsonPropertyName("number")]
public required int Number { get; init; }
/// <summary>
/// Alert state (open, closed, dismissed, fixed).
/// </summary>
[JsonPropertyName("state")]
public required string State { get; init; }
/// <summary>
/// Rule ID that triggered the alert.
/// </summary>
public required string RuleId { get; init; }
/// <summary>
/// Rule severity.
/// </summary>
public required string RuleSeverity { get; init; }
/// <summary>
/// Rule description.
/// </summary>
public required string RuleDescription { get; init; }
/// <summary>
/// Tool that produced the alert.
/// </summary>
public required string Tool { get; init; }
/// <summary>
/// HTML URL to the alert.
/// </summary>
[JsonPropertyName("html_url")]
public required string HtmlUrl { get; init; }
/// <summary>
/// When the alert was created.
/// </summary>
[JsonPropertyName("created_at")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// When the alert was dismissed (if applicable).
/// </summary>
[JsonPropertyName("dismissed_at")]
public DateTimeOffset? DismissedAt { get; init; }
/// <summary>
/// Reason for dismissal.
/// </summary>
[JsonPropertyName("dismissed_reason")]
public string? DismissedReason { get; init; }
/// <summary>
/// Who dismissed the alert.
/// </summary>
[JsonPropertyName("dismissed_by")]
public string? DismissedBy { get; init; }
/// <summary>
/// Most recent instance of the alert.
/// </summary>
[JsonPropertyName("most_recent_instance")]
public AlertInstance? MostRecentInstance { get; init; }
/// <summary>
/// Creates alert from GitHub API response.
/// </summary>
public static CodeScanningAlert FromApiResponse(GitHubAlertResponse response) => new()
{
Number = response.Number,
State = response.State ?? "unknown",
RuleId = response.Rule?.Id ?? "unknown",
RuleSeverity = response.Rule?.Severity ?? "unknown",
RuleDescription = response.Rule?.Description ?? "",
Tool = response.Tool?.Name ?? "unknown",
HtmlUrl = response.HtmlUrl ?? "",
CreatedAt = response.CreatedAt,
DismissedAt = response.DismissedAt,
DismissedReason = response.DismissedReason,
DismissedBy = response.DismissedBy?.Login,
MostRecentInstance = response.MostRecentInstance is not null
? AlertInstance.FromApiResponse(response.MostRecentInstance)
: null
};
}
/// <summary>
/// Alert instance location.
/// </summary>
public sealed record AlertInstance
{
/// <summary>
/// Git ref where the alert was found.
/// </summary>
public required string Ref { get; init; }
/// <summary>
/// Analysis key.
/// </summary>
public string? AnalysisKey { get; init; }
/// <summary>
/// Environment (e.g., "production").
/// </summary>
public string? Environment { get; init; }
/// <summary>
/// Location in the code.
/// </summary>
public AlertLocation? Location { get; init; }
/// <summary>
/// Creates instance from API response.
/// </summary>
public static AlertInstance FromApiResponse(GitHubAlertInstanceResponse response) => new()
{
Ref = response.Ref ?? "unknown",
AnalysisKey = response.AnalysisKey,
Environment = response.Environment,
Location = response.Location is not null
? new AlertLocation
{
Path = response.Location.Path ?? "",
StartLine = response.Location.StartLine,
EndLine = response.Location.EndLine,
StartColumn = response.Location.StartColumn,
EndColumn = response.Location.EndColumn
}
: null
};
}
/// <summary>
/// Alert location in source code.
/// </summary>
public sealed record AlertLocation
{
/// <summary>
/// File path.
/// </summary>
public required string Path { get; init; }
/// <summary>
/// Start line.
/// </summary>
public int? StartLine { get; init; }
/// <summary>
/// End line.
/// </summary>
public int? EndLine { get; init; }
/// <summary>
/// Start column.
/// </summary>
public int? StartColumn { get; init; }
/// <summary>
/// End column.
/// </summary>
public int? EndColumn { get; init; }
}
#region GitHub API Response Models
/// <summary>
/// GitHub API alert response.
/// </summary>
public sealed record GitHubAlertResponse
{
[JsonPropertyName("number")]
public int Number { get; init; }
[JsonPropertyName("state")]
public string? State { get; init; }
[JsonPropertyName("rule")]
public GitHubRuleResponse? Rule { get; init; }
[JsonPropertyName("tool")]
public GitHubToolResponse? Tool { get; init; }
[JsonPropertyName("html_url")]
public string? HtmlUrl { get; init; }
[JsonPropertyName("created_at")]
public DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("dismissed_at")]
public DateTimeOffset? DismissedAt { get; init; }
[JsonPropertyName("dismissed_reason")]
public string? DismissedReason { get; init; }
[JsonPropertyName("dismissed_by")]
public GitHubUserResponse? DismissedBy { get; init; }
[JsonPropertyName("most_recent_instance")]
public GitHubAlertInstanceResponse? MostRecentInstance { get; init; }
}
public sealed record GitHubRuleResponse
{
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("severity")]
public string? Severity { get; init; }
[JsonPropertyName("description")]
public string? Description { get; init; }
}
public sealed record GitHubToolResponse
{
[JsonPropertyName("name")]
public string? Name { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
}
public sealed record GitHubUserResponse
{
[JsonPropertyName("login")]
public string? Login { get; init; }
}
public sealed record GitHubAlertInstanceResponse
{
[JsonPropertyName("ref")]
public string? Ref { get; init; }
[JsonPropertyName("analysis_key")]
public string? AnalysisKey { get; init; }
[JsonPropertyName("environment")]
public string? Environment { get; init; }
[JsonPropertyName("location")]
public GitHubLocationResponse? Location { get; init; }
}
public sealed record GitHubLocationResponse
{
[JsonPropertyName("path")]
public string? Path { get; init; }
[JsonPropertyName("start_line")]
public int? StartLine { get; init; }
[JsonPropertyName("end_line")]
public int? EndLine { get; init; }
[JsonPropertyName("start_column")]
public int? StartColumn { get; init; }
[JsonPropertyName("end_column")]
public int? EndColumn { get; init; }
}
#endregion

View File

@@ -0,0 +1,312 @@
// <copyright file="GitHubCodeScanningClient.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.IO.Compression;
using System.Net;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Client for GitHub Code Scanning API.
/// Sprint: SPRINT_20260109_010_002 Task: Implement GitHubCodeScanningClient
/// </summary>
public sealed class GitHubCodeScanningClient : IGitHubCodeScanningClient
{
private readonly IHttpClientFactory _httpClientFactory;
private readonly ILogger<GitHubCodeScanningClient> _logger;
private readonly TimeProvider _timeProvider;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNameCaseInsensitive = true,
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
};
/// <summary>
/// HTTP client name for IHttpClientFactory.
/// </summary>
public const string HttpClientName = "GitHubCodeScanning";
public GitHubCodeScanningClient(
IHttpClientFactory httpClientFactory,
ILogger<GitHubCodeScanningClient> logger,
TimeProvider timeProvider)
{
_httpClientFactory = httpClientFactory;
_logger = logger;
_timeProvider = timeProvider;
}
/// <inheritdoc />
public async Task<SarifUploadResult> UploadSarifAsync(
string owner,
string repo,
SarifUploadRequest request,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
ArgumentNullException.ThrowIfNull(request);
request.Validate();
_logger.LogInformation(
"Uploading SARIF to {Owner}/{Repo} for commit {CommitSha}",
owner, repo, request.CommitSha[..7]);
// Compress and encode SARIF content
var compressedSarif = await CompressGzipAsync(request.SarifContent, ct);
var encodedSarif = Convert.ToBase64String(compressedSarif);
_logger.LogDebug(
"SARIF compressed from {OriginalSize} to {CompressedSize} bytes",
request.SarifContent.Length, compressedSarif.Length);
// Build request body
var body = new Dictionary<string, object?>
{
["commit_sha"] = request.CommitSha,
["ref"] = request.Ref,
["sarif"] = encodedSarif
};
if (!string.IsNullOrEmpty(request.CheckoutUri))
body["checkout_uri"] = request.CheckoutUri;
if (request.StartedAt.HasValue)
body["started_at"] = request.StartedAt.Value.ToString("O");
if (!string.IsNullOrEmpty(request.ToolName))
body["tool_name"] = request.ToolName;
var client = _httpClientFactory.CreateClient(HttpClientName);
var url = $"/repos/{owner}/{repo}/code-scanning/sarifs";
var content = new StringContent(
JsonSerializer.Serialize(body, JsonOptions),
Encoding.UTF8,
"application/json");
var response = await client.PostAsync(url, content, ct);
await EnsureSuccessStatusCodeAsync(response, "upload SARIF", ct);
var responseBody = await response.Content.ReadAsStringAsync(ct);
var uploadResponse = JsonSerializer.Deserialize<GitHubSarifUploadResponse>(responseBody, JsonOptions)
?? throw new InvalidOperationException("Failed to parse upload response");
_logger.LogInformation("SARIF uploaded successfully. ID: {SarifId}", uploadResponse.Id);
return SarifUploadResult.FromApiResponse(uploadResponse);
}
/// <inheritdoc />
public async Task<SarifUploadStatus> GetUploadStatusAsync(
string owner,
string repo,
string sarifId,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
ArgumentException.ThrowIfNullOrWhiteSpace(sarifId);
var client = _httpClientFactory.CreateClient(HttpClientName);
var url = $"/repos/{owner}/{repo}/code-scanning/sarifs/{sarifId}";
var response = await client.GetAsync(url, ct);
await EnsureSuccessStatusCodeAsync(response, "get upload status", ct);
var responseBody = await response.Content.ReadAsStringAsync(ct);
var statusResponse = JsonSerializer.Deserialize<GitHubSarifStatusResponse>(responseBody, JsonOptions)
?? throw new InvalidOperationException("Failed to parse status response");
return SarifUploadStatus.FromApiResponse(statusResponse);
}
/// <inheritdoc />
public async Task<SarifUploadStatus> WaitForProcessingAsync(
string owner,
string repo,
string sarifId,
TimeSpan timeout,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
ArgumentException.ThrowIfNullOrWhiteSpace(sarifId);
var deadline = _timeProvider.GetUtcNow() + timeout;
var delay = TimeSpan.FromSeconds(2);
const int maxDelaySeconds = 30;
_logger.LogInformation(
"Waiting for SARIF {SarifId} processing (timeout: {Timeout})",
sarifId, timeout);
while (_timeProvider.GetUtcNow() < deadline)
{
ct.ThrowIfCancellationRequested();
var status = await GetUploadStatusAsync(owner, repo, sarifId, ct);
if (status.IsComplete)
{
_logger.LogInformation(
"SARIF {SarifId} processing complete. Status: {Status}",
sarifId, status.Status);
return status;
}
_logger.LogDebug("SARIF {SarifId} still processing, waiting {Delay}...", sarifId, delay);
await Task.Delay(delay, ct);
// Exponential backoff with max
delay = TimeSpan.FromSeconds(Math.Min(delay.TotalSeconds * 1.5, maxDelaySeconds));
}
throw new TimeoutException($"SARIF processing did not complete within {timeout}");
}
/// <inheritdoc />
public async Task<IReadOnlyList<CodeScanningAlert>> ListAlertsAsync(
string owner,
string repo,
AlertFilter? filter,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
var client = _httpClientFactory.CreateClient(HttpClientName);
var queryString = filter?.ToQueryString() ?? "";
var url = $"/repos/{owner}/{repo}/code-scanning/alerts{queryString}";
var response = await client.GetAsync(url, ct);
await EnsureSuccessStatusCodeAsync(response, "list alerts", ct);
var responseBody = await response.Content.ReadAsStringAsync(ct);
var alertResponses = JsonSerializer.Deserialize<GitHubAlertResponse[]>(responseBody, JsonOptions)
?? [];
return alertResponses.Select(CodeScanningAlert.FromApiResponse).ToList();
}
/// <inheritdoc />
public async Task<CodeScanningAlert> GetAlertAsync(
string owner,
string repo,
int alertNumber,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
var client = _httpClientFactory.CreateClient(HttpClientName);
var url = $"/repos/{owner}/{repo}/code-scanning/alerts/{alertNumber}";
var response = await client.GetAsync(url, ct);
await EnsureSuccessStatusCodeAsync(response, "get alert", ct);
var responseBody = await response.Content.ReadAsStringAsync(ct);
var alertResponse = JsonSerializer.Deserialize<GitHubAlertResponse>(responseBody, JsonOptions)
?? throw new InvalidOperationException("Failed to parse alert response");
return CodeScanningAlert.FromApiResponse(alertResponse);
}
/// <inheritdoc />
public async Task<CodeScanningAlert> UpdateAlertAsync(
string owner,
string repo,
int alertNumber,
AlertUpdate update,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(owner);
ArgumentException.ThrowIfNullOrWhiteSpace(repo);
ArgumentNullException.ThrowIfNull(update);
update.Validate();
_logger.LogInformation(
"Updating alert {AlertNumber} in {Owner}/{Repo} to state {State}",
alertNumber, owner, repo, update.State);
var client = _httpClientFactory.CreateClient(HttpClientName);
var url = $"/repos/{owner}/{repo}/code-scanning/alerts/{alertNumber}";
var body = new Dictionary<string, object?>
{
["state"] = update.State
};
if (!string.IsNullOrEmpty(update.DismissedReason))
body["dismissed_reason"] = update.DismissedReason;
if (!string.IsNullOrEmpty(update.DismissedComment))
body["dismissed_comment"] = update.DismissedComment;
var content = new StringContent(
JsonSerializer.Serialize(body, JsonOptions),
Encoding.UTF8,
"application/json");
var request = new HttpRequestMessage(HttpMethod.Patch, url) { Content = content };
var response = await client.SendAsync(request, ct);
await EnsureSuccessStatusCodeAsync(response, "update alert", ct);
var responseBody = await response.Content.ReadAsStringAsync(ct);
var alertResponse = JsonSerializer.Deserialize<GitHubAlertResponse>(responseBody, JsonOptions)
?? throw new InvalidOperationException("Failed to parse alert response");
return CodeScanningAlert.FromApiResponse(alertResponse);
}
private static async Task<byte[]> CompressGzipAsync(string content, CancellationToken ct)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var output = new MemoryStream();
await using (var gzip = new GZipStream(output, CompressionLevel.Optimal, leaveOpen: true))
{
await gzip.WriteAsync(bytes, ct);
}
return output.ToArray();
}
private async Task EnsureSuccessStatusCodeAsync(HttpResponseMessage response, string operation, CancellationToken ct)
{
if (response.IsSuccessStatusCode)
return;
var body = await response.Content.ReadAsStringAsync(ct);
var errorMessage = response.StatusCode switch
{
HttpStatusCode.Unauthorized => "GitHub authentication failed. Check your token.",
HttpStatusCode.Forbidden => "Access forbidden. Check repository permissions.",
HttpStatusCode.NotFound => "Repository or resource not found.",
HttpStatusCode.UnprocessableEntity => $"Validation failed: {body}",
_ => $"GitHub API error ({response.StatusCode}): {body}"
};
_logger.LogError("Failed to {Operation}: {Error}", operation, errorMessage);
throw new GitHubApiException(errorMessage, response.StatusCode);
}
}
/// <summary>
/// Exception for GitHub API errors.
/// </summary>
public sealed class GitHubApiException : Exception
{
public HttpStatusCode StatusCode { get; }
public GitHubApiException(string message, HttpStatusCode statusCode)
: base(message)
{
StatusCode = statusCode;
}
}
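A caller-side sketch showing how the typed exception can drive different handling; treating 404 as "no code scanning data" is an assumption about the caller's policy, not behaviour guaranteed by the client.
// Hedged error-handling sketch: only the NotFound branch is shown; other statuses
// still surface as GitHubApiException for the caller to log or retry as it sees fit.
internal static class CodeScanningErrorHandlingSample
{
    public static async Task<IReadOnlyList<CodeScanningAlert>> ListOrEmptyAsync(
        IGitHubCodeScanningClient client, string owner, string repo, CancellationToken ct)
    {
        try
        {
            return await client.ListAlertsAsync(owner, repo, filter: null, ct);
        }
        catch (GitHubApiException ex) when (ex.StatusCode == HttpStatusCode.NotFound)
        {
            // No code scanning data (or no access) for this repository: treat as empty.
            return Array.Empty<CodeScanningAlert>();
        }
    }
}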

View File

@@ -0,0 +1,67 @@
// <copyright file="GitHubCodeScanningExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Net.Http.Headers;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// DI extensions for GitHub Code Scanning client.
/// Sprint: SPRINT_20260109_010_002 Task: DI registration
/// </summary>
public static class GitHubCodeScanningExtensions
{
/// <summary>
/// Adds GitHub Code Scanning client services.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configureClient">Optional HTTP client configuration.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddGitHubCodeScanningClient(
this IServiceCollection services,
Action<HttpClient>? configureClient = null)
{
services.AddHttpClient(GitHubCodeScanningClient.HttpClientName, client =>
{
// Default configuration for GitHub API
client.BaseAddress = new Uri("https://api.github.com");
client.DefaultRequestHeaders.Accept.Add(
new MediaTypeWithQualityHeaderValue("application/vnd.github+json"));
client.DefaultRequestHeaders.Add("X-GitHub-Api-Version", "2022-11-28");
client.DefaultRequestHeaders.UserAgent.Add(
new ProductInfoHeaderValue("StellaOps", "1.0"));
client.Timeout = TimeSpan.FromMinutes(5); // Large SARIF uploads
configureClient?.Invoke(client);
});
services.AddSingleton<IGitHubCodeScanningClient, GitHubCodeScanningClient>();
return services;
}
/// <summary>
/// Adds GitHub Code Scanning client for GitHub Enterprise Server.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="baseUrl">GHES base URL.</param>
/// <param name="configureClient">Optional HTTP client configuration.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddGitHubEnterpriseCodeScanningClient(
this IServiceCollection services,
string baseUrl,
Action<HttpClient>? configureClient = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(baseUrl);
var apiUrl = baseUrl.TrimEnd('/') + "/api/v3";
return services.AddGitHubCodeScanningClient(client =>
{
client.BaseAddress = new Uri(apiUrl);
configureClient?.Invoke(client);
});
}
}
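An end-to-end usage sketch under stated assumptions: the bearer token is injected through the configureClient hook for brevity, whereas a GitHub App integration would normally mint installation tokens per request; the names below are illustrative.
// Hedged end-to-end sample: register the client, upload SARIF, then poll until
// processing finishes or the timeout elapses.
internal static class CodeScanningUsageSample
{
    public static IServiceCollection AddSample(IServiceCollection services, string token)
        => services.AddGitHubCodeScanningClient(client =>
            client.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", token));

    public static async Task<SarifUploadStatus> UploadAndWaitAsync(
        IGitHubCodeScanningClient client,
        string owner,
        string repo,
        string commitSha,
        string sarifJson,
        CancellationToken ct)
    {
        var request = new SarifUploadRequest
        {
            CommitSha = commitSha,
            Ref = "refs/heads/main",
            SarifContent = sarifJson,
            ToolName = "StellaOps"
        };

        var upload = await client.UploadSarifAsync(owner, repo, request, ct);
        var status = await client.WaitForProcessingAsync(
            owner, repo, upload.Id, TimeSpan.FromMinutes(5), ct);

        if (status.Status == ProcessingStatus.Failed)
        {
            throw new InvalidOperationException(
                $"SARIF processing failed: {string.Join("; ", status.Errors)}");
        }

        return status;
    }
}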

View File

@@ -0,0 +1,100 @@
// <copyright file="IGitHubCodeScanningClient.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Client for GitHub Code Scanning API.
/// Sprint: SPRINT_20260109_010_002 Task: Create interface
/// </summary>
public interface IGitHubCodeScanningClient
{
/// <summary>
/// Upload SARIF to GitHub Code Scanning.
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="request">Upload request.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Upload result with SARIF ID.</returns>
Task<SarifUploadResult> UploadSarifAsync(
string owner,
string repo,
SarifUploadRequest request,
CancellationToken ct);
/// <summary>
/// Get SARIF upload processing status.
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="sarifId">SARIF upload ID.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Processing status.</returns>
Task<SarifUploadStatus> GetUploadStatusAsync(
string owner,
string repo,
string sarifId,
CancellationToken ct);
/// <summary>
/// Wait for SARIF processing to complete.
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="sarifId">SARIF upload ID.</param>
/// <param name="timeout">Maximum wait time.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Final processing status.</returns>
Task<SarifUploadStatus> WaitForProcessingAsync(
string owner,
string repo,
string sarifId,
TimeSpan timeout,
CancellationToken ct);
/// <summary>
/// List code scanning alerts for a repository.
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="filter">Optional filter.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>List of alerts.</returns>
Task<IReadOnlyList<CodeScanningAlert>> ListAlertsAsync(
string owner,
string repo,
AlertFilter? filter,
CancellationToken ct);
/// <summary>
/// Get a specific code scanning alert.
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="alertNumber">Alert number.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Alert details.</returns>
Task<CodeScanningAlert> GetAlertAsync(
string owner,
string repo,
int alertNumber,
CancellationToken ct);
/// <summary>
/// Update alert state (dismiss/reopen).
/// </summary>
/// <param name="owner">Repository owner.</param>
/// <param name="repo">Repository name.</param>
/// <param name="alertNumber">Alert number.</param>
/// <param name="update">Update request.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Updated alert.</returns>
Task<CodeScanningAlert> UpdateAlertAsync(
string owner,
string repo,
int alertNumber,
AlertUpdate update,
CancellationToken ct);
}

View File

@@ -0,0 +1,20 @@
// <copyright file="ProcessingStatus.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Processing status for SARIF uploads.
/// </summary>
public enum ProcessingStatus
{
/// <summary>Upload is pending processing.</summary>
Pending,
/// <summary>Processing completed successfully.</summary>
Complete,
/// <summary>Processing failed.</summary>
Failed
}

View File

@@ -0,0 +1,63 @@
// <copyright file="SarifUploadRequest.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Request to upload SARIF to GitHub Code Scanning.
/// Sprint: SPRINT_20260109_010_002 Task: Implement models
/// </summary>
public sealed record SarifUploadRequest
{
/// <summary>
/// Commit SHA for the analysis.
/// </summary>
public required string CommitSha { get; init; }
/// <summary>
/// Git ref (e.g., refs/heads/main).
/// </summary>
public required string Ref { get; init; }
/// <summary>
/// SARIF content (raw JSON string).
/// </summary>
public required string SarifContent { get; init; }
/// <summary>
/// Optional checkout URI for file paths.
/// </summary>
public string? CheckoutUri { get; init; }
/// <summary>
/// Analysis start time.
/// </summary>
public DateTimeOffset? StartedAt { get; init; }
/// <summary>
/// Tool name for categorization.
/// </summary>
public string? ToolName { get; init; }
/// <summary>
/// Validates the request.
/// </summary>
public void Validate()
{
if (string.IsNullOrWhiteSpace(CommitSha))
throw new ArgumentException("CommitSha is required", nameof(CommitSha));
if (CommitSha.Length != 40)
throw new ArgumentException("CommitSha must be a 40-character SHA", nameof(CommitSha));
if (string.IsNullOrWhiteSpace(Ref))
throw new ArgumentException("Ref is required", nameof(Ref));
if (!Ref.StartsWith("refs/", StringComparison.Ordinal))
throw new ArgumentException("Ref must start with 'refs/'", nameof(Ref));
if (string.IsNullOrWhiteSpace(SarifContent))
throw new ArgumentException("SarifContent is required", nameof(SarifContent));
}
}

View File

@@ -0,0 +1,53 @@
// <copyright file="SarifUploadResult.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json.Serialization;
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Result of uploading SARIF to GitHub Code Scanning.
/// Sprint: SPRINT_20260109_010_002 Task: Implement models
/// </summary>
public sealed record SarifUploadResult
{
/// <summary>
/// Upload ID for status polling.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// API URL for status.
/// </summary>
[JsonPropertyName("url")]
public required string Url { get; init; }
/// <summary>
/// Initial processing status.
/// </summary>
public required ProcessingStatus Status { get; init; }
/// <summary>
/// Creates a pending result from GitHub API response.
/// </summary>
public static SarifUploadResult FromApiResponse(GitHubSarifUploadResponse response) => new()
{
Id = response.Id ?? throw new InvalidOperationException("Upload ID is missing"),
Url = response.Url ?? throw new InvalidOperationException("Upload URL is missing"),
Status = ProcessingStatus.Pending
};
}
/// <summary>
/// GitHub API response for SARIF upload.
/// </summary>
public sealed record GitHubSarifUploadResponse
{
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("url")]
public string? Url { get; init; }
}

View File

@@ -0,0 +1,104 @@
// <copyright file="SarifUploadStatus.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
/// <summary>
/// Processing status for a SARIF upload.
/// Sprint: SPRINT_20260109_010_002 Task: Implement models
/// </summary>
public sealed record SarifUploadStatus
{
/// <summary>
/// Processing status.
/// </summary>
public required ProcessingStatus Status { get; init; }
/// <summary>
/// Analysis URL (when complete).
/// </summary>
public string? AnalysisUrl { get; init; }
/// <summary>
/// Error messages (when failed).
/// </summary>
public ImmutableArray<string> Errors { get; init; } = [];
/// <summary>
/// Processing started at.
/// </summary>
public DateTimeOffset? ProcessingStartedAt { get; init; }
/// <summary>
/// Processing completed at.
/// </summary>
public DateTimeOffset? ProcessingCompletedAt { get; init; }
/// <summary>
/// Number of results found.
/// </summary>
public int? ResultsCount { get; init; }
/// <summary>
/// Number of rules triggered.
/// </summary>
public int? RulesCount { get; init; }
/// <summary>
/// Creates status from GitHub API response.
/// </summary>
public static SarifUploadStatus FromApiResponse(GitHubSarifStatusResponse response)
{
var status = response.ProcessingStatus?.ToLowerInvariant() switch
{
"pending" => ProcessingStatus.Pending,
"complete" => ProcessingStatus.Complete,
"failed" => ProcessingStatus.Failed,
_ => ProcessingStatus.Pending
};
return new SarifUploadStatus
{
Status = status,
AnalysisUrl = response.AnalysesUrl,
Errors = response.Errors?.ToImmutableArray() ?? [],
ResultsCount = response.ResultsCount,
RulesCount = response.RulesCount
};
}
/// <summary>
/// Whether processing is still in progress.
/// </summary>
public bool IsInProgress => Status == ProcessingStatus.Pending;
/// <summary>
/// Whether processing has completed (success or failure).
/// </summary>
public bool IsComplete => Status is ProcessingStatus.Complete or ProcessingStatus.Failed;
}
/// <summary>
/// GitHub API response for SARIF status.
/// </summary>
public sealed record GitHubSarifStatusResponse
{
[JsonPropertyName("processing_status")]
public string? ProcessingStatus { get; init; }
[JsonPropertyName("analyses_url")]
public string? AnalysesUrl { get; init; }
[JsonPropertyName("errors")]
public string[]? Errors { get; init; }
[JsonPropertyName("results_count")]
public int? ResultsCount { get; init; }
[JsonPropertyName("rules_count")]
public int? RulesCount { get; init; }
}

View File

@@ -9,6 +9,11 @@
<RootNamespace>StellaOps.Integrations.Plugin.GitHubApp</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Http" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Integrations.Contracts\StellaOps.Integrations.Contracts.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,471 @@
// <copyright file="GitHubCodeScanningClientTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Net;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using Moq.Protected;
using StellaOps.Integrations.Plugin.GitHubApp.CodeScanning;
using Xunit;
namespace StellaOps.Integrations.Tests.CodeScanning;
/// <summary>
/// Tests for <see cref="GitHubCodeScanningClient"/>.
/// </summary>
[Trait("Category", "Unit")]
public class GitHubCodeScanningClientTests
{
private readonly Mock<HttpMessageHandler> _httpHandlerMock;
private readonly IHttpClientFactory _httpClientFactory;
public GitHubCodeScanningClientTests()
{
_httpHandlerMock = new Mock<HttpMessageHandler>();
var httpClient = new HttpClient(_httpHandlerMock.Object)
{
BaseAddress = new Uri("https://api.github.com")
};
var factoryMock = new Mock<IHttpClientFactory>();
factoryMock
.Setup(f => f.CreateClient(GitHubCodeScanningClient.HttpClientName))
.Returns(httpClient);
_httpClientFactory = factoryMock.Object;
}
[Fact]
public async Task UploadSarifAsync_Success_ReturnsResult()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
id = "sarif-123",
url = "https://api.github.com/repos/owner/repo/code-scanning/sarifs/sarif-123"
});
SetupHttpResponse(HttpStatusCode.Accepted, responseJson);
var client = CreateClient();
var request = new SarifUploadRequest
{
CommitSha = "a".PadRight(40, 'b'),
Ref = "refs/heads/main",
SarifContent = "{\"version\":\"2.1.0\",\"runs\":[]}"
};
// Act
var result = await client.UploadSarifAsync("owner", "repo", request, CancellationToken.None);
// Assert
result.Id.Should().Be("sarif-123");
result.Status.Should().Be(ProcessingStatus.Pending);
}
[Fact]
public async Task UploadSarifAsync_InvalidCommitSha_Throws()
{
// Arrange
var client = CreateClient();
var request = new SarifUploadRequest
{
CommitSha = "short",
Ref = "refs/heads/main",
SarifContent = "{}"
};
// Act & Assert
await Assert.ThrowsAsync<ArgumentException>(
() => client.UploadSarifAsync("owner", "repo", request, CancellationToken.None));
}
[Fact]
public async Task UploadSarifAsync_InvalidRef_Throws()
{
// Arrange
var client = CreateClient();
var request = new SarifUploadRequest
{
CommitSha = "a".PadRight(40, 'b'),
Ref = "main", // Missing refs/ prefix
SarifContent = "{}"
};
// Act & Assert
await Assert.ThrowsAsync<ArgumentException>(
() => client.UploadSarifAsync("owner", "repo", request, CancellationToken.None));
}
[Fact]
public async Task GetUploadStatusAsync_Complete_ReturnsStatus()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
processing_status = "complete",
analyses_url = "https://api.github.com/repos/owner/repo/code-scanning/analyses",
results_count = 5,
rules_count = 3
});
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
// Act
var status = await client.GetUploadStatusAsync("owner", "repo", "sarif-123", CancellationToken.None);
// Assert
status.Status.Should().Be(ProcessingStatus.Complete);
status.ResultsCount.Should().Be(5);
status.RulesCount.Should().Be(3);
status.IsComplete.Should().BeTrue();
}
[Fact]
public async Task GetUploadStatusAsync_Pending_ReturnsStatus()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
processing_status = "pending"
});
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
// Act
var status = await client.GetUploadStatusAsync("owner", "repo", "sarif-123", CancellationToken.None);
// Assert
status.Status.Should().Be(ProcessingStatus.Pending);
status.IsInProgress.Should().BeTrue();
}
[Fact]
public async Task GetUploadStatusAsync_Failed_ReturnsErrors()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
processing_status = "failed",
errors = new[] { "Invalid SARIF", "Missing runs" }
});
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
// Act
var status = await client.GetUploadStatusAsync("owner", "repo", "sarif-123", CancellationToken.None);
// Assert
status.Status.Should().Be(ProcessingStatus.Failed);
status.Errors.Should().HaveCount(2);
status.Errors.Should().Contain("Invalid SARIF");
}
[Fact]
public async Task ListAlertsAsync_ReturnsAlerts()
{
// Arrange
var alertsData = new object[]
{
new
{
number = 1,
state = "open",
rule = new { id = "csharp/sql-injection", severity = "high", description = "SQL injection" },
tool = new { name = "StellaOps", version = "1.0" },
html_url = "https://github.com/owner/repo/security/code-scanning/1",
created_at = "2026-01-09T10:00:00Z"
},
new
{
number = 2,
state = "dismissed",
rule = new { id = "csharp/xss", severity = "medium", description = "XSS vulnerability" },
tool = new { name = "StellaOps", version = "1.0" },
html_url = "https://github.com/owner/repo/security/code-scanning/2",
created_at = "2026-01-08T10:00:00Z",
dismissed_at = "2026-01-09T11:00:00Z",
dismissed_reason = "false_positive"
}
};
var responseJson = JsonSerializer.Serialize(alertsData);
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
// Act
var alerts = await client.ListAlertsAsync("owner", "repo", null, CancellationToken.None);
// Assert
alerts.Should().HaveCount(2);
alerts[0].Number.Should().Be(1);
alerts[0].State.Should().Be("open");
alerts[0].RuleId.Should().Be("csharp/sql-injection");
alerts[1].DismissedReason.Should().Be("false_positive");
}
[Fact]
public async Task ListAlertsAsync_WithFilter_AppliesQueryString()
{
// Arrange
SetupHttpResponse(HttpStatusCode.OK, "[]");
var client = CreateClient();
var filter = new AlertFilter
{
State = "open",
Severity = "high",
PerPage = 50
};
// Act
await client.ListAlertsAsync("owner", "repo", filter, CancellationToken.None);
// Assert - Verify the request URL contained query parameters
_httpHandlerMock.Protected().Verify(
"SendAsync",
Times.Once(),
ItExpr.Is<HttpRequestMessage>(req =>
req.RequestUri!.Query.Contains("state=open") &&
req.RequestUri.Query.Contains("severity=high") &&
req.RequestUri.Query.Contains("per_page=50")),
ItExpr.IsAny<CancellationToken>());
}
[Fact]
public async Task GetAlertAsync_ReturnsAlert()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
number = 42,
state = "open",
rule = new { id = "csharp/path-traversal", severity = "critical", description = "Path traversal" },
tool = new { name = "StellaOps" },
html_url = "https://github.com/owner/repo/security/code-scanning/42",
created_at = "2026-01-09T10:00:00Z",
most_recent_instance = new
{
@ref = "refs/heads/main",
location = new
{
path = "src/Controllers/FileController.cs",
start_line = 42,
end_line = 45
}
}
});
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
// Act
var alert = await client.GetAlertAsync("owner", "repo", 42, CancellationToken.None);
// Assert
alert.Number.Should().Be(42);
alert.RuleSeverity.Should().Be("critical");
alert.MostRecentInstance.Should().NotBeNull();
alert.MostRecentInstance!.Location!.Path.Should().Be("src/Controllers/FileController.cs");
alert.MostRecentInstance.Location.StartLine.Should().Be(42);
}
[Fact]
public async Task UpdateAlertAsync_Dismiss_ReturnsUpdatedAlert()
{
// Arrange
var responseJson = JsonSerializer.Serialize(new
{
number = 1,
state = "dismissed",
rule = new { id = "test", severity = "low", description = "Test" },
tool = new { name = "StellaOps" },
html_url = "https://github.com/owner/repo/security/code-scanning/1",
created_at = "2026-01-09T10:00:00Z",
dismissed_at = "2026-01-09T12:00:00Z",
dismissed_reason = "false_positive"
});
SetupHttpResponse(HttpStatusCode.OK, responseJson);
var client = CreateClient();
var update = new AlertUpdate
{
State = "dismissed",
DismissedReason = "false_positive",
DismissedComment = "Not applicable to our use case"
};
// Act
var alert = await client.UpdateAlertAsync("owner", "repo", 1, update, CancellationToken.None);
// Assert
alert.State.Should().Be("dismissed");
alert.DismissedReason.Should().Be("false_positive");
}
[Fact]
public async Task UpdateAlertAsync_InvalidState_Throws()
{
// Arrange
var client = CreateClient();
var update = new AlertUpdate
{
State = "invalid"
};
// Act & Assert
await Assert.ThrowsAsync<ArgumentException>(
() => client.UpdateAlertAsync("owner", "repo", 1, update, CancellationToken.None));
}
[Fact]
public async Task UpdateAlertAsync_DismissWithoutReason_Throws()
{
// Arrange
var client = CreateClient();
var update = new AlertUpdate
{
State = "dismissed"
// Missing DismissedReason
};
// Act & Assert
await Assert.ThrowsAsync<ArgumentException>(
() => client.UpdateAlertAsync("owner", "repo", 1, update, CancellationToken.None));
}
[Fact]
public async Task UploadSarifAsync_Unauthorized_ThrowsGitHubApiException()
{
// Arrange
SetupHttpResponse(HttpStatusCode.Unauthorized, "{\"message\":\"Bad credentials\"}");
var client = CreateClient();
var request = new SarifUploadRequest
{
CommitSha = "a".PadRight(40, 'b'),
Ref = "refs/heads/main",
SarifContent = "{}"
};
// Act & Assert
var ex = await Assert.ThrowsAsync<GitHubApiException>(
() => client.UploadSarifAsync("owner", "repo", request, CancellationToken.None));
ex.StatusCode.Should().Be(HttpStatusCode.Unauthorized);
ex.Message.Should().Contain("authentication");
}
[Fact]
public async Task UploadSarifAsync_NotFound_ThrowsGitHubApiException()
{
// Arrange
SetupHttpResponse(HttpStatusCode.NotFound, "{\"message\":\"Not Found\"}");
var client = CreateClient();
var request = new SarifUploadRequest
{
CommitSha = "a".PadRight(40, 'b'),
Ref = "refs/heads/main",
SarifContent = "{}"
};
// Act & Assert
var ex = await Assert.ThrowsAsync<GitHubApiException>(
() => client.UploadSarifAsync("owner", "repo", request, CancellationToken.None));
ex.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact]
public void AlertFilter_ToQueryString_BuildsCorrectQuery()
{
// Arrange
var filter = new AlertFilter
{
State = "open",
Severity = "high",
Tool = "StellaOps",
Ref = "refs/heads/main",
PerPage = 100,
Page = 2,
Sort = "created",
Direction = "desc"
};
// Act
var query = filter.ToQueryString();
// Assert
query.Should().Contain("state=open");
query.Should().Contain("severity=high");
query.Should().Contain("tool_name=StellaOps");
query.Should().Contain("per_page=100");
query.Should().Contain("page=2");
query.Should().Contain("sort=created");
query.Should().Contain("direction=desc");
}
[Fact]
public void AlertFilter_ToQueryString_Empty_ReturnsEmpty()
{
// Arrange
var filter = new AlertFilter();
// Act
var query = filter.ToQueryString();
// Assert
query.Should().BeEmpty();
}
[Fact]
public void SarifUploadRequest_Validate_EmptySarif_Throws()
{
// Arrange
var request = new SarifUploadRequest
{
CommitSha = "a".PadRight(40, 'b'),
Ref = "refs/heads/main",
SarifContent = ""
};
// Act & Assert
Assert.Throws<ArgumentException>(() => request.Validate());
}
private GitHubCodeScanningClient CreateClient()
{
return new GitHubCodeScanningClient(
_httpClientFactory,
NullLogger<GitHubCodeScanningClient>.Instance,
TimeProvider.System);
}
private void SetupHttpResponse(HttpStatusCode statusCode, string content)
{
_httpHandlerMock.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ReturnsAsync(new HttpResponseMessage
{
StatusCode = statusCode,
Content = new StringContent(content)
});
}
}
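A minimal host-wiring sketch implied by the constructor used in these tests (the extension method, lifetimes, and absence of a named HttpClient are assumptions, not taken from this commit):
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
public static class GitHubCodeScanningClientRegistrationSketch
{
public static IServiceCollection AddGitHubCodeScanningClient(this IServiceCollection services)
{
// The client resolves HttpClient instances from IHttpClientFactory and takes a
// TimeProvider, mirroring CreateClient() in the tests above.
services.AddLogging();
services.AddHttpClient();
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<GitHubCodeScanningClient>();
return services;
}
}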

View File

@@ -16,6 +16,7 @@
<ProjectReference Include="../../StellaOps.Integrations.WebService/StellaOps.Integrations.WebService.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Integrations.Contracts/StellaOps.Integrations.Contracts.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Integrations.Core/StellaOps.Integrations.Core.csproj" />
<ProjectReference Include="../../__Plugins/StellaOps.Integrations.Plugin.GitHubApp/StellaOps.Integrations.Plugin.GitHubApp.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
</ItemGroup>
</Project>

View File

@@ -51,7 +51,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().BeEmpty();
@@ -88,7 +88,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().NotBeEmpty();
@@ -121,7 +121,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().NotBeEmpty();
@@ -159,7 +159,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().HaveCount(2);
@@ -185,7 +185,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().NotBeEmpty();
@@ -214,13 +214,13 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().NotBeEmpty();
var suggestion = result.Suggestions.First();
suggestion.Confidence.Should().BeGreaterThan(0);
suggestion.Confidence.Should().BeLessOrEqualTo(1);
suggestion.Confidence.Should().BeLessThanOrEqualTo(1);
}
[Fact]
@@ -243,7 +243,7 @@ public class PlaybookSuggestionServiceTests
};
// Act
var result = await _service.GetSuggestionsAsync(request);
var result = await _service.GetSuggestionsAsync(request, TestContext.Current.CancellationToken);
// Assert
result.Suggestions.Should().NotBeEmpty();

View File

@@ -450,7 +450,7 @@ public sealed class InMemoryMirrorEvidenceStoreTests
await store.StoreAsync(evidence);
Assert.Single(store);
Assert.Equal(1, store.Count);
}
[Fact]
@@ -502,7 +502,7 @@ public sealed class InMemoryMirrorEvidenceStoreTests
store.Clear();
Assert.Empty(store);
Assert.Equal(0, store.Count);
}
}
@@ -731,7 +731,7 @@ public sealed class MirrorOperationRecorderTests
Assert.NotNull(result.EvidencePointer);
// Verify evidence was stored
Assert.Single(_evidenceStore);
Assert.Equal(1, _evidenceStore.Count);
var evidence = await _evidenceStore.GetAsync(context.OperationId);
Assert.NotNull(evidence);
Assert.Equal(MirrorOperationType.BundleExport, evidence.OperationType);

View File

@@ -417,7 +417,7 @@ public class EventPublishingTests
var result = await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);
Assert.True(result);
Assert.Single(store);
Assert.Equal(1, store.Count);
}
[Fact]
@@ -429,7 +429,7 @@ public class EventPublishingTests
var result = await store.TryMarkAsync("key-1", TimeSpan.FromMinutes(5), CT);
Assert.False(result);
Assert.Single(store);
Assert.Equal(1, store.Count);
}
[Fact]
@@ -451,7 +451,7 @@ public class EventPublishingTests
await store.RemoveAsync("key-1", CT);
Assert.False(await store.ExistsAsync("key-1", CT));
Assert.Empty(store);
Assert.Equal(0, store.Count);
}
[Fact]
@@ -463,7 +463,7 @@ public class EventPublishingTests
store.Clear();
Assert.Empty(store);
Assert.Equal(0, store.Count);
}
[Fact]

View File

@@ -243,7 +243,7 @@ public sealed class TimelineEventTests
// Assert
Assert.True(result.Success);
Assert.False(result.Deduplicated);
Assert.Single(sink);
Assert.Single(sink.GetEvents());
var stored = sink.GetEvents()[0];
Assert.Equal(evt.EventId, stored.EventId);
@@ -281,7 +281,7 @@ public sealed class TimelineEventTests
Assert.True(result2.Success);
Assert.True(result2.Deduplicated);
Assert.Single(sink);
Assert.Single(sink.GetEvents());
}
[Fact]

View File

@@ -414,7 +414,7 @@ public sealed class InMemoryJobAttestationStoreTests
await store.StoreAsync(attestation);
Assert.Single(store);
Assert.Equal(1, store.Count);
}
[Fact]
@@ -482,7 +482,7 @@ public sealed class InMemoryJobAttestationStoreTests
store.Clear();
Assert.Empty(store);
Assert.Equal(0, store.Count);
}
}
@@ -552,7 +552,7 @@ public sealed class JobAttestationServiceTests
var result = await _service.GenerateJobCompletionAttestationAsync(request);
Assert.Single(_store);
Assert.Equal(1, _store.Count);
var stored = await _store.GetAsync(result.Attestation!.AttestationId);
Assert.NotNull(stored);
}

View File

@@ -302,7 +302,7 @@ public sealed class InMemoryJobCapsuleStoreTests
await store.StoreAsync(capsule, CancellationToken.None);
Assert.Single(store);
Assert.Single(store.GetAll());
}
[Fact]
@@ -358,10 +358,10 @@ public sealed class InMemoryJobCapsuleStoreTests
var capsule = JobCapsule.Create("tenant-1", Guid.NewGuid(), "test.job", JobCapsuleKind.JobScheduling, inputs);
await store.StoreAsync(capsule, CancellationToken.None);
Assert.Single(store);
Assert.Single(store.GetAll());
store.Clear();
Assert.Empty(store);
Assert.Equal(0, store.Count);
}
}

View File

@@ -18,7 +18,7 @@ public sealed class ScaleMetricsTests
// Assert
var percentiles = metrics.GetDispatchLatencyPercentiles("tenant-1");
Assert.Single(percentiles);
Assert.Equal(1, percentiles.Count);
Assert.Equal(100, percentiles.P95);
}
@@ -58,7 +58,7 @@ public sealed class ScaleMetricsTests
var percentiles = metrics.GetDispatchLatencyPercentiles();
// Assert
Assert.Empty(percentiles);
Assert.Equal(0, percentiles.Count);
Assert.Equal(0, percentiles.P95);
}
@@ -77,7 +77,7 @@ public sealed class ScaleMetricsTests
// Assert
Assert.Equal(2, tenant1Percentiles.Count);
Assert.Single(tenant2Percentiles);
Assert.Equal(1, tenant2Percentiles.Count);
Assert.Equal(100, tenant2Percentiles.P95);
}
@@ -95,7 +95,7 @@ public sealed class ScaleMetricsTests
// Assert
var percentiles = metrics.GetDispatchLatencyPercentiles("tenant-1");
Assert.Single(percentiles);
Assert.Equal(1, percentiles.Count);
Assert.True(percentiles.P95 >= 10);
}
@@ -210,7 +210,7 @@ public sealed class ScaleMetricsTests
// Assert
Assert.Equal(50, snapshot.TotalQueueDepth);
Assert.Equal(10, snapshot.TotalActiveJobs);
Assert.Single(snapshot.DispatchLatency);
Assert.Equal(1, snapshot.DispatchLatency.Count);
Assert.Single(snapshot.QueueDepthByKey);
Assert.Single(snapshot.ActiveJobsByKey);
}
@@ -229,7 +229,7 @@ public sealed class ScaleMetricsTests
// Assert
var snapshot = metrics.GetSnapshot();
Assert.Equal(0, snapshot.TotalQueueDepth);
Assert.Empty(snapshot.DispatchLatency);
Assert.Equal(0, snapshot.DispatchLatency.Count);
}
[Fact]

View File

@@ -24,11 +24,11 @@ public sealed class HealthEndpointsTests : IClassFixture<PlatformWebApplicationF
using var client = factory.CreateClient();
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", tenantId);
var first = await client.GetFromJsonAsync<PlatformItemResponse<PlatformHealthSummary>>("/api/v1/platform/health/summary");
var first = await client.GetFromJsonAsync<PlatformItemResponse<PlatformHealthSummary>>("/api/v1/platform/health/summary", TestContext.Current.CancellationToken);
Assert.NotNull(first);
Assert.False(first!.Cached);
var second = await client.GetFromJsonAsync<PlatformItemResponse<PlatformHealthSummary>>("/api/v1/platform/health/summary");
var second = await client.GetFromJsonAsync<PlatformItemResponse<PlatformHealthSummary>>("/api/v1/platform/health/summary", TestContext.Current.CancellationToken);
Assert.NotNull(second);
Assert.True(second!.Cached);
Assert.Equal(first.DataAsOf, second.DataAsOf);

View File

@@ -23,7 +23,7 @@ public sealed class MetadataEndpointsTests : IClassFixture<PlatformWebApplicatio
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-metadata");
var response = await client.GetFromJsonAsync<PlatformItemResponse<PlatformMetadata>>(
"/api/v1/platform/metadata");
"/api/v1/platform/metadata", TestContext.Current.CancellationToken);
Assert.NotNull(response);
var ids = response!.Item.Capabilities.Select(cap => cap.Id).ToArray();

View File

@@ -23,10 +23,10 @@ public sealed class OnboardingEndpointsTests : IClassFixture<PlatformWebApplicat
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-onboarding");
client.DefaultRequestHeaders.Add("X-StellaOps-Actor", "actor-onboarding");
var response = await client.PostAsync("/api/v1/platform/onboarding/complete/connect-scanner", null);
var response = await client.PostAsync("/api/v1/platform/onboarding/complete/connect-scanner", null, TestContext.Current.CancellationToken);
response.EnsureSuccessStatusCode();
var state = await response.Content.ReadFromJsonAsync<PlatformOnboardingState>();
var state = await response.Content.ReadFromJsonAsync<PlatformOnboardingState>(TestContext.Current.CancellationToken);
Assert.NotNull(state);
var step = state!.Steps.FirstOrDefault(item => item.Step == "connect-scanner");

View File

@@ -31,10 +31,10 @@ public sealed class PreferencesEndpointsTests : IClassFixture<PlatformWebApplica
["filters"] = new JsonObject { ["scope"] = "tenant" }
});
var updateResponse = await client.PutAsJsonAsync("/api/v1/platform/preferences/dashboard", request);
var updateResponse = await client.PutAsJsonAsync("/api/v1/platform/preferences/dashboard", request, TestContext.Current.CancellationToken);
updateResponse.EnsureSuccessStatusCode();
var updated = await client.GetFromJsonAsync<PlatformDashboardPreferences>("/api/v1/platform/preferences/dashboard");
var updated = await client.GetFromJsonAsync<PlatformDashboardPreferences>("/api/v1/platform/preferences/dashboard", TestContext.Current.CancellationToken);
Assert.NotNull(updated);
Assert.Equal("tenant-preferences", updated!.TenantId);

View File

@@ -23,7 +23,7 @@ public sealed class QuotaEndpointsTests : IClassFixture<PlatformWebApplicationFa
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-quotas");
var response = await client.GetFromJsonAsync<PlatformListResponse<PlatformQuotaUsage>>(
"/api/v1/platform/quotas/summary");
"/api/v1/platform/quotas/summary", TestContext.Current.CancellationToken);
Assert.NotNull(response);
var items = response!.Items.ToArray();

View File

@@ -23,7 +23,7 @@ public sealed class SearchEndpointsTests : IClassFixture<PlatformWebApplicationF
client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-search");
var response = await client.GetFromJsonAsync<PlatformListResponse<PlatformSearchItem>>(
"/api/v1/platform/search?limit=5");
"/api/v1/platform/search?limit=5", TestContext.Current.CancellationToken);
Assert.NotNull(response);
var items = response!.Items.Select(item => item.EntityId).ToArray();

View File

@@ -11,16 +11,16 @@ namespace StellaOps.Policy.Engine.Storage.InMemory;
/// In-memory implementation of IExceptionRepository for offline/test runs.
/// Provides minimal semantics needed for lifecycle processing.
/// </summary>
public sealed class InMemoryExceptionRepository(TimeProvider timeProvider, IGuidProvider guidProvider) : IExceptionRepository
public sealed class InMemoryExceptionRepository : IExceptionRepository
{
private readonly TimeProvider _timeProvider = timeProvider;
private readonly IGuidProvider _guidProvider = guidProvider;
private readonly ConcurrentDictionary<(string Tenant, Guid Id), ExceptionEntity> _exceptions = new();
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ConcurrentDictionary<(string Tenant, Guid Id), ExceptionEntity> _exceptions = new();
public InMemoryExceptionRepository(TimeProvider? timeProvider = null)
public InMemoryExceptionRepository(TimeProvider? timeProvider = null, IGuidProvider? guidProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? new SystemGuidProvider();
}
public Task<ExceptionEntity> CreateAsync(ExceptionEntity exception, CancellationToken cancellationToken = default)

View File

@@ -29,7 +29,7 @@ public sealed class BatchSimulationOrchestrator : IBatchSimulationOrchestrator,
{
_simulationService = simulationService ?? throw new ArgumentNullException(nameof(simulationService));
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? GuidProvider.Default;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
// Start background processing
_processingTask = Task.Run(ProcessJobsAsync);

View File

@@ -25,7 +25,7 @@ public sealed class ReviewWorkflowService : IReviewWorkflowService
{
_packStore = packStore ?? throw new ArgumentNullException(nameof(packStore));
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? GuidProvider.Default;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
public async Task<ReviewRequest> SubmitForReviewAsync(

View File

@@ -16,4 +16,8 @@
<PackageReference Include="Microsoft.Extensions.Options" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
</ItemGroup>
</Project>

View File

@@ -6,11 +6,10 @@ namespace StellaOps.Policy.Registry.Storage;
/// <summary>
/// In-memory implementation of IVerificationPolicyStore for testing and development.
/// </summary>
public sealed class InMemoryVerificationPolicyStore(TimeProvider timeProvider) : IVerificationPolicyStore
public sealed class InMemoryVerificationPolicyStore : IVerificationPolicyStore
{
private readonly TimeProvider _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
private readonly ConcurrentDictionary<(Guid TenantId, string PolicyId), VerificationPolicyEntity> _policies = new();
private readonly TimeProvider _timeProvider;
private readonly ConcurrentDictionary<(Guid TenantId, string PolicyId), VerificationPolicyEntity> _policies = new();
public InMemoryVerificationPolicyStore(TimeProvider? timeProvider = null)
{

View File

@@ -71,7 +71,7 @@ public class DeterminismPropertyTests
/// Property: Parallel execution produces consistent results.
/// </summary>
[Fact]
public void Entropy_ParallelExecution_ProducesConsistentResults()
public async Task Entropy_ParallelExecution_ProducesConsistentResults()
{
// Arrange
var calculator = new UncertaintyScoreCalculator(NullLogger<UncertaintyScoreCalculator>.Instance);
@@ -82,7 +82,7 @@ public class DeterminismPropertyTests
.Select(_ => Task.Run(() => calculator.CalculateEntropy(snapshot)))
.ToArray();
Task.WaitAll(tasks);
await Task.WhenAll(tasks);
var results = tasks.Select(t => t.Result).ToList();
// Assert - all results should be identical

View File

@@ -12,7 +12,7 @@ public sealed class RvaBuilderTests
[Fact]
public void Build_ValidInputs_CreatesRva()
{
var rva = new RvaBuilder(_hasher)
var rva = new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithSubject("sha256:abc123", "container-image", "myapp:v1.0")
.WithPolicy("policy-1", "1.0", "sha256:xyz")
@@ -31,7 +31,7 @@ public sealed class RvaBuilderTests
[Fact]
public void Build_MissingSubject_Throws()
{
var builder = new RvaBuilder(_hasher)
var builder = new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithPolicy("p", "1.0", "sha256:x")
.WithKnowledgeSnapshot("ksm:sha256:y");
@@ -45,7 +45,7 @@ public sealed class RvaBuilderTests
[Fact]
public void Build_MissingPolicy_Throws()
{
var builder = new RvaBuilder(_hasher)
var builder = new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithSubject("sha256:abc", "container-image")
.WithKnowledgeSnapshot("ksm:sha256:y");
@@ -59,7 +59,7 @@ public sealed class RvaBuilderTests
[Fact]
public void Build_MissingSnapshot_Throws()
{
var builder = new RvaBuilder(_hasher)
var builder = new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithSubject("sha256:abc", "container-image")
.WithPolicy("p", "1.0", "sha256:x");
@@ -158,7 +158,7 @@ public sealed class RvaBuilderTests
private RvaBuilder CreateBuilder()
{
return new RvaBuilder(_hasher)
return new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithSubject("sha256:test123", "container-image", "test:v1")
.WithPolicy("policy-1", "1.0", "sha256:policy")

View File

@@ -23,7 +23,8 @@ public sealed class RvaVerifierTests
NullLogger<SnapshotService>.Instance);
_verifier = new RvaVerifier(
_snapshotService,
NullLogger<RvaVerifier>.Instance);
NullLogger<RvaVerifier>.Instance,
TimeProvider.System);
}
[Fact]
@@ -124,7 +125,7 @@ public sealed class RvaVerifierTests
private RiskVerdictAttestation CreateValidRva(DateTimeOffset? expiresAt = null)
{
return new RvaBuilder(_hasher)
return new RvaBuilder(_hasher, TimeProvider.System)
.WithVerdict(RiskVerdictStatus.Pass)
.WithSubject("sha256:test123", "container-image", "test:v1")
.WithPolicy("policy-1", "1.0", "sha256:policy")

View File

@@ -15,6 +15,7 @@ using Microsoft.Extensions.Options;
using Microsoft.Extensions.Hosting;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Client;
using StellaOps.Determinism;
using StellaOps.Policy.Gateway.Clients;
using StellaOps.Policy.Gateway.Contracts;
using StellaOps.Policy.Gateway.Options;
@@ -41,7 +42,7 @@ public class PolicyEngineClientTests
var optionsMonitor = new TestOptionsMonitor(options);
var tokenClient = new StubTokenClient();
var dpopGenerator = new PolicyGatewayDpopProofGenerator(new StubHostEnvironment(), optionsMonitor, TimeProvider.System, NullLogger<PolicyGatewayDpopProofGenerator>.Instance);
var dpopGenerator = new PolicyGatewayDpopProofGenerator(new StubHostEnvironment(), optionsMonitor, TimeProvider.System, SystemGuidProvider.Instance, NullLogger<PolicyGatewayDpopProofGenerator>.Instance);
var tokenProvider = new PolicyEngineTokenProvider(tokenClient, optionsMonitor, dpopGenerator, TimeProvider.System, NullLogger<PolicyEngineTokenProvider>.Instance);
using var recordingHandler = new RecordingHandler();

View File

@@ -8,12 +8,11 @@ using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Determinism;
using StellaOps.Policy.Gateway.Options;
using StellaOps.Policy.Gateway.Services;
using Xunit;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Policy.Gateway.Tests;
public sealed class PolicyGatewayDpopProofGeneratorTests
@@ -29,6 +28,7 @@ public sealed class PolicyGatewayDpopProofGeneratorTests
new StubHostEnvironment(AppContext.BaseDirectory),
new TestOptionsMonitor(options),
TimeProvider.System,
SystemGuidProvider.Instance,
NullLogger<PolicyGatewayDpopProofGenerator>.Instance);
var exception = Assert.Throws<InvalidOperationException>(() =>
@@ -52,6 +52,7 @@ public sealed class PolicyGatewayDpopProofGeneratorTests
new StubHostEnvironment(tempRoot.FullName),
new TestOptionsMonitor(options),
TimeProvider.System,
SystemGuidProvider.Instance,
NullLogger<PolicyGatewayDpopProofGenerator>.Instance);
var exception = Assert.Throws<FileNotFoundException>(() =>
@@ -82,6 +83,7 @@ public sealed class PolicyGatewayDpopProofGeneratorTests
new StubHostEnvironment(tempRoot.FullName),
new TestOptionsMonitor(options),
TimeProvider.System,
SystemGuidProvider.Instance,
NullLogger<PolicyGatewayDpopProofGenerator>.Instance);
const string accessToken = "sample-access-token";

View File

@@ -8,6 +8,7 @@ using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Determinism;
using StellaOps.Policy.Exceptions.Models;
using StellaOps.Policy.Exceptions.Repositories;
using StellaOps.Policy.Gateway.Services;
@@ -32,6 +33,7 @@ public class ExceptionServiceTests
_repositoryMock.Object,
_notificationMock.Object,
_timeProvider,
new SequentialGuidProvider(),
NullLogger<ExceptionService>.Instance);
}

View File

@@ -2,6 +2,7 @@ using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Determinism;
using StellaOps.Policy.Exceptions.Models;
using StellaOps.Policy.Exceptions.Repositories;
using StellaOps.Policy.Persistence.Postgres;
@@ -28,7 +29,7 @@ public sealed class ExceptionObjectRepositoryTests : IAsyncLifetime
var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
_repository = new PostgresExceptionObjectRepository(dataSource, NullLogger<PostgresExceptionObjectRepository>.Instance);
_repository = new PostgresExceptionObjectRepository(dataSource, NullLogger<PostgresExceptionObjectRepository>.Instance, TimeProvider.System, SystemGuidProvider.Instance);
}
public ValueTask InitializeAsync() => new(_fixture.TruncateAllTablesAsync());

View File

@@ -173,7 +173,7 @@ public sealed class PostgresExceptionApplicationRepositoryTests : IAsyncLifetime
string findId,
string? vulnId = null,
string eff = "suppress") =>
ExceptionApplication.Create(_tenantId, excId, findId, "affected", "not_affected", "test", eff, vulnId);
ExceptionApplication.Create(_tenantId, excId, findId, "affected", "not_affected", "test", eff, Guid.NewGuid(), DateTimeOffset.UtcNow, vulnId);
}

View File

@@ -2,6 +2,7 @@ using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Determinism;
using StellaOps.Policy.Exceptions.Models;
using StellaOps.Policy.Exceptions.Repositories;
using StellaOps.Policy.Persistence.Postgres;
@@ -29,7 +30,7 @@ public sealed class PostgresExceptionObjectRepositoryTests : IAsyncLifetime
var options = fixture.Fixture.CreateOptions();
options.SchemaName = fixture.SchemaName;
var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
_repository = new PostgresExceptionObjectRepository(dataSource, NullLogger<PostgresExceptionObjectRepository>.Instance);
_repository = new PostgresExceptionObjectRepository(dataSource, NullLogger<PostgresExceptionObjectRepository>.Instance, TimeProvider.System, SystemGuidProvider.Instance);
}
public ValueTask InitializeAsync() => new(_fixture.TruncateAllTablesAsync());

View File

@@ -1,5 +1,6 @@
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Determinism;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.Policy.Scoring.Receipts;
using StellaOps.Policy.Scoring.Tests.Fakes;
@@ -25,7 +26,7 @@ public sealed class CvssPipelineIntegrationTests
{
// Arrange
var repository = new InMemoryReceiptRepository();
var builder = new ReceiptBuilder(_v4Engine, repository);
var builder = new ReceiptBuilder(_v4Engine, repository, TimeProvider.System, SystemGuidProvider.Instance);
var policy = CreateTestPolicy();
var baseMetrics = CreateMaxSeverityBaseMetrics();
@@ -61,7 +62,7 @@ public sealed class CvssPipelineIntegrationTests
{
// Arrange
var repository = new InMemoryReceiptRepository();
var builder = new ReceiptBuilder(_v4Engine, repository);
var builder = new ReceiptBuilder(_v4Engine, repository, TimeProvider.System, SystemGuidProvider.Instance);
var policy = CreateTestPolicy();
var baseMetrics = CreateMaxSeverityBaseMetrics();
@@ -149,8 +150,8 @@ public sealed class CvssPipelineIntegrationTests
// Arrange
var repository1 = new InMemoryReceiptRepository();
var repository2 = new InMemoryReceiptRepository();
var builder1 = new ReceiptBuilder(_v4Engine, repository1);
var builder2 = new ReceiptBuilder(_v4Engine, repository2);
var builder1 = new ReceiptBuilder(_v4Engine, repository1, TimeProvider.System, SystemGuidProvider.Instance);
var builder2 = new ReceiptBuilder(_v4Engine, repository2, TimeProvider.System, SystemGuidProvider.Instance);
var policy = CreateTestPolicy();
var baseMetrics = CreateMaxSeverityBaseMetrics();

View File

@@ -2,6 +2,7 @@ using System.Collections.Immutable;
using FluentAssertions;
using System.Linq;
using StellaOps.Attestor.Envelope;
using StellaOps.Determinism;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.Policy.Scoring.Receipts;
using StellaOps.Policy.Scoring.Tests.Fakes;
@@ -60,7 +61,7 @@ public sealed class ReceiptBuilderTests
})
};
var builder = new ReceiptBuilder(_engine, _repository);
var builder = new ReceiptBuilder(_engine, _repository, TimeProvider.System, SystemGuidProvider.Instance);
// Act
var receipt1 = await builder.CreateAsync(request);
@@ -118,7 +119,7 @@ public sealed class ReceiptBuilderTests
var evidence2 = evidence1.Reverse().ToImmutableList();
var builder = new ReceiptBuilder(_engine, _repository);
var builder = new ReceiptBuilder(_engine, _repository, TimeProvider.System, SystemGuidProvider.Instance);
var r1 = await builder.CreateAsync(new CreateReceiptRequest
{
@@ -182,7 +183,7 @@ public sealed class ReceiptBuilderTests
SigningKey = signingKey
};
var builder = new ReceiptBuilder(_engine, _repository);
var builder = new ReceiptBuilder(_engine, _repository, TimeProvider.System, SystemGuidProvider.Instance);
// Act
var receipt = await builder.CreateAsync(request);
@@ -240,7 +241,7 @@ public sealed class ReceiptBuilderTests
})
};
var builder = new ReceiptBuilder(_engine, _repository);
var builder = new ReceiptBuilder(_engine, _repository, TimeProvider.System, SystemGuidProvider.Instance);
// Act
var act = async () => await builder.CreateAsync(request);

View File

@@ -222,7 +222,7 @@ public class ProofLedgerTests
var originalRootHash = ledger.RootHash();
// Act
var json = ledger.ToJson();
var json = ledger.ToJson(DateTimeOffset.UtcNow);
var restored = ProofLedger.FromJson(json);
// Assert
@@ -236,7 +236,7 @@ public class ProofLedgerTests
// Arrange
var nodes = CreateTestNodes();
var ledger = ProofLedger.FromNodes(nodes);
var json = ledger.ToJson();
var json = ledger.ToJson(DateTimeOffset.UtcNow);
// Tamper with the JSON (9.0 serializes as 9 without decimal point)
var tampered = json.Replace("\"total\":9,", "\"total\":8,");

View File

@@ -274,7 +274,7 @@ public class VexNormalizerTests
var normalizer = new CsafVexNormalizer();
var subject = CreateTestSubject();
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.KnownAffected);
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.KnownAffected, DateTimeOffset.UtcNow);
Assert.Contains(claim.Assertions, a =>
a.Atom == SecurityAtom.Present && a.Value == true);
@@ -288,7 +288,7 @@ public class VexNormalizerTests
var normalizer = new CsafVexNormalizer();
var subject = CreateTestSubject();
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.KnownNotAffected);
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.KnownNotAffected, DateTimeOffset.UtcNow);
Assert.Contains(claim.Assertions, a =>
a.Atom == SecurityAtom.Applies && a.Value == false);
@@ -300,7 +300,7 @@ public class VexNormalizerTests
var normalizer = new CsafVexNormalizer();
var subject = CreateTestSubject();
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.Fixed);
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.Fixed, DateTimeOffset.UtcNow);
Assert.Contains(claim.Assertions, a =>
a.Atom == SecurityAtom.Fixed && a.Value == true);
@@ -312,7 +312,7 @@ public class VexNormalizerTests
var normalizer = new CsafVexNormalizer();
var subject = CreateTestSubject();
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.UnderInvestigation);
var claim = normalizer.NormalizeStatement(subject, CsafProductStatus.UnderInvestigation, DateTimeOffset.UtcNow);
Assert.Empty(claim.Assertions);
}
@@ -326,6 +326,7 @@ public class VexNormalizerTests
var claim = normalizer.NormalizeStatement(
subject,
CsafProductStatus.KnownNotAffected,
DateTimeOffset.UtcNow,
CsafFlagLabel.VulnerableCodeNotInExecutePath);
Assert.Contains(claim.Assertions, a =>
@@ -341,6 +342,7 @@ public class VexNormalizerTests
var claim = normalizer.NormalizeStatement(
subject,
CsafProductStatus.KnownNotAffected,
DateTimeOffset.UtcNow,
CsafFlagLabel.ComponentNotPresent);
Assert.Contains(claim.Assertions, a =>

View File

@@ -0,0 +1,210 @@
// <copyright file="PolicySimulationInputLock.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Replay.Core;
/// <summary>
/// Represents a locked set of inputs required for policy simulation replay.
/// All digests are SHA-256 hex strings (64 characters).
/// </summary>
public sealed record PolicySimulationInputLock
{
/// <summary>
/// Gets the SHA-256 digest of the policy bundle used for simulation.
/// </summary>
public required string PolicyBundleSha256 { get; init; }
/// <summary>
/// Gets the SHA-256 digest of the dependency graph.
/// </summary>
public required string GraphSha256 { get; init; }
/// <summary>
/// Gets the SHA-256 digest of the SBOM used for simulation.
/// </summary>
public required string SbomSha256 { get; init; }
/// <summary>
/// Gets the SHA-256 digest of the time anchor (feed snapshot timestamp proof).
/// </summary>
public required string TimeAnchorSha256 { get; init; }
/// <summary>
/// Gets the SHA-256 digest of the advisory dataset.
/// </summary>
public required string DatasetSha256 { get; init; }
/// <summary>
/// Gets when the input lock was generated.
/// </summary>
public required DateTimeOffset GeneratedAt { get; init; }
/// <summary>
/// Gets whether shadow isolation mode is required for replay.
/// When true, simulation must run in shadow mode to prevent side effects.
/// </summary>
public bool ShadowIsolation { get; init; }
/// <summary>
/// Gets the scopes required for the simulation.
/// </summary>
public string[] RequiredScopes { get; init; } = [];
}
/// <summary>
/// Represents the materialized inputs at replay time for validation against the lock.
/// </summary>
/// <param name="PolicyBundleSha256">The SHA-256 of the current policy bundle.</param>
/// <param name="GraphSha256">The SHA-256 of the current dependency graph.</param>
/// <param name="SbomSha256">The SHA-256 of the current SBOM.</param>
/// <param name="TimeAnchorSha256">The SHA-256 of the current time anchor.</param>
/// <param name="DatasetSha256">The SHA-256 of the current advisory dataset.</param>
/// <param name="ExecutionMode">The execution mode ("shadow" or "live").</param>
/// <param name="AvailableScopes">The scopes available to the current session.</param>
/// <param name="MaterializedAt">When the inputs were materialized.</param>
public sealed record PolicySimulationMaterializedInputs(
string PolicyBundleSha256,
string GraphSha256,
string SbomSha256,
string TimeAnchorSha256,
string DatasetSha256,
string ExecutionMode,
string[] AvailableScopes,
DateTimeOffset MaterializedAt);
/// <summary>
/// Result of validating materialized inputs against an input lock.
/// </summary>
/// <param name="IsValid">Whether the inputs match the lock requirements.</param>
/// <param name="Reason">The reason code for the validation result.</param>
public sealed record PolicySimulationValidationResult(bool IsValid, string Reason)
{
/// <summary>
/// Creates a successful validation result.
/// </summary>
public static PolicySimulationValidationResult Success()
=> new(true, "ok");
/// <summary>
/// Creates a failure result indicating policy bundle digest drift.
/// </summary>
public static PolicySimulationValidationResult PolicyBundleDrift()
=> new(false, "policy-bundle-drift");
/// <summary>
/// Creates a failure result indicating graph digest drift.
/// </summary>
public static PolicySimulationValidationResult GraphDrift()
=> new(false, "graph-drift");
/// <summary>
/// Creates a failure result indicating SBOM digest drift.
/// </summary>
public static PolicySimulationValidationResult SbomDrift()
=> new(false, "sbom-drift");
/// <summary>
/// Creates a failure result indicating time anchor digest drift.
/// </summary>
public static PolicySimulationValidationResult TimeAnchorDrift()
=> new(false, "time-anchor-drift");
/// <summary>
/// Creates a failure result indicating dataset digest drift.
/// </summary>
public static PolicySimulationValidationResult DatasetDrift()
=> new(false, "dataset-drift");
/// <summary>
/// Creates a failure result indicating shadow mode is required but not active.
/// </summary>
public static PolicySimulationValidationResult ShadowModeRequired()
=> new(false, "shadow-mode-required");
/// <summary>
/// Creates a failure result indicating required scopes are missing.
/// </summary>
public static PolicySimulationValidationResult MissingScopes()
=> new(false, "missing-scopes");
/// <summary>
/// Creates a failure result indicating the input lock is stale.
/// </summary>
public static PolicySimulationValidationResult InputsLockStale()
=> new(false, "inputs-lock-stale");
}
/// <summary>
/// Validates materialized inputs against a policy simulation input lock.
/// </summary>
public static class PolicySimulationInputLockValidator
{
/// <summary>
/// Validates that materialized inputs match the locked inputs.
/// </summary>
/// <param name="lock">The input lock to validate against.</param>
/// <param name="inputs">The materialized inputs to validate.</param>
/// <param name="maxAge">Maximum age for the input lock before it's considered stale.</param>
/// <returns>The validation result.</returns>
public static PolicySimulationValidationResult Validate(
PolicySimulationInputLock @lock,
PolicySimulationMaterializedInputs inputs,
TimeSpan maxAge)
{
ArgumentNullException.ThrowIfNull(@lock);
ArgumentNullException.ThrowIfNull(inputs);
// Check if lock is stale
var lockAge = inputs.MaterializedAt - @lock.GeneratedAt;
if (lockAge > maxAge)
{
return PolicySimulationValidationResult.InputsLockStale();
}
// Validate shadow mode requirement
if (@lock.ShadowIsolation &&
!string.Equals(inputs.ExecutionMode, "shadow", StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.ShadowModeRequired();
}
// Validate required scopes
if (@lock.RequiredScopes.Length > 0)
{
var availableSet = new HashSet<string>(inputs.AvailableScopes, StringComparer.OrdinalIgnoreCase);
if (!@lock.RequiredScopes.All(s => availableSet.Contains(s)))
{
return PolicySimulationValidationResult.MissingScopes();
}
}
// Validate digests
if (!string.Equals(@lock.PolicyBundleSha256, inputs.PolicyBundleSha256, StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.PolicyBundleDrift();
}
if (!string.Equals(@lock.GraphSha256, inputs.GraphSha256, StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.GraphDrift();
}
if (!string.Equals(@lock.SbomSha256, inputs.SbomSha256, StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.SbomDrift();
}
if (!string.Equals(@lock.TimeAnchorSha256, inputs.TimeAnchorSha256, StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.TimeAnchorDrift();
}
if (!string.Equals(@lock.DatasetSha256, inputs.DatasetSha256, StringComparison.OrdinalIgnoreCase))
{
return PolicySimulationValidationResult.DatasetDrift();
}
return PolicySimulationValidationResult.Success();
}
}
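A minimal validation sketch using the record and validator above (the digest values, the "policy:simulate" scope name, and the one-hour freshness window are illustrative assumptions, not defaults from this file):
var generatedAt = new DateTimeOffset(2026, 1, 9, 10, 0, 0, TimeSpan.Zero);
var inputLock = new PolicySimulationInputLock
{
PolicyBundleSha256 = new string('a', 64),
GraphSha256 = new string('b', 64),
SbomSha256 = new string('c', 64),
TimeAnchorSha256 = new string('d', 64),
DatasetSha256 = new string('e', 64),
GeneratedAt = generatedAt,
ShadowIsolation = true,
RequiredScopes = ["policy:simulate"]
};
var materialized = new PolicySimulationMaterializedInputs(
PolicyBundleSha256: new string('a', 64),
GraphSha256: new string('b', 64),
SbomSha256: new string('c', 64),
TimeAnchorSha256: new string('d', 64),
DatasetSha256: new string('e', 64),
ExecutionMode: "shadow",
AvailableScopes: ["policy:simulate", "policy:read"],
MaterializedAt: generatedAt.AddMinutes(5));
var result = PolicySimulationInputLockValidator.Validate(inputLock, materialized, TimeSpan.FromHours(1));
// result.IsValid is true with Reason "ok"; drifting any digest, dropping the required scope,
// leaving shadow mode, or exceeding the one-hour window yields the matching failure reason.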

View File

@@ -0,0 +1,517 @@
// <copyright file="ReplayExecutor.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
namespace StellaOps.Replay.Core;
/// <summary>
/// Executes policy evaluation replay with resolved inputs for deterministic verification.
/// Sprint: SPRINT_20260107_006_005 Task RB-003
/// </summary>
public sealed class ReplayExecutor
{
private readonly IReplayPolicyEvaluator _policyEvaluator;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ReplayExecutor> _logger;
private readonly ReplayExecutorOptions _options;
/// <summary>
/// Initializes a new instance of the <see cref="ReplayExecutor"/> class.
/// </summary>
public ReplayExecutor(
IReplayPolicyEvaluator policyEvaluator,
TimeProvider timeProvider,
ILogger<ReplayExecutor> logger,
ReplayExecutorOptions? options = null)
{
_policyEvaluator = policyEvaluator ?? throw new ArgumentNullException(nameof(policyEvaluator));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? new ReplayExecutorOptions();
}
/// <summary>
/// Executes a replay using resolved inputs.
/// </summary>
/// <param name="resolvedInputs">The resolved inputs from InputManifestResolver.</param>
/// <param name="originalVerdict">The original verdict to compare against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The replay execution result.</returns>
public async Task<ReplayExecutionResult> ExecuteAsync(
ResolvedInputs resolvedInputs,
OriginalVerdict originalVerdict,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(resolvedInputs);
ArgumentNullException.ThrowIfNull(originalVerdict);
var startTime = _timeProvider.GetUtcNow();
var replayId = GenerateReplayId();
_logger.LogInformation(
"Starting replay execution {ReplayId} for correlation {CorrelationId}",
replayId,
originalVerdict.CorrelationId);
try
{
// Create deterministic context with timestamp override
var context = new ReplayContext
{
ReplayId = replayId,
CorrelationId = originalVerdict.CorrelationId,
TimestampOverride = resolvedInputs.TimestampOverride ?? originalVerdict.EvaluatedAt,
RandomSeed = resolvedInputs.RandomSeed ?? 0,
ToolchainVersion = resolvedInputs.ToolchainVersion
};
// Execute policy evaluation with resolved inputs
var replayedVerdict = await _policyEvaluator.EvaluateAsync(
context,
resolvedInputs,
cancellationToken).ConfigureAwait(false);
// Compute digests for comparison
var originalDigest = ComputeVerdictDigest(originalVerdict);
var replayDigest = ComputeVerdictDigest(replayedVerdict);
// Compare results
var deterministicMatch = string.Equals(
originalDigest,
replayDigest,
StringComparison.OrdinalIgnoreCase);
var endTime = _timeProvider.GetUtcNow();
var duration = endTime - startTime;
_logger.LogInformation(
"Replay {ReplayId} completed: match={Match}, duration={Duration}ms",
replayId,
deterministicMatch,
duration.TotalMilliseconds);
return new ReplayExecutionResult
{
ReplayId = replayId,
CorrelationId = originalVerdict.CorrelationId,
Status = ReplayExecutionStatus.Completed,
OriginalDigest = originalDigest,
ReplayDigest = replayDigest,
DeterministicMatch = deterministicMatch,
OriginalVerdict = originalVerdict,
ReplayedVerdict = replayedVerdict,
StartedAt = startTime,
CompletedAt = endTime,
Duration = duration,
Diff = deterministicMatch ? null : GenerateDiff(originalVerdict, replayedVerdict)
};
}
catch (OperationCanceledException)
{
_logger.LogWarning("Replay {ReplayId} was cancelled", replayId);
return new ReplayExecutionResult
{
ReplayId = replayId,
CorrelationId = originalVerdict.CorrelationId,
Status = ReplayExecutionStatus.Cancelled,
StartedAt = startTime,
CompletedAt = _timeProvider.GetUtcNow()
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Replay {ReplayId} failed", replayId);
return new ReplayExecutionResult
{
ReplayId = replayId,
CorrelationId = originalVerdict.CorrelationId,
Status = ReplayExecutionStatus.Failed,
Error = ex.Message,
StartedAt = startTime,
CompletedAt = _timeProvider.GetUtcNow()
};
}
}
/// <summary>
/// Executes a batch of replays.
/// </summary>
public async Task<IReadOnlyList<ReplayExecutionResult>> ExecuteBatchAsync(
IReadOnlyList<ReplayRequest> requests,
CancellationToken cancellationToken = default)
{
var results = new List<ReplayExecutionResult>(requests.Count);
foreach (var request in requests)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await ExecuteAsync(
request.ResolvedInputs,
request.OriginalVerdict,
cancellationToken).ConfigureAwait(false);
results.Add(result);
// Optional pacing between sequential replays
if (_options.DelayBetweenReplays > TimeSpan.Zero)
{
await Task.Delay(_options.DelayBetweenReplays, cancellationToken)
.ConfigureAwait(false);
}
}
return results;
}
private static string GenerateReplayId()
{
return $"rpl-{Guid.NewGuid():N}"[..20];
}
private static string ComputeVerdictDigest(OriginalVerdict verdict)
{
var builder = new StringBuilder();
builder.Append(verdict.Outcome);
builder.Append('|');
builder.Append(verdict.Severity);
builder.Append('|');
builder.Append(verdict.Score.ToString("F4", CultureInfo.InvariantCulture));
builder.Append('|');
builder.Append(verdict.FindingsCount.ToString(CultureInfo.InvariantCulture));
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static string ComputeVerdictDigest(ReplayedVerdict verdict)
{
var builder = new StringBuilder();
builder.Append(verdict.Outcome);
builder.Append('|');
builder.Append(verdict.Severity);
builder.Append('|');
builder.Append(verdict.Score.ToString("F4", CultureInfo.InvariantCulture));
builder.Append('|');
builder.Append(verdict.FindingsCount.ToString(CultureInfo.InvariantCulture));
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static VerdictDiff GenerateDiff(OriginalVerdict original, ReplayedVerdict replayed)
{
var differences = new List<DiffEntry>();
if (!string.Equals(original.Outcome, replayed.Outcome, StringComparison.OrdinalIgnoreCase))
{
differences.Add(new DiffEntry("outcome", original.Outcome, replayed.Outcome));
}
if (!string.Equals(original.Severity, replayed.Severity, StringComparison.OrdinalIgnoreCase))
{
differences.Add(new DiffEntry("severity", original.Severity, replayed.Severity));
}
if (Math.Abs(original.Score - replayed.Score) > 0.0001)
{
differences.Add(new DiffEntry(
"score",
original.Score.ToString("F4", CultureInfo.InvariantCulture),
replayed.Score.ToString("F4", CultureInfo.InvariantCulture)));
}
if (original.FindingsCount != replayed.FindingsCount)
{
differences.Add(new DiffEntry(
"findingsCount",
original.FindingsCount.ToString(CultureInfo.InvariantCulture),
replayed.FindingsCount.ToString(CultureInfo.InvariantCulture)));
}
return new VerdictDiff
{
Differences = differences,
Summary = $"{differences.Count} field(s) differ between original and replay"
};
}
}
/// <summary>
/// Options for replay execution.
/// </summary>
public sealed record ReplayExecutorOptions
{
/// <summary>
/// Maximum time to wait for a single replay.
/// </summary>
public TimeSpan Timeout { get; init; } = TimeSpan.FromMinutes(5);
/// <summary>
/// Delay between replays in batch execution.
/// </summary>
public TimeSpan DelayBetweenReplays { get; init; } = TimeSpan.Zero;
/// <summary>
/// Whether to capture detailed execution trace.
/// </summary>
public bool CaptureTrace { get; init; } = false;
}
/// <summary>
/// Interface for policy evaluation during replay.
/// </summary>
public interface IReplayPolicyEvaluator
{
/// <summary>
/// Evaluates policy with the given context and resolved inputs.
/// </summary>
Task<ReplayedVerdict> EvaluateAsync(
ReplayContext context,
ResolvedInputs inputs,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Context for replay execution.
/// </summary>
public sealed record ReplayContext
{
/// <summary>
/// Unique replay identifier.
/// </summary>
public required string ReplayId { get; init; }
/// <summary>
/// Correlation ID from the original execution.
/// </summary>
public required string CorrelationId { get; init; }
/// <summary>
/// Override timestamp for deterministic replay.
/// </summary>
public DateTimeOffset TimestampOverride { get; init; }
/// <summary>
/// Random seed for deterministic replay.
/// </summary>
public int RandomSeed { get; init; }
/// <summary>
/// Toolchain version used for evaluation.
/// </summary>
public string? ToolchainVersion { get; init; }
}
/// <summary>
/// Original verdict to replay against.
/// </summary>
public sealed record OriginalVerdict
{
/// <summary>
/// Correlation ID from original execution.
/// </summary>
public required string CorrelationId { get; init; }
/// <summary>
/// The verdict outcome (e.g., "pass", "fail", "warn").
/// </summary>
public required string Outcome { get; init; }
/// <summary>
/// Severity level (e.g., "critical", "high", "medium", "low").
/// </summary>
public required string Severity { get; init; }
/// <summary>
/// Numeric score (0.0 to 1.0).
/// </summary>
public double Score { get; init; }
/// <summary>
/// Number of findings.
/// </summary>
public int FindingsCount { get; init; }
/// <summary>
/// When the original evaluation occurred.
/// </summary>
public DateTimeOffset EvaluatedAt { get; init; }
/// <summary>
/// SHA-256 digest of the original verdict.
/// </summary>
public string? Digest { get; init; }
}
/// <summary>
/// Verdict produced by replay execution.
/// </summary>
public sealed record ReplayedVerdict
{
/// <summary>
/// The verdict outcome (e.g., "pass", "fail", "warn").
/// </summary>
public required string Outcome { get; init; }
/// <summary>
/// Severity level (e.g., "critical", "high", "medium", "low").
/// </summary>
public required string Severity { get; init; }
/// <summary>
/// Numeric score (0.0 to 1.0).
/// </summary>
public double Score { get; init; }
/// <summary>
/// Number of findings.
/// </summary>
public int FindingsCount { get; init; }
/// <summary>
/// When the replay evaluation occurred.
/// </summary>
public DateTimeOffset EvaluatedAt { get; init; }
}
/// <summary>
/// Request for batch replay execution.
/// </summary>
public sealed record ReplayRequest
{
/// <summary>
/// The resolved inputs for replay.
/// </summary>
public required ResolvedInputs ResolvedInputs { get; init; }
/// <summary>
/// The original verdict to compare against.
/// </summary>
public required OriginalVerdict OriginalVerdict { get; init; }
}
/// <summary>
/// Result of replay execution.
/// </summary>
public sealed record ReplayExecutionResult
{
/// <summary>
/// Unique replay identifier.
/// </summary>
public required string ReplayId { get; init; }
/// <summary>
/// Correlation ID from the original execution.
/// </summary>
public required string CorrelationId { get; init; }
/// <summary>
/// Execution status.
/// </summary>
public required ReplayExecutionStatus Status { get; init; }
/// <summary>
/// SHA-256 digest of the original verdict.
/// </summary>
public string? OriginalDigest { get; init; }
/// <summary>
/// SHA-256 digest of the replayed verdict.
/// </summary>
public string? ReplayDigest { get; init; }
/// <summary>
/// Whether the replay produced deterministic output.
/// </summary>
public bool? DeterministicMatch { get; init; }
/// <summary>
/// The original verdict.
/// </summary>
public OriginalVerdict? OriginalVerdict { get; init; }
/// <summary>
/// The replayed verdict.
/// </summary>
public ReplayedVerdict? ReplayedVerdict { get; init; }
/// <summary>
/// When the replay started.
/// </summary>
public DateTimeOffset StartedAt { get; init; }
/// <summary>
/// When the replay completed.
/// </summary>
public DateTimeOffset? CompletedAt { get; init; }
/// <summary>
/// Total replay duration.
/// </summary>
public TimeSpan? Duration { get; init; }
/// <summary>
/// Diff report if not deterministic.
/// </summary>
public VerdictDiff? Diff { get; init; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? Error { get; init; }
}
/// <summary>
/// Status of replay execution.
/// </summary>
public enum ReplayExecutionStatus
{
/// <summary>Replay has not started.</summary>
NotStarted,
/// <summary>Replay is in progress.</summary>
InProgress,
/// <summary>Replay completed successfully.</summary>
Completed,
/// <summary>Replay failed with an error.</summary>
Failed,
/// <summary>Replay was cancelled.</summary>
Cancelled
}
/// <summary>
/// Diff between original and replayed verdicts.
/// </summary>
public sealed record VerdictDiff
{
/// <summary>
/// List of differences found.
/// </summary>
public IReadOnlyList<DiffEntry> Differences { get; init; } = [];
/// <summary>
/// Human-readable summary.
/// </summary>
public required string Summary { get; init; }
}
/// <summary>
/// A single difference entry.
/// </summary>
/// <param name="Field">The field name that differs.</param>
/// <param name="OriginalValue">The original value.</param>
/// <param name="ReplayedValue">The replayed value.</param>
public sealed record DiffEntry(string Field, string OriginalValue, string ReplayedValue);
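A minimal harness sketch for the executor (FixedVerdictEvaluator is a hypothetical stub, not part of this commit; a real evaluator would re-run the policy engine against the resolved inputs):
// Hypothetical stub: always reproduces the same verdict, so the replay digest matches any
// original verdict with outcome "pass", severity "low", score 0.1000 and zero findings.
public sealed class FixedVerdictEvaluator : IReplayPolicyEvaluator
{
public Task<ReplayedVerdict> EvaluateAsync(
ReplayContext context,
ResolvedInputs inputs,
CancellationToken cancellationToken = default)
{
return Task.FromResult(new ReplayedVerdict
{
Outcome = "pass",
Severity = "low",
Score = 0.1,
FindingsCount = 0,
EvaluatedAt = context.TimestampOverride
});
}
}
// Wiring sketch (NullLogger keeps it dependency-free; resolvedInputs/originalVerdict come
// from the input resolver and the original evaluation):
// var executor = new ReplayExecutor(new FixedVerdictEvaluator(), TimeProvider.System,
//     NullLogger<ReplayExecutor>.Instance);
// var result = await executor.ExecuteAsync(resolvedInputs, originalVerdict);
// result.DeterministicMatch compares SHA-256 digests of outcome|severity|score|findings.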

View File

@@ -0,0 +1,444 @@
// <copyright file="ReplayJobQueue.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
using System.Threading.Channels;
using Microsoft.Extensions.Logging;
namespace StellaOps.Replay.Core;
/// <summary>
/// Queue for managing async replay job execution with concurrency limits.
/// Sprint: SPRINT_20260107_006_005 Task RB-006
/// </summary>
public sealed class ReplayJobQueue : IDisposable
{
private readonly ReplayExecutor _executor;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ReplayJobQueue> _logger;
private readonly ReplayJobQueueOptions _options;
private readonly Channel<ReplayJob> _jobChannel;
private readonly ConcurrentDictionary<string, ReplayJob> _jobs = new();
private readonly CancellationTokenSource _shutdownCts = new();
private readonly Task[] _workerTasks;
private bool _disposed;
/// <summary>
/// Initializes a new instance of the <see cref="ReplayJobQueue"/> class.
/// </summary>
public ReplayJobQueue(
ReplayExecutor executor,
TimeProvider timeProvider,
ILogger<ReplayJobQueue> logger,
ReplayJobQueueOptions? options = null)
{
_executor = executor ?? throw new ArgumentNullException(nameof(executor));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? new ReplayJobQueueOptions();
_jobChannel = Channel.CreateBounded<ReplayJob>(new BoundedChannelOptions(_options.MaxQueueSize)
{
FullMode = BoundedChannelFullMode.Wait,
SingleReader = false,
SingleWriter = false
});
// Start worker tasks
_workerTasks = new Task[_options.MaxConcurrentReplays];
for (int i = 0; i < _options.MaxConcurrentReplays; i++)
{
var workerId = i;
_workerTasks[i] = Task.Run(() => WorkerLoopAsync(workerId, _shutdownCts.Token));
}
_logger.LogInformation(
"ReplayJobQueue started with {WorkerCount} workers, max queue size {QueueSize}",
_options.MaxConcurrentReplays,
_options.MaxQueueSize);
}
/// <summary>
/// Enqueues a replay job for async execution.
/// </summary>
/// <param name="request">The replay request.</param>
/// <param name="cancellationToken">Cancellation token for the enqueue operation.</param>
/// <returns>The job ID for tracking.</returns>
public async Task<string> EnqueueAsync(
ReplayJobRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ObjectDisposedException.ThrowIf(_disposed, this);
var jobId = GenerateJobId();
var now = _timeProvider.GetUtcNow();
var job = new ReplayJob
{
JobId = jobId,
Request = request,
Status = ReplayJobStatus.Queued,
CreatedAt = now,
CancellationSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)
};
_jobs[jobId] = job;
await _jobChannel.Writer.WriteAsync(job, cancellationToken).ConfigureAwait(false);
_logger.LogInformation("Enqueued replay job {JobId} for correlation {CorrelationId}",
jobId, request.OriginalVerdict.CorrelationId);
return jobId;
}
/// <summary>
/// Gets the status of a replay job.
/// </summary>
public ReplayJobStatus? GetJobStatus(string jobId)
{
return _jobs.TryGetValue(jobId, out var job) ? job.Status : null;
}
/// <summary>
/// Gets the full job details.
/// </summary>
public ReplayJob? GetJob(string jobId)
{
return _jobs.TryGetValue(jobId, out var job) ? job : null;
}
/// <summary>
/// Cancels a pending or running job.
/// </summary>
public bool CancelJob(string jobId)
{
if (!_jobs.TryGetValue(jobId, out var job))
{
return false;
}
if (job.Status is ReplayJobStatus.Completed or ReplayJobStatus.Failed or ReplayJobStatus.Cancelled)
{
return false;
}
job.CancellationSource.Cancel();
UpdateJobStatus(jobId, ReplayJobStatus.Cancelled);
_logger.LogInformation("Cancelled replay job {JobId}", jobId);
return true;
}
/// <summary>
/// Gets statistics about the queue.
/// </summary>
public ReplayQueueStats GetStats()
{
var queued = _jobs.Values.Count(j => j.Status == ReplayJobStatus.Queued);
var running = _jobs.Values.Count(j => j.Status == ReplayJobStatus.Running);
var completed = _jobs.Values.Count(j => j.Status == ReplayJobStatus.Completed);
var failed = _jobs.Values.Count(j => j.Status == ReplayJobStatus.Failed);
var cancelled = _jobs.Values.Count(j => j.Status == ReplayJobStatus.Cancelled);
return new ReplayQueueStats
{
QueuedJobs = queued,
RunningJobs = running,
CompletedJobs = completed,
FailedJobs = failed,
CancelledJobs = cancelled,
TotalJobs = _jobs.Count,
MaxConcurrentWorkers = _options.MaxConcurrentReplays,
MaxQueueSize = _options.MaxQueueSize
};
}
private async Task WorkerLoopAsync(int workerId, CancellationToken shutdownToken)
{
_logger.LogDebug("Replay worker {WorkerId} started", workerId);
try
{
await foreach (var job in _jobChannel.Reader.ReadAllAsync(shutdownToken))
{
if (shutdownToken.IsCancellationRequested)
{
break;
}
await ProcessJobAsync(workerId, job).ConfigureAwait(false);
}
}
catch (OperationCanceledException) when (shutdownToken.IsCancellationRequested)
{
// Normal shutdown
}
catch (Exception ex)
{
_logger.LogError(ex, "Replay worker {WorkerId} crashed", workerId);
}
_logger.LogDebug("Replay worker {WorkerId} stopped", workerId);
}
private async Task ProcessJobAsync(int workerId, ReplayJob job)
{
if (job.CancellationSource.IsCancellationRequested)
{
UpdateJobStatus(job.JobId, ReplayJobStatus.Cancelled);
return;
}
_logger.LogInformation("Worker {WorkerId} starting job {JobId}", workerId, job.JobId);
UpdateJobStatus(job.JobId, ReplayJobStatus.Running);
job.StartedAt = _timeProvider.GetUtcNow();
try
{
// Create combined cancellation with timeout
using var timeoutCts = new CancellationTokenSource(_options.JobTimeout);
using var combinedCts = CancellationTokenSource.CreateLinkedTokenSource(
job.CancellationSource.Token,
timeoutCts.Token);
var result = await _executor.ExecuteAsync(
job.Request.ResolvedInputs,
job.Request.OriginalVerdict,
combinedCts.Token).ConfigureAwait(false);
job.Result = result;
job.CompletedAt = _timeProvider.GetUtcNow();
UpdateJobStatus(job.JobId, result.Status == ReplayExecutionStatus.Completed
? ReplayJobStatus.Completed
: ReplayJobStatus.Failed);
_logger.LogInformation(
"Worker {WorkerId} completed job {JobId}: match={Match}",
workerId,
job.JobId,
result.DeterministicMatch);
}
catch (OperationCanceledException) when (job.CancellationSource.IsCancellationRequested)
{
UpdateJobStatus(job.JobId, ReplayJobStatus.Cancelled);
_logger.LogInformation("Worker {WorkerId} cancelled job {JobId}", workerId, job.JobId);
}
catch (OperationCanceledException)
{
UpdateJobStatus(job.JobId, ReplayJobStatus.TimedOut);
job.Error = $"Job timed out after {_options.JobTimeout.TotalSeconds} seconds";
_logger.LogWarning("Worker {WorkerId} job {JobId} timed out", workerId, job.JobId);
}
catch (Exception ex)
{
UpdateJobStatus(job.JobId, ReplayJobStatus.Failed);
job.Error = ex.Message;
job.CompletedAt = _timeProvider.GetUtcNow();
_logger.LogError(ex, "Worker {WorkerId} job {JobId} failed", workerId, job.JobId);
}
}
private void UpdateJobStatus(string jobId, ReplayJobStatus status)
{
if (_jobs.TryGetValue(jobId, out var job))
{
// Mutate the tracked instance in place. Replacing it with a `with` copy here would orphan
// the StartedAt/CompletedAt/Result/Error values that workers set on the instance they hold.
job.Status = status;
}
}
private static string GenerateJobId()
{
return $"job-{Guid.NewGuid():N}"[..20];
}
/// <inheritdoc/>
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
_shutdownCts.Cancel();
_jobChannel.Writer.Complete();
try
{
Task.WaitAll(_workerTasks, TimeSpan.FromSeconds(10));
}
catch (AggregateException)
{
// Workers may have already stopped
}
_shutdownCts.Dispose();
foreach (var job in _jobs.Values)
{
job.CancellationSource.Dispose();
}
_logger.LogInformation("ReplayJobQueue disposed");
}
}
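/// <summary>
/// A minimal usage sketch (illustrative only, not wired up anywhere): enqueue a replay and poll
/// until it reaches a terminal state. The queue, resolved inputs, and original verdict are assumed
/// to be supplied by the host; the 200 ms polling interval is an arbitrary example value.
/// </summary>
internal static class ReplayJobQueueUsageSketch
{
public static async Task<ReplayExecutionResult?> RunAsync(
ReplayJobQueue queue,
ResolvedInputs inputs,
OriginalVerdict verdict,
CancellationToken cancellationToken)
{
var jobId = await queue.EnqueueAsync(
new ReplayJobRequest { ResolvedInputs = inputs, OriginalVerdict = verdict },
cancellationToken);
// Poll until the job leaves the Queued/Running states, then return whatever result was captured.
while (queue.GetJobStatus(jobId) is ReplayJobStatus.Queued or ReplayJobStatus.Running)
{
await Task.Delay(TimeSpan.FromMilliseconds(200), cancellationToken);
}
return queue.GetJob(jobId)?.Result;
}
}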
/// <summary>
/// Options for the replay job queue.
/// </summary>
public sealed record ReplayJobQueueOptions
{
/// <summary>
/// Maximum concurrent replay workers.
/// </summary>
public int MaxConcurrentReplays { get; init; } = 2;
/// <summary>
/// Maximum queue size before blocking.
/// </summary>
public int MaxQueueSize { get; init; } = 100;
/// <summary>
/// Timeout for individual jobs.
/// </summary>
public TimeSpan JobTimeout { get; init; } = TimeSpan.FromMinutes(5);
/// <summary>
/// How long to retain completed jobs.
/// </summary>
public TimeSpan JobRetentionPeriod { get; init; } = TimeSpan.FromHours(1);
}
/// <summary>
/// Request to enqueue a replay job.
/// </summary>
public sealed record ReplayJobRequest
{
/// <summary>
/// The resolved inputs for replay.
/// </summary>
public required ResolvedInputs ResolvedInputs { get; init; }
/// <summary>
/// The original verdict to compare against.
/// </summary>
public required OriginalVerdict OriginalVerdict { get; init; }
/// <summary>
/// Optional priority (higher = more urgent).
/// </summary>
public int Priority { get; init; } = 0;
}
/// <summary>
/// A replay job in the queue.
/// </summary>
public sealed record ReplayJob
{
/// <summary>
/// Unique job identifier.
/// </summary>
public required string JobId { get; init; }
/// <summary>
/// The replay request.
/// </summary>
public required ReplayJobRequest Request { get; init; }
/// <summary>
/// Current job status.
/// </summary>
public required ReplayJobStatus Status { get; set; }
/// <summary>
/// When the job was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// When the job started executing.
/// </summary>
public DateTimeOffset? StartedAt { get; set; }
/// <summary>
/// When the job completed.
/// </summary>
public DateTimeOffset? CompletedAt { get; set; }
/// <summary>
/// The execution result.
/// </summary>
public ReplayExecutionResult? Result { get; set; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? Error { get; set; }
/// <summary>
/// Cancellation source for this job.
/// </summary>
public required CancellationTokenSource CancellationSource { get; init; }
}
/// <summary>
/// Status of a replay job.
/// </summary>
public enum ReplayJobStatus
{
/// <summary>Job is queued and waiting.</summary>
Queued,
/// <summary>Job is currently running.</summary>
Running,
/// <summary>Job completed successfully.</summary>
Completed,
/// <summary>Job failed with an error.</summary>
Failed,
/// <summary>Job was cancelled.</summary>
Cancelled,
/// <summary>Job timed out.</summary>
TimedOut
}
/// <summary>
/// Statistics about the replay queue.
/// </summary>
public sealed record ReplayQueueStats
{
/// <summary>Number of jobs waiting in queue.</summary>
public int QueuedJobs { get; init; }
/// <summary>Number of jobs currently running.</summary>
public int RunningJobs { get; init; }
/// <summary>Number of completed jobs.</summary>
public int CompletedJobs { get; init; }
/// <summary>Number of failed jobs.</summary>
public int FailedJobs { get; init; }
/// <summary>Number of cancelled jobs.</summary>
public int CancelledJobs { get; init; }
/// <summary>Total jobs tracked.</summary>
public int TotalJobs { get; init; }
/// <summary>Maximum concurrent workers configured.</summary>
public int MaxConcurrentWorkers { get; init; }
/// <summary>Maximum queue size configured.</summary>
public int MaxQueueSize { get; init; }
}
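/// <summary>
/// A small illustrative sketch (an assumption, not part of the queue API) showing one way to
/// surface <see cref="ReplayQueueStats"/>, for example from a periodic health probe.
/// </summary>
internal static class ReplayQueueStatsLoggingSketch
{
public static void LogSnapshot(ReplayJobQueue queue, ILogger logger)
{
var stats = queue.GetStats();
logger.LogInformation(
"Replay queue: {Queued} queued, {Running} running, {Completed} completed, {Failed} failed, {Cancelled} cancelled ({Total} tracked, {Workers} workers)",
stats.QueuedJobs,
stats.RunningJobs,
stats.CompletedJobs,
stats.FailedJobs,
stats.CancelledJobs,
stats.TotalJobs,
stats.MaxConcurrentWorkers);
}
}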

View File

@@ -12,10 +12,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" />
<PackageReference Include="Moq" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.AuditPack/StellaOps.AuditPack.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
<ProjectReference Include="../../StellaOps.Replay.WebService/StellaOps.Replay.WebService.csproj" />

View File

@@ -0,0 +1,457 @@
// <copyright file="DeterminismVerifierTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Moq;
using StellaOps.Replay.Core;
using Xunit;
namespace StellaOps.Replay.Core.Tests.Unit;
/// <summary>
/// Unit tests for <see cref="DeterminismVerifier"/>.
/// Sprint: SPRINT_20260107_006_005 Task RB-010
/// </summary>
[Trait("Category", "Unit")]
public sealed class DeterminismVerifierTests
{
private readonly Mock<ILogger<DeterminismVerifier>> _logger;
private readonly DeterminismVerifier _sut;
public DeterminismVerifierTests()
{
_logger = new Mock<ILogger<DeterminismVerifier>>();
_sut = new DeterminismVerifier(_logger.Object);
}
#region Verify Tests
[Fact]
public void Verify_IdenticalVerdicts_ReturnsDeterministic()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeTrue();
result.Differences.Should().BeEmpty();
result.DeterminismScore.Should().Be(1.0);
result.OriginalDigest.Should().Be(result.ReplayDigest);
}
[Fact]
public void Verify_DifferentOutcome_ReturnsNonDeterministicWithCriticalDifference()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().ContainSingle(d =>
d.Field == "Outcome" &&
d.Severity == DifferenceSeverity.Critical);
result.DeterminismScore.Should().BeLessThan(1.0);
}
[Fact]
public void Verify_DifferentSeverity_ReturnsNonDeterministicWithHighDifference()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "high", findings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().ContainSingle(d =>
d.Field == "Severity" &&
d.Severity == DifferenceSeverity.High);
}
[Fact]
public void Verify_DifferentFindingCount_ReturnsNonDeterministic()
{
// Arrange
var originalFindings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var replayFindings = ImmutableArray<FindingRecord>.Empty;
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().Contain(d => d.Field == "FindingCount");
}
[Fact]
public void Verify_MissingFindingInReplay_ReturnsNonDeterministic()
{
// Arrange
var originalFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
var replayFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().Contain(d =>
d.Field == "Finding:f2" &&
d.OriginalValue == "Present" &&
d.ReplayValue == "Missing");
}
[Fact]
public void Verify_NewFindingInReplay_ReturnsNonDeterministic()
{
// Arrange
var originalFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
var replayFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().Contain(d =>
d.Field == "Finding:f2" &&
d.OriginalValue == "Missing" &&
d.ReplayValue == "Present");
}
[Fact]
public void Verify_FindingSeverityDiffers_ReturnsNonDeterministic()
{
// Arrange
var originalFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
var replayFindings = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "critical" });
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", originalFindings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", replayFindings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().Contain(d =>
d.Field == "Finding:f1:Severity" &&
d.OriginalValue == "high" &&
d.ReplayValue == "critical");
}
[Fact]
public void Verify_DifferentRuleOrder_ReturnsNonDeterministic()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
["rule-1", "rule-2", "rule-3"]);
var replay = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
["rule-1", "rule-3", "rule-2"]);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.IsDeterministic.Should().BeFalse();
result.Differences.Should().Contain(d =>
d.Field == "RuleOrder" &&
d.Severity == DifferenceSeverity.Low);
}
#endregion
#region ComputeVerdictDigest Tests
[Fact]
public void ComputeVerdictDigest_SameVerdict_ProducesSameDigest()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
// Act
var digest1 = _sut.ComputeVerdictDigest(verdict1);
var digest2 = _sut.ComputeVerdictDigest(verdict2);
// Assert
digest1.Should().Be(digest2);
}
[Fact]
public void ComputeVerdictDigest_DifferentVerdict_ProducesDifferentDigest()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);
// Act
var digest1 = _sut.ComputeVerdictDigest(verdict1);
var digest2 = _sut.ComputeVerdictDigest(verdict2);
// Assert
digest1.Should().NotBe(digest2);
}
[Fact]
public void ComputeVerdictDigest_ReturnsSha256Format()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var verdict = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
// Act
var digest = _sut.ComputeVerdictDigest(verdict);
// Assert
digest.Should().StartWith("sha256:");
digest.Length.Should().Be(7 + 64); // "sha256:" + 64 hex chars
}
[Fact]
public void ComputeVerdictDigest_FindingOrderDoesNotAffectDigest()
{
// Arrange
var findings1 = ImmutableArray.Create(
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" },
new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" });
var findings2 = ImmutableArray.Create(
new FindingRecord { FindingId = "f2", VulnerabilityId = "CVE-2", Component = "pkg:b", Severity = "medium" },
new FindingRecord { FindingId = "f1", VulnerabilityId = "CVE-1", Component = "pkg:a", Severity = "high" });
var verdict1 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings1);
var verdict2 = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings2);
// Act
var digest1 = _sut.ComputeVerdictDigest(verdict1);
var digest2 = _sut.ComputeVerdictDigest(verdict2);
// Assert - should be the same due to deterministic ordering in digest calculation
digest1.Should().Be(digest2);
}
#endregion
#region GenerateDiffReport Tests
[Fact]
public void GenerateDiffReport_MatchingResult_ContainsMatchMessage()
{
// Arrange
var result = new VerificationResult
{
OriginalDigest = "sha256:abc123",
ReplayDigest = "sha256:abc123",
IsDeterministic = true,
DeterminismScore = 1.0,
VerifiedAt = DateTimeOffset.UtcNow
};
// Act
var report = _sut.GenerateDiffReport(result);
// Assert
report.Should().Contain("## Result: MATCH");
report.Should().Contain("identical verdict");
}
[Fact]
public void GenerateDiffReport_MismatchResult_ContainsDifferences()
{
// Arrange
var result = new VerificationResult
{
OriginalDigest = "sha256:abc123",
ReplayDigest = "sha256:def456",
IsDeterministic = false,
DeterminismScore = 0.5,
VerifiedAt = DateTimeOffset.UtcNow,
Differences = ImmutableArray.Create(
new VerdictDifference
{
Field = "Outcome",
OriginalValue = "Pass",
ReplayValue = "Fail",
Severity = DifferenceSeverity.Critical,
Explanation = "The final decision differs"
})
};
// Act
var report = _sut.GenerateDiffReport(result);
// Assert
report.Should().Contain("## Result: MISMATCH");
report.Should().Contain("### Outcome");
report.Should().Contain("**Original:** `Pass`");
report.Should().Contain("**Replay:** `Fail`");
report.Should().Contain("## Possible Causes");
}
[Fact]
public void GenerateDiffReport_ContainsDigestTable()
{
// Arrange
var result = new VerificationResult
{
OriginalDigest = "sha256:originaldigest",
ReplayDigest = "sha256:replaydigest",
IsDeterministic = false,
DeterminismScore = 0.8,
VerifiedAt = DateTimeOffset.UtcNow
};
// Act
var report = _sut.GenerateDiffReport(result);
// Assert
report.Should().Contain("## Digests");
report.Should().Contain("`sha256:originaldigest`");
report.Should().Contain("`sha256:replaydigest`");
}
#endregion
#region DeterminismScore Tests
[Fact]
public void Verify_NoDifferences_ScoreIsOne()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.DeterminismScore.Should().Be(1.0);
}
[Fact]
public void Verify_CriticalDifference_ScoreDecreasesSignificantly()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdict("verdict-1", VerdictOutcome.Pass, "medium", findings);
var replay = CreateVerdict("verdict-1", VerdictOutcome.Fail, "medium", findings);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.DeterminismScore.Should().BeLessThanOrEqualTo(0.5); // Critical penalty is 0.5
}
[Fact]
public void Verify_MultipleLowDifferences_ScoreDecreasesModestly()
{
// Arrange
var findings = CreateFindings("finding-1", "CVE-2023-1234", "pkg:npm/lodash@4.17.21", "high");
var original = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
["rule-1", "rule-2", "rule-3"]);
var replay = CreateVerdictWithRules("verdict-1", VerdictOutcome.Pass, "medium", findings,
["rule-3", "rule-2", "rule-1"]);
// Act
var result = _sut.Verify(original, replay);
// Assert
result.DeterminismScore.Should().BeGreaterThan(0.8); // Low penalty of 0.05
}
#endregion
#region Helper Methods
private static ImmutableArray<FindingRecord> CreateFindings(
string findingId,
string cveId,
string component,
string severity)
{
return ImmutableArray.Create(new FindingRecord
{
FindingId = findingId,
VulnerabilityId = cveId,
Component = component,
Severity = severity
});
}
private static VerdictRecord CreateVerdict(
string verdictId,
VerdictOutcome outcome,
string severity,
ImmutableArray<FindingRecord> findings)
{
return new VerdictRecord
{
VerdictId = verdictId,
Outcome = outcome,
Severity = severity,
PolicyId = "policy-default",
RuleIds = ImmutableArray.Create("rule-1"),
Findings = findings,
RenderedAt = DateTimeOffset.UtcNow
};
}
private static VerdictRecord CreateVerdictWithRules(
string verdictId,
VerdictOutcome outcome,
string severity,
ImmutableArray<FindingRecord> findings,
string[] ruleIds)
{
return new VerdictRecord
{
VerdictId = verdictId,
Outcome = outcome,
Severity = severity,
PolicyId = "policy-default",
RuleIds = ruleIds.ToImmutableArray(),
Findings = findings,
RenderedAt = DateTimeOffset.UtcNow
};
}
#endregion
}
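/// <summary>
/// A minimal non-test sketch of the verifier API exercised above: compare a replayed verdict with
/// the original and render the markdown diff report on mismatch. The NullLogger and the way the
/// verdicts are obtained are assumptions for illustration; callers would inject their own logger.
/// </summary>
internal static class DeterminismVerifierUsageSketch
{
public static string VerifyAndReport(VerdictRecord original, VerdictRecord replayed)
{
var verifier = new DeterminismVerifier(
Microsoft.Extensions.Logging.Abstractions.NullLogger<DeterminismVerifier>.Instance);
var result = verifier.Verify(original, replayed);
if (result.IsDeterministic)
{
return $"MATCH ({result.OriginalDigest})";
}
// Each VerdictDifference carries a field, original/replay values, and a severity.
return verifier.GenerateDiffReport(result);
}
}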

View File

@@ -0,0 +1,452 @@
// <copyright file="InputManifestResolverTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Moq;
using StellaOps.Replay.Core;
using Xunit;
namespace StellaOps.Replay.Core.Tests.Unit;
/// <summary>
/// Unit tests for <see cref="InputManifestResolver"/>.
/// Sprint: SPRINT_20260107_006_005 Task RB-010
/// </summary>
[Trait("Category", "Unit")]
public sealed class InputManifestResolverTests : IDisposable
{
private readonly Mock<IFeedSnapshotStore> _feedStore;
private readonly Mock<IPolicyManifestStore> _policyStore;
private readonly Mock<IVexDocumentStore> _vexStore;
private readonly MemoryCache _cache;
private readonly Mock<ILogger<InputManifestResolver>> _logger;
private readonly InputManifestResolver _sut;
public InputManifestResolverTests()
{
_feedStore = new Mock<IFeedSnapshotStore>();
_policyStore = new Mock<IPolicyManifestStore>();
_vexStore = new Mock<IVexDocumentStore>();
_cache = new MemoryCache(new MemoryCacheOptions());
_logger = new Mock<ILogger<InputManifestResolver>>();
_sut = new InputManifestResolver(
_feedStore.Object,
_policyStore.Object,
_vexStore.Object,
_cache,
_logger.Object);
}
public void Dispose()
{
_cache.Dispose();
}
#region ResolveAsync Tests
[Fact]
public async Task ResolveAsync_EmptyManifest_ReturnsCompleteWithNoData()
{
// Arrange
var manifest = new InputManifest();
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeTrue();
result.Errors.Should().BeEmpty();
result.FeedData.Should().BeNull();
result.PolicyBundle.Should().BeNull();
result.VexDocuments.Should().BeEmpty();
}
[Fact]
public async Task ResolveAsync_FeedSnapshotExists_ResolvesFeedData()
{
// Arrange
var feedHash = "sha256:abc123";
var feedData = new FeedData
{
Hash = feedHash,
Content = "test feed content"u8.ToArray(),
SnapshotAt = DateTimeOffset.UtcNow
};
_feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
.ReturnsAsync(feedData);
var manifest = new InputManifest { FeedSnapshotHash = feedHash };
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeTrue();
result.FeedData.Should().Be(feedData);
result.Errors.Should().BeEmpty();
}
[Fact]
public async Task ResolveAsync_FeedSnapshotNotFound_AddsError()
{
// Arrange
var feedHash = "sha256:notfound";
_feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
.ReturnsAsync((FeedData?)null);
var manifest = new InputManifest { FeedSnapshotHash = feedHash };
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeFalse();
result.FeedData.Should().BeNull();
result.Errors.Should().ContainSingle(e =>
e.Type == InputType.FeedSnapshot &&
e.Hash == feedHash &&
e.Message.Contains("Not found"));
}
[Fact]
public async Task ResolveAsync_PolicyManifestExists_ResolvesPolicyBundle()
{
// Arrange
var policyHash = "sha256:policy123";
var policyBundle = new PolicyBundle
{
Hash = policyHash,
Content = ImmutableArray.Create((byte)1, (byte)2, (byte)3),
Version = "1.0.0"
};
_policyStore.Setup(x => x.GetAsync(policyHash, It.IsAny<CancellationToken>()))
.ReturnsAsync(policyBundle);
var manifest = new InputManifest { PolicyManifestHash = policyHash };
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeTrue();
result.PolicyBundle.Should().Be(policyBundle);
}
[Fact]
public async Task ResolveAsync_VexDocumentsExist_ResolvesAllDocuments()
{
// Arrange
var vex1 = new VexDocument { Hash = "sha256:vex1", Content = "{}", Format = "OpenVEX" };
var vex2 = new VexDocument { Hash = "sha256:vex2", Content = "{}", Format = "CSAF" };
_vexStore.Setup(x => x.GetAsync("sha256:vex1", It.IsAny<CancellationToken>()))
.ReturnsAsync(vex1);
_vexStore.Setup(x => x.GetAsync("sha256:vex2", It.IsAny<CancellationToken>()))
.ReturnsAsync(vex2);
var manifest = new InputManifest
{
VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
};
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeTrue();
result.VexDocuments.Should().HaveCount(2);
result.VexDocuments.Should().Contain(vex1);
result.VexDocuments.Should().Contain(vex2);
}
[Fact]
public async Task ResolveAsync_PartialVexNotFound_AddsErrorButIncludesFound()
{
// Arrange
var vex1 = new VexDocument { Hash = "sha256:vex1", Content = "{}", Format = "OpenVEX" };
_vexStore.Setup(x => x.GetAsync("sha256:vex1", It.IsAny<CancellationToken>()))
.ReturnsAsync(vex1);
_vexStore.Setup(x => x.GetAsync("sha256:vex2", It.IsAny<CancellationToken>()))
.ReturnsAsync((VexDocument?)null);
var manifest = new InputManifest
{
VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
};
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeFalse();
result.VexDocuments.Should().ContainSingle(v => v.Hash == "sha256:vex1");
result.Errors.Should().ContainSingle(e =>
e.Type == InputType.VexDocument &&
e.Hash == "sha256:vex2");
}
[Fact]
public async Task ResolveAsync_FeedStoreThrowsException_AddsError()
{
// Arrange
var feedHash = "sha256:error";
_feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
.ThrowsAsync(new InvalidOperationException("Connection failed"));
var manifest = new InputManifest { FeedSnapshotHash = feedHash };
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.IsComplete.Should().BeFalse();
result.FeedData.Should().BeNull();
result.Errors.Should().ContainSingle(e =>
e.Type == InputType.FeedSnapshot &&
e.Message.Contains("Connection failed"));
}
[Fact]
public async Task ResolveAsync_PassThroughFields_CopiedToResult()
{
// Arrange
var timestamp = DateTimeOffset.Parse("2026-01-09T12:00:00Z");
var manifest = new InputManifest
{
SourceCodeHash = "sha256:source",
BaseImageDigest = "sha256:baseimage",
ToolchainVersion = "1.2.3",
RandomSeed = 42,
TimestampOverride = timestamp
};
// Act
var result = await _sut.ResolveAsync(manifest);
// Assert
result.SourceCodeHash.Should().Be("sha256:source");
result.BaseImageDigest.Should().Be("sha256:baseimage");
result.ToolchainVersion.Should().Be("1.2.3");
result.RandomSeed.Should().Be(42);
result.TimestampOverride.Should().Be(timestamp);
}
[Fact]
public async Task ResolveAsync_CachesResolvedInputs()
{
// Arrange
var feedHash = "sha256:cacheable";
var feedData = new FeedData
{
Hash = feedHash,
Content = "cached"u8.ToArray(),
SnapshotAt = DateTimeOffset.UtcNow
};
_feedStore.Setup(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()))
.ReturnsAsync(feedData);
var manifest = new InputManifest { FeedSnapshotHash = feedHash };
// Act - first call
await _sut.ResolveAsync(manifest);
// Act - second call
await _sut.ResolveAsync(manifest);
// Assert - store should only be called once due to caching
_feedStore.Verify(x => x.GetAsync(feedHash, It.IsAny<CancellationToken>()), Times.Once);
}
#endregion
#region ValidateAsync Tests
[Fact]
public async Task ValidateAsync_AllInputsExist_ReturnsValid()
{
// Arrange
_feedStore.Setup(x => x.ExistsAsync("sha256:feed", It.IsAny<CancellationToken>()))
.ReturnsAsync(true);
_policyStore.Setup(x => x.ExistsAsync("sha256:policy", It.IsAny<CancellationToken>()))
.ReturnsAsync(true);
_vexStore.Setup(x => x.ExistsAsync("sha256:vex", It.IsAny<CancellationToken>()))
.ReturnsAsync(true);
var manifest = new InputManifest
{
FeedSnapshotHash = "sha256:feed",
PolicyManifestHash = "sha256:policy",
VexDocumentHashes = ImmutableArray.Create("sha256:vex")
};
// Act
var result = await _sut.ValidateAsync(manifest);
// Assert
result.IsValid.Should().BeTrue();
result.MissingInputs.Should().BeEmpty();
}
[Fact]
public async Task ValidateAsync_FeedMissing_ReturnsInvalidWithMissingList()
{
// Arrange
_feedStore.Setup(x => x.ExistsAsync("sha256:missing", It.IsAny<CancellationToken>()))
.ReturnsAsync(false);
var manifest = new InputManifest { FeedSnapshotHash = "sha256:missing" };
// Act
var result = await _sut.ValidateAsync(manifest);
// Assert
result.IsValid.Should().BeFalse();
result.MissingInputs.Should().ContainSingle(m => m.Contains("Feed snapshot") && m.Contains("sha256:missing"));
}
[Fact]
public async Task ValidateAsync_PolicyMissing_ReturnsInvalidWithMissingList()
{
// Arrange
_policyStore.Setup(x => x.ExistsAsync("sha256:missing", It.IsAny<CancellationToken>()))
.ReturnsAsync(false);
var manifest = new InputManifest { PolicyManifestHash = "sha256:missing" };
// Act
var result = await _sut.ValidateAsync(manifest);
// Assert
result.IsValid.Should().BeFalse();
result.MissingInputs.Should().ContainSingle(m => m.Contains("Policy manifest"));
}
[Fact]
public async Task ValidateAsync_VexMissing_ReturnsInvalidWithMissingList()
{
// Arrange
_vexStore.Setup(x => x.ExistsAsync("sha256:vex1", It.IsAny<CancellationToken>()))
.ReturnsAsync(true);
_vexStore.Setup(x => x.ExistsAsync("sha256:vex2", It.IsAny<CancellationToken>()))
.ReturnsAsync(false);
var manifest = new InputManifest
{
VexDocumentHashes = ImmutableArray.Create("sha256:vex1", "sha256:vex2")
};
// Act
var result = await _sut.ValidateAsync(manifest);
// Assert
result.IsValid.Should().BeFalse();
result.MissingInputs.Should().ContainSingle(m => m.Contains("VEX document") && m.Contains("sha256:vex2"));
}
[Fact]
public async Task ValidateAsync_EmptyManifest_ReturnsValid()
{
// Arrange
var manifest = new InputManifest();
// Act
var result = await _sut.ValidateAsync(manifest);
// Assert
result.IsValid.Should().BeTrue();
result.MissingInputs.Should().BeEmpty();
}
#endregion
#region InputManifest Model Tests
[Fact]
public void InputManifest_DefaultValues_AreCorrect()
{
// Arrange & Act
var manifest = new InputManifest();
// Assert
manifest.FeedSnapshotHash.Should().BeNull();
manifest.PolicyManifestHash.Should().BeNull();
manifest.SourceCodeHash.Should().BeNull();
manifest.BaseImageDigest.Should().BeNull();
manifest.VexDocumentHashes.Should().BeEmpty();
manifest.ToolchainVersion.Should().BeNull();
manifest.RandomSeed.Should().BeNull();
manifest.TimestampOverride.Should().BeNull();
}
[Fact]
public void InputManifest_WithInitializer_SetsValues()
{
// Arrange & Act
var timestamp = DateTimeOffset.UtcNow;
var manifest = new InputManifest
{
FeedSnapshotHash = "feed",
PolicyManifestHash = "policy",
SourceCodeHash = "source",
BaseImageDigest = "image",
VexDocumentHashes = ImmutableArray.Create("vex1", "vex2"),
ToolchainVersion = "1.0",
RandomSeed = 42,
TimestampOverride = timestamp
};
// Assert
manifest.FeedSnapshotHash.Should().Be("feed");
manifest.PolicyManifestHash.Should().Be("policy");
manifest.SourceCodeHash.Should().Be("source");
manifest.BaseImageDigest.Should().Be("image");
manifest.VexDocumentHashes.Should().HaveCount(2);
manifest.ToolchainVersion.Should().Be("1.0");
manifest.RandomSeed.Should().Be(42);
manifest.TimestampOverride.Should().Be(timestamp);
}
#endregion
#region ResolvedInputs Model Tests
[Fact]
public void ResolvedInputs_DefaultValues_AreCorrect()
{
// Arrange & Act
var resolved = new ResolvedInputs();
// Assert
resolved.FeedData.Should().BeNull();
resolved.PolicyBundle.Should().BeNull();
resolved.VexDocuments.Should().BeEmpty();
resolved.Errors.Should().BeEmpty();
resolved.IsComplete.Should().BeFalse();
}
#endregion
#region InputResolutionError Model Tests
[Fact]
public void InputResolutionError_RecordEquality_Works()
{
// Arrange
var error1 = new InputResolutionError(InputType.FeedSnapshot, "hash1", "message1");
var error2 = new InputResolutionError(InputType.FeedSnapshot, "hash1", "message1");
var error3 = new InputResolutionError(InputType.PolicyManifest, "hash1", "message1");
// Assert
error1.Should().Be(error2);
error1.Should().NotBe(error3);
}
#endregion
}
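/// <summary>
/// A minimal non-test sketch of the resolver flow exercised above: check that every pinned input
/// is still retrievable, then resolve the manifest. The resolver instance is assumed to be built
/// elsewhere with the stores, cache, and logger shown in the fixture.
/// </summary>
internal static class InputManifestResolverUsageSketch
{
public static async Task<ResolvedInputs?> TryResolveAsync(InputManifestResolver resolver, InputManifest manifest)
{
var validation = await resolver.ValidateAsync(manifest);
if (!validation.IsValid)
{
// validation.MissingInputs lists the unavailable hashes in human-readable form.
return null;
}
var resolved = await resolver.ResolveAsync(manifest);
return resolved.IsComplete ? resolved : null;
}
}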

View File

@@ -13,6 +13,8 @@ using StellaOps.AuditPack.Services;
using StellaOps.Replay.WebService;
using StellaOps.TestKit;
using Xunit;
using AuditPackResult = StellaOps.AuditPack.Services.ReplayExecutionResult;
using AuditPackStatus = StellaOps.AuditPack.Services.ReplayStatus;
namespace StellaOps.Replay.Core.Tests;
@@ -52,12 +54,12 @@ public class VerdictReplayEndpointsTests
};
}
private static ReplayExecutionResult CreateSuccessResult(bool match = true)
private static AuditPackResult CreateSuccessResult(bool match = true)
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = match ? ReplayStatus.Match : ReplayStatus.Drift,
Status = match ? AuditPackStatus.Match : AuditPackStatus.Drift,
VerdictMatches = match,
DecisionMatches = match,
OriginalVerdictDigest = "sha256:verdict",
@@ -115,7 +117,7 @@ public class VerdictReplayEndpointsTests
ConfidenceScore = 0.95,
ExpectedOutcome = new ReplayOutcomePrediction
{
ExpectedStatus = ReplayStatus.Match,
ExpectedStatus = AuditPackStatus.Match,
ExpectedDecision = "pass"
}
});
@@ -155,14 +157,14 @@ public class VerdictReplayEndpointsTests
public void CompareDivergence_DetectsDifferences()
{
// Arrange
var original = new ReplayExecutionResult
var original = new AuditPackResult
{
Success = true,
OriginalVerdictDigest = "sha256:aaa",
OriginalDecision = "pass"
};
var replayed = new ReplayExecutionResult
var replayed = new AuditPackResult
{
Success = true,
ReplayedVerdictDigest = "sha256:bbb",
@@ -174,8 +176,8 @@ public class VerdictReplayEndpointsTests
};
_mockPredicate.Setup(p => p.CompareDivergence(
It.IsAny<ReplayExecutionResult>(),
It.IsAny<ReplayExecutionResult>()))
It.IsAny<AuditPackResult>(),
It.IsAny<AuditPackResult>()))
.Returns(new ReplayDivergenceReport
{
HasDivergence = true,
@@ -218,10 +220,10 @@ public class VerdictReplayEndpointsTests
public void ReplayExecutionResult_DriftItems_ArePopulated()
{
// Arrange
var result = new ReplayExecutionResult
var result = new AuditPackResult
{
Success = true,
Status = ReplayStatus.Drift,
Status = AuditPackStatus.Drift,
VerdictMatches = false,
Drifts =
[

View File

@@ -11,6 +11,8 @@ using StellaOps.AuditPack.Models;
using StellaOps.AuditPack.Services;
using StellaOps.TestKit;
using Xunit;
using AuditPackResult = StellaOps.AuditPack.Services.ReplayExecutionResult;
using AuditPackStatus = StellaOps.AuditPack.Services.ReplayStatus;
namespace StellaOps.Replay.Core.Tests;
@@ -195,7 +197,7 @@ public class VerdictReplayIntegrationTests
{
// Arrange
var attestationService = new ReplayAttestationService();
var replays = new List<(AuditBundleManifest, ReplayExecutionResult)>
var replays = new List<(AuditBundleManifest, AuditPackResult)>
{
(CreateTestManifest("bundle-1"), CreateMatchingReplayResult()),
(CreateTestManifest("bundle-2"), CreateDivergentReplayResult()),
@@ -358,12 +360,12 @@ public class VerdictReplayIntegrationTests
};
}
private static ReplayExecutionResult CreateMatchingReplayResult()
private static AuditPackResult CreateMatchingReplayResult()
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = ReplayStatus.Match,
Status = AuditPackStatus.Match,
VerdictMatches = true,
DecisionMatches = true,
OriginalVerdictDigest = "sha256:verdict-digest-123",
@@ -376,12 +378,12 @@ public class VerdictReplayIntegrationTests
};
}
private static ReplayExecutionResult CreateDivergentReplayResult()
private static AuditPackResult CreateDivergentReplayResult()
{
return new ReplayExecutionResult
return new AuditPackResult
{
Success = true,
Status = ReplayStatus.Drift,
Status = AuditPackStatus.Drift,
VerdictMatches = false,
DecisionMatches = false,
OriginalVerdictDigest = "sha256:verdict-original",

View File

@@ -0,0 +1,346 @@
// <copyright file="ValidationEndpoints.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Validation;
using StellaOps.Scanner.WebService.Security;
namespace StellaOps.Scanner.WebService.Endpoints;
/// <summary>
/// SBOM validation endpoints.
/// Sprint: SPRINT_20260107_005_003 Task VG-006
/// </summary>
internal static class ValidationEndpoints
{
/// <summary>
/// Maps SBOM validation endpoints.
/// </summary>
public static void MapValidationEndpoints(this IEndpointRouteBuilder app)
{
ArgumentNullException.ThrowIfNull(app);
var group = app.MapGroup("/api/v1/sbom")
.WithTags("Validation")
.RequireAuthorization();
// POST /api/v1/sbom/validate
group.MapPost("/validate", ValidateSbomAsync)
.WithName("scanner.sbom.validate")
.WithDescription("Validates an SBOM document against CycloneDX or SPDX schemas")
.Accepts<byte[]>(
"application/vnd.cyclonedx+json",
"application/vnd.cyclonedx+xml",
"application/spdx+json",
"text/spdx",
"application/json",
"application/octet-stream")
.Produces<SbomValidationResponseDto>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status503ServiceUnavailable)
.RequireAuthorization(ScannerPolicies.ScansRead);
// GET /api/v1/sbom/validators
group.MapGet("/validators", GetValidatorsAsync)
.WithName("scanner.sbom.validators")
.WithDescription("Gets information about available SBOM validators")
.Produces<ValidatorsInfoResponseDto>(StatusCodes.Status200OK)
.RequireAuthorization(ScannerPolicies.ScansRead);
}
private static async Task<IResult> ValidateSbomAsync(
HttpContext context,
[FromServices] CompositeValidator validator,
[FromServices] IOptions<ValidationGateOptions> options,
[FromQuery] string? format = null,
CancellationToken cancellationToken = default)
{
var gateOptions = options.Value;
// Check if validation is enabled
if (!gateOptions.Enabled || gateOptions.Mode == SbomValidationMode.Off)
{
return Results.Ok(new SbomValidationResponseDto
{
IsValid = true,
Format = "unknown",
ValidatorName = "validation-disabled",
ValidatorVersion = "n/a",
Message = "Validation is disabled",
Diagnostics = Array.Empty<ValidationDiagnosticDto>()
});
}
// Read request body
using var memoryStream = new MemoryStream();
await context.Request.Body.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
var sbomBytes = memoryStream.ToArray();
if (sbomBytes.Length == 0)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Empty request body",
Detail = "SBOM document is required",
Status = StatusCodes.Status400BadRequest
});
}
// Determine format
SbomFormat sbomFormat;
if (!string.IsNullOrWhiteSpace(format))
{
if (!Enum.TryParse<SbomFormat>(format, ignoreCase: true, out sbomFormat))
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid format",
Detail = $"Unknown SBOM format: {format}. Supported: CycloneDxJson, CycloneDxXml, Spdx23Json, Spdx23TagValue, Spdx3JsonLd",
Status = StatusCodes.Status400BadRequest
});
}
}
else
{
// Auto-detect format
sbomFormat = CompositeValidator.DetectFormat(sbomBytes);
if (sbomFormat == SbomFormat.Unknown)
{
// Try content-type header
sbomFormat = DetectFormatFromContentType(context.Request.ContentType);
}
}
// Run validation
var validationOptions = gateOptions.ToValidationOptions();
SbomValidationResult result;
if (sbomFormat == SbomFormat.Unknown)
{
result = await validator.ValidateAutoAsync(sbomBytes, validationOptions, cancellationToken)
.ConfigureAwait(false);
}
else
{
result = await validator.ValidateAsync(sbomBytes, sbomFormat, validationOptions, cancellationToken)
.ConfigureAwait(false);
}
// Check if validator is available
if (result.Diagnostics.Any(d => d.Code == "VALIDATOR_UNAVAILABLE"))
{
return Results.Problem(
title: "Validator unavailable",
detail: result.Diagnostics.FirstOrDefault(d => d.Code == "VALIDATOR_UNAVAILABLE")?.Message,
statusCode: StatusCodes.Status503ServiceUnavailable);
}
var response = new SbomValidationResponseDto
{
IsValid = result.IsValid,
Format = result.Format.ToString(),
ValidatorName = result.ValidatorName,
ValidatorVersion = result.ValidatorVersion,
ValidationDurationMs = (int)result.ValidationDuration.TotalMilliseconds,
ErrorCount = result.ErrorCount,
WarningCount = result.WarningCount,
SchemaVersion = result.SchemaVersion,
Diagnostics = result.Diagnostics.Select(d => new ValidationDiagnosticDto
{
Severity = d.Severity.ToString(),
Code = d.Code,
Message = d.Message,
Path = d.Path,
Line = d.Line,
Suggestion = d.Suggestion
}).ToArray()
};
return Results.Ok(response);
}
private static async Task<IResult> GetValidatorsAsync(
[FromServices] CompositeValidator validator,
CancellationToken cancellationToken)
{
var info = await validator.GetInfoAsync(cancellationToken).ConfigureAwait(false);
var response = new ValidatorsInfoResponseDto
{
IsAvailable = info.IsAvailable,
Name = info.Name,
Version = info.Version,
SupportedFormats = info.SupportedFormats.Select(f => f.ToString()).ToArray(),
SupportedSchemaVersions = info.SupportedSchemaVersions.ToArray()
};
return Results.Ok(response);
}
private static SbomFormat DetectFormatFromContentType(string? contentType)
{
if (string.IsNullOrWhiteSpace(contentType))
{
return SbomFormat.Unknown;
}
return contentType.ToLowerInvariant() switch
{
var ct when ct.Contains("cyclonedx+json") => SbomFormat.CycloneDxJson,
var ct when ct.Contains("cyclonedx+xml") || ct.Contains("cyclonedx") && ct.Contains("xml") => SbomFormat.CycloneDxXml,
var ct when ct.Contains("spdx+json") || ct.Contains("spdx") && ct.Contains("json") => SbomFormat.Spdx23Json,
var ct when ct.Contains("text/spdx") => SbomFormat.Spdx23TagValue,
_ => SbomFormat.Unknown
};
}
}
/// <summary>
/// Response DTO for SBOM validation.
/// </summary>
public sealed class SbomValidationResponseDto
{
/// <summary>
/// Gets or sets whether the SBOM is valid.
/// </summary>
[JsonPropertyName("isValid")]
public bool IsValid { get; set; }
/// <summary>
/// Gets or sets the SBOM format.
/// </summary>
[JsonPropertyName("format")]
public required string Format { get; set; }
/// <summary>
/// Gets or sets the validator name.
/// </summary>
[JsonPropertyName("validatorName")]
public required string ValidatorName { get; set; }
/// <summary>
/// Gets or sets the validator version.
/// </summary>
[JsonPropertyName("validatorVersion")]
public required string ValidatorVersion { get; set; }
/// <summary>
/// Gets or sets the validation duration in milliseconds.
/// </summary>
[JsonPropertyName("validationDurationMs")]
public int ValidationDurationMs { get; set; }
/// <summary>
/// Gets or sets the error count.
/// </summary>
[JsonPropertyName("errorCount")]
public int ErrorCount { get; set; }
/// <summary>
/// Gets or sets the warning count.
/// </summary>
[JsonPropertyName("warningCount")]
public int WarningCount { get; set; }
/// <summary>
/// Gets or sets the schema version validated against.
/// </summary>
[JsonPropertyName("schemaVersion")]
public string? SchemaVersion { get; set; }
/// <summary>
/// Gets or sets a message (for disabled validation).
/// </summary>
[JsonPropertyName("message")]
public string? Message { get; set; }
/// <summary>
/// Gets or sets the validation diagnostics.
/// </summary>
[JsonPropertyName("diagnostics")]
public required ValidationDiagnosticDto[] Diagnostics { get; set; }
}
/// <summary>
/// DTO for a validation diagnostic.
/// </summary>
public sealed class ValidationDiagnosticDto
{
/// <summary>
/// Gets or sets the severity.
/// </summary>
[JsonPropertyName("severity")]
public required string Severity { get; set; }
/// <summary>
/// Gets or sets the diagnostic code.
/// </summary>
[JsonPropertyName("code")]
public required string Code { get; set; }
/// <summary>
/// Gets or sets the message.
/// </summary>
[JsonPropertyName("message")]
public required string Message { get; set; }
/// <summary>
/// Gets or sets the JSON path.
/// </summary>
[JsonPropertyName("path")]
public string? Path { get; set; }
/// <summary>
/// Gets or sets the line number.
/// </summary>
[JsonPropertyName("line")]
public int? Line { get; set; }
/// <summary>
/// Gets or sets a suggestion.
/// </summary>
[JsonPropertyName("suggestion")]
public string? Suggestion { get; set; }
}
/// <summary>
/// Response DTO for validators info.
/// </summary>
public sealed class ValidatorsInfoResponseDto
{
/// <summary>
/// Gets or sets whether validators are available.
/// </summary>
[JsonPropertyName("isAvailable")]
public bool IsAvailable { get; set; }
/// <summary>
/// Gets or sets the composite validator name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; set; }
/// <summary>
/// Gets or sets the version.
/// </summary>
[JsonPropertyName("version")]
public required string Version { get; set; }
/// <summary>
/// Gets or sets the supported formats.
/// </summary>
[JsonPropertyName("supportedFormats")]
public required string[] SupportedFormats { get; set; }
/// <summary>
/// Gets or sets the supported schema versions.
/// </summary>
[JsonPropertyName("supportedSchemaVersions")]
public required string[] SupportedSchemaVersions { get; set; }
}
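/// <summary>
/// A minimal client-side sketch of the endpoint mapped above: POST an SBOM to
/// /api/v1/sbom/validate and deserialize the response DTO. The HttpClient (base address, auth)
/// is assumed to be configured by the caller, and the CycloneDX JSON content type is only one of
/// the accepted media types; omit the format query parameter to let the server auto-detect.
/// </summary>
internal static class SbomValidationClientSketch
{
public static async Task<SbomValidationResponseDto?> ValidateAsync(
System.Net.Http.HttpClient client,
byte[] sbomBytes,
CancellationToken cancellationToken)
{
using var content = new System.Net.Http.ByteArrayContent(sbomBytes);
content.Headers.ContentType =
new System.Net.Http.Headers.MediaTypeHeaderValue("application/vnd.cyclonedx+json");
using var response = await client.PostAsync("/api/v1/sbom/validate", content, cancellationToken);
response.EnsureSuccessStatusCode();
var json = await response.Content.ReadAsStringAsync(cancellationToken);
// The DTO declares explicit JsonPropertyName attributes, so default serializer options suffice.
return JsonSerializer.Deserialize<SbomValidationResponseDto>(json);
}
}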

View File

@@ -2,8 +2,10 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Canonical.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Emit.Composition;
using StellaOps.Scanner.Emit.Spdx;
using StellaOps.Scanner.WebService.Domain;
@@ -102,8 +104,8 @@ public sealed class SbomExportService : ISbomExportService
artifact.JsonBytes,
SbomExportFormat.Spdx3,
profile,
artifact.JsonDigest,
artifact.ComponentCount));
artifact.JsonSha256,
0)); // ComponentCount not available on SpdxArtifact
}
private async Task<SbomExportResult> ExportSpdx2Async(
@@ -177,25 +179,73 @@ public sealed class SbomExportService : ISbomExportService
ScanSnapshot snapshot,
IReadOnlyList<SbomLayerFragment> layerFragments)
{
// Convert SbomLayerFragment to the format expected by SpdxComposer
var fragments = layerFragments.Select(f => new Scanner.Core.Contracts.LayerSbomFragment
// Convert SbomLayerFragment to LayerComponentFragment for SpdxComposer
var fragments = layerFragments.Select(f => new LayerComponentFragment
{
LayerDigest = f.LayerDigest,
Order = f.Order,
ComponentPurls = f.ComponentPurls.ToList()
}).ToList();
Components = f.ComponentPurls
.Select(purl => new ComponentRecord
{
Identity = ComponentIdentity.Create(
key: purl,
name: ExtractNameFromPurl(purl),
version: ExtractVersionFromPurl(purl),
purl: purl),
LayerDigest = f.LayerDigest
})
.ToImmutableArray()
}).ToImmutableArray();
return new SbomCompositionRequest
var image = new ImageArtifactDescriptor
{
Image = new Scanner.Core.Contracts.ImageReference
{
ImageDigest = snapshot.Target.Digest ?? string.Empty,
ImageRef = snapshot.Target.Reference ?? string.Empty
},
LayerFragments = fragments,
GeneratedAt = _timeProvider.GetUtcNow(),
GeneratorVersion = "StellaOps-Scanner/1.0"
ImageDigest = snapshot.Target.Digest ?? string.Empty,
ImageReference = snapshot.Target.Reference
};
return SbomCompositionRequest.Create(
image,
fragments,
_timeProvider.GetUtcNow(),
generatorName: "StellaOps-Scanner",
generatorVersion: "1.0");
}
private static string ExtractNameFromPurl(string purl)
{
// Basic PURL parsing: pkg:type/namespace/name@version
// Returns the name portion
try
{
var withoutScheme = purl.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase)
? purl[4..]
: purl;
var atIndex = withoutScheme.IndexOf('@');
var pathPart = atIndex >= 0 ? withoutScheme[..atIndex] : withoutScheme;
var slashIndex = pathPart.LastIndexOf('/');
return slashIndex >= 0 ? pathPart[(slashIndex + 1)..] : pathPart;
}
catch
{
return purl;
}
}
private static string? ExtractVersionFromPurl(string purl)
{
// Basic PURL parsing: pkg:type/namespace/name@version
// Returns the version portion
try
{
var atIndex = purl.IndexOf('@');
if (atIndex < 0) return null;
var versionPart = purl[(atIndex + 1)..];
var queryIndex = versionPart.IndexOf('?');
return queryIndex >= 0 ? versionPart[..queryIndex] : versionPart;
}
catch
{
return null;
}
}
private static int EstimateComponentCount(byte[] sbomBytes)

View File

@@ -83,7 +83,7 @@ public interface ISecretExceptionPatternService
/// </summary>
public sealed class SecretDetectionSettingsService : ISecretDetectionSettingsService
{
private readonly ISecretDetectionSettingsRepository _repository;
private readonly Storage.Repositories.ISecretDetectionSettingsRepository _repository;
private readonly TimeProvider _timeProvider;
private static readonly JsonSerializerOptions JsonOptions = new()
@@ -92,7 +92,7 @@ public sealed class SecretDetectionSettingsService : ISecretDetectionSettingsSer
};
public SecretDetectionSettingsService(
ISecretDetectionSettingsRepository repository,
Storage.Repositories.ISecretDetectionSettingsRepository repository,
TimeProvider timeProvider)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));

View File

@@ -53,6 +53,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Orchestration/StellaOps.Scanner.Orchestration.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Sources/StellaOps.Scanner.Sources.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Validation/StellaOps.Scanner.Validation.csproj" />
<ProjectReference Include="../../Router/__Libraries/StellaOps.Router.AspNet/StellaOps.Router.AspNet.csproj" />
</ItemGroup>

View File

@@ -14,6 +14,7 @@ using ProtoSerializer = CycloneDX.Protobuf.Serializer;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Evidence;
using StellaOps.Scanner.Emit.Pedigree;
namespace StellaOps.Scanner.Emit.Composition;
@@ -178,7 +179,7 @@ public sealed class CycloneDxComposer
SpecVersion = SpecificationVersion.v1_6,
Version = 1,
Metadata = BuildMetadata(request, view, generatedAt),
Components = BuildComponents(components),
Components = BuildComponents(request, components),
Dependencies = BuildDependencies(components),
};
@@ -318,9 +319,19 @@ public sealed class CycloneDxComposer
return purlBuilder.ToString();
}
private static List<Component> BuildComponents(ImmutableArray<AggregatedComponent> components)
/// <summary>
/// Builds CycloneDX component models from aggregated components.
/// Sprint: SPRINT_20260107_005_002 Task PD-009 - Added pedigree support.
/// </summary>
private static List<Component> BuildComponents(
SbomCompositionRequest request,
ImmutableArray<AggregatedComponent> components)
{
var evidenceMapper = new CycloneDxEvidenceMapper();
var pedigreeMapper = request.IncludePedigree && request.PedigreeDataByPurl is not null
? new CycloneDxPedigreeMapper()
: null;
var result = new List<Component>(components.Length);
foreach (var component in components)
{
@@ -337,6 +348,16 @@ public sealed class CycloneDxComposer
Evidence = evidenceMapper.Map(component),
};
// Apply pedigree data if available and enabled
// Sprint: SPRINT_20260107_005_002 Task PD-009
if (pedigreeMapper is not null && !string.IsNullOrEmpty(component.Identity.Purl))
{
if (request.PedigreeDataByPurl!.TryGetValue(component.Identity.Purl, out var pedigreeData))
{
model.Pedigree = pedigreeMapper.Map(pedigreeData);
}
}
result.Add(model);
}

View File

@@ -4,6 +4,7 @@ using System.Collections.Immutable;
using System.Linq;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Pedigree;
namespace StellaOps.Scanner.Emit.Composition;
@@ -45,6 +46,21 @@ public sealed record SbomCompositionRequest
public ImmutableArray<SbomPolicyFinding> PolicyFindings { get; init; }
= ImmutableArray<SbomPolicyFinding>.Empty;
/// <summary>
/// Gets the pre-fetched pedigree data keyed by component PURL.
/// This enables synchronous composition while allowing async pedigree lookups
/// to happen before calling <see cref="CycloneDxComposer.Compose"/>.
/// Sprint: SPRINT_20260107_005_002 Task PD-009
/// </summary>
public IReadOnlyDictionary<string, PedigreeData>? PedigreeDataByPurl { get; init; }
= null;
/// <summary>
/// Gets whether pedigree data should be included in the SBOM.
/// Defaults to true if pedigree data is provided.
/// </summary>
public bool IncludePedigree { get; init; } = true;
public static SbomCompositionRequest Create(
ImageArtifactDescriptor image,
IEnumerable<LayerComponentFragment> fragments,
@@ -52,7 +68,9 @@ public sealed record SbomCompositionRequest
string? generatorName = null,
string? generatorVersion = null,
IReadOnlyDictionary<string, string>? properties = null,
IEnumerable<SbomPolicyFinding>? policyFindings = null)
IEnumerable<SbomPolicyFinding>? policyFindings = null,
IReadOnlyDictionary<string, PedigreeData>? pedigreeData = null,
bool includePedigree = true)
{
ArgumentNullException.ThrowIfNull(image);
ArgumentNullException.ThrowIfNull(fragments);
@@ -75,6 +93,8 @@ public sealed record SbomCompositionRequest
GeneratorVersion = Normalize(generatorVersion),
AdditionalProperties = properties,
PolicyFindings = NormalizePolicyFindings(policyFindings),
PedigreeDataByPurl = pedigreeData,
IncludePedigree = includePedigree,
};
}

View File

@@ -0,0 +1,604 @@
// <copyright file="SbomValidationPipeline.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Diagnostics.Metrics;
using System.Text;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Validation;
namespace StellaOps.Scanner.Emit.Composition;
/// <summary>
/// Pipeline configuration for SBOM validation after generation.
/// </summary>
public sealed class SbomValidationPipelineOptions
{
/// <summary>
/// Whether validation is enabled. Default: true.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Whether to fail composition when validation fails. Default: true.
/// </summary>
public bool FailOnError { get; set; } = true;
/// <summary>
/// Whether to validate CycloneDX SBOMs. Default: true.
/// </summary>
public bool ValidateCycloneDx { get; set; } = true;
/// <summary>
/// Whether to validate SPDX SBOMs. Default: true.
/// </summary>
public bool ValidateSpdx { get; set; } = true;
/// <summary>
/// Timeout for validation operations. Default: 60 seconds.
/// </summary>
public TimeSpan ValidationTimeout { get; set; } = TimeSpan.FromSeconds(60);
}
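/// <summary>
/// A small configuration sketch, assuming the host applies these options through the standard
/// options pattern; the relaxed values shown (warn-only, CycloneDX-only, shorter timeout) are
/// illustrative, not recommended defaults.
/// </summary>
internal static class SbomValidationPipelineOptionsSketch
{
public static void Configure(IServiceCollection services)
{
services.Configure<SbomValidationPipelineOptions>(options =>
{
options.Enabled = true;
options.FailOnError = false; // report validation problems without failing composition
options.ValidateSpdx = false; // validate only the CycloneDX output
options.ValidationTimeout = TimeSpan.FromSeconds(30);
});
}
}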
/// <summary>
/// Result of SBOM validation pipeline execution.
/// </summary>
public sealed record SbomValidationPipelineResult
{
/// <summary>
/// Gets whether all validations passed.
/// </summary>
public required bool IsValid { get; init; }
/// <summary>
/// Gets the CycloneDX inventory validation result, if performed.
/// </summary>
public SbomValidationResult? CycloneDxInventoryResult { get; init; }
/// <summary>
/// Gets the CycloneDX usage validation result, if performed.
/// </summary>
public SbomValidationResult? CycloneDxUsageResult { get; init; }
/// <summary>
/// Gets the SPDX inventory validation result, if performed.
/// </summary>
public SbomValidationResult? SpdxInventoryResult { get; init; }
/// <summary>
/// Gets the per-layer validation results, if performed.
/// </summary>
public ImmutableArray<LayerValidationResult> LayerResults { get; init; } = [];
/// <summary>
/// Gets the total number of errors across all validations.
/// </summary>
public int TotalErrorCount =>
(CycloneDxInventoryResult?.ErrorCount ?? 0) +
(CycloneDxUsageResult?.ErrorCount ?? 0) +
(SpdxInventoryResult?.ErrorCount ?? 0) +
LayerResults.Sum(r => r.CycloneDxResult?.ErrorCount ?? 0) +
LayerResults.Sum(r => r.SpdxResult?.ErrorCount ?? 0);
/// <summary>
/// Gets the total number of warnings across all validations.
/// </summary>
public int TotalWarningCount =>
(CycloneDxInventoryResult?.WarningCount ?? 0) +
(CycloneDxUsageResult?.WarningCount ?? 0) +
(SpdxInventoryResult?.WarningCount ?? 0) +
LayerResults.Sum(r => r.CycloneDxResult?.WarningCount ?? 0) +
LayerResults.Sum(r => r.SpdxResult?.WarningCount ?? 0);
/// <summary>
/// Gets whether validation was skipped entirely.
/// </summary>
public bool WasSkipped { get; init; }
/// <summary>
/// Creates a successful validation result.
/// </summary>
public static SbomValidationPipelineResult Success(
SbomValidationResult? cycloneDxInventory = null,
SbomValidationResult? cycloneDxUsage = null,
SbomValidationResult? spdxInventory = null,
ImmutableArray<LayerValidationResult>? layerResults = null) =>
new()
{
IsValid = true,
CycloneDxInventoryResult = cycloneDxInventory,
CycloneDxUsageResult = cycloneDxUsage,
SpdxInventoryResult = spdxInventory,
LayerResults = layerResults ?? []
};
/// <summary>
/// Creates a failed validation result.
/// </summary>
public static SbomValidationPipelineResult Failure(
SbomValidationResult? cycloneDxInventory = null,
SbomValidationResult? cycloneDxUsage = null,
SbomValidationResult? spdxInventory = null,
ImmutableArray<LayerValidationResult>? layerResults = null) =>
new()
{
IsValid = false,
CycloneDxInventoryResult = cycloneDxInventory,
CycloneDxUsageResult = cycloneDxUsage,
SpdxInventoryResult = spdxInventory,
LayerResults = layerResults ?? []
};
/// <summary>
/// Creates a skipped validation result.
/// </summary>
public static SbomValidationPipelineResult Skipped() =>
new() { IsValid = true, WasSkipped = true };
}
/// <summary>
/// Validation result for a single layer.
/// </summary>
public sealed record LayerValidationResult
{
/// <summary>
/// Gets the layer identifier (digest or index).
/// </summary>
public required string LayerId { get; init; }
/// <summary>
/// Gets the CycloneDX validation result for this layer.
/// </summary>
public SbomValidationResult? CycloneDxResult { get; init; }
/// <summary>
/// Gets the SPDX validation result for this layer.
/// </summary>
public SbomValidationResult? SpdxResult { get; init; }
/// <summary>
/// Gets whether this layer's validation passed.
/// </summary>
public bool IsValid =>
(CycloneDxResult?.IsValid ?? true) &&
(SpdxResult?.IsValid ?? true);
}
/// <summary>
/// Pipeline for validating generated SBOMs.
/// </summary>
/// <remarks>
/// Sprint: SPRINT_20260107_005_003 Task VG-005
/// This pipeline runs validation after SBOM generation and can optionally
/// fail the composition if validation errors are detected.
/// </remarks>
public sealed class SbomValidationPipeline
{
private readonly ISbomValidator _validator;
private readonly IOptions<SbomValidationPipelineOptions> _options;
private readonly ILogger<SbomValidationPipeline> _logger;
private readonly TimeProvider _timeProvider;
// Metrics
private readonly Counter<long> _validationRuns;
private readonly Counter<long> _validationPassed;
private readonly Counter<long> _validationFailed;
private readonly Counter<long> _validationSkipped;
private readonly Histogram<double> _validationDuration;
/// <summary>
/// Initializes a new instance of the <see cref="SbomValidationPipeline"/> class.
/// </summary>
public SbomValidationPipeline(
ISbomValidator validator,
IOptions<SbomValidationPipelineOptions> options,
ILogger<SbomValidationPipeline> logger,
TimeProvider timeProvider,
IMeterFactory? meterFactory = null)
{
_validator = validator ?? throw new ArgumentNullException(nameof(validator));
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
// Initialize metrics
var meter = meterFactory?.Create("StellaOps.Scanner.Validation") ??
new Meter("StellaOps.Scanner.Validation");
_validationRuns = meter.CreateCounter<long>(
"sbom.validation.runs",
"runs",
"Total number of validation pipeline runs");
_validationPassed = meter.CreateCounter<long>(
"sbom.validation.passed",
"runs",
"Number of validation runs that passed");
_validationFailed = meter.CreateCounter<long>(
"sbom.validation.failed",
"runs",
"Number of validation runs that failed");
_validationSkipped = meter.CreateCounter<long>(
"sbom.validation.skipped",
"runs",
"Number of validation runs that were skipped");
_validationDuration = meter.CreateHistogram<double>(
"sbom.validation.duration",
"ms",
"Duration of validation pipeline execution");
}
/// <summary>
/// Validates a composition result.
/// </summary>
/// <param name="result">The composition result to validate.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The validation pipeline result.</returns>
/// <exception cref="SbomValidationException">
/// Thrown when validation fails and <see cref="SbomValidationPipelineOptions.FailOnError"/> is true.
/// </exception>
public async Task<SbomValidationPipelineResult> ValidateAsync(
SbomCompositionResult result,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(result);
var opts = _options.Value;
var startTime = _timeProvider.GetTimestamp();
_validationRuns.Add(1);
try
{
if (!opts.Enabled)
{
_logger.LogDebug("SBOM validation is disabled, skipping");
_validationSkipped.Add(1);
return SbomValidationPipelineResult.Skipped();
}
_logger.LogInformation("Starting SBOM validation pipeline");
var validationOptions = new SbomValidationOptions
{
Timeout = opts.ValidationTimeout
};
// Validate main SBOMs in parallel
var tasks = new List<Task<(string Name, SbomValidationResult? Result)>>();
if (opts.ValidateCycloneDx)
{
tasks.Add(ValidateCycloneDxAsync(
"CycloneDX-Inventory",
result.Inventory.JsonBytes,
validationOptions,
cancellationToken));
if (result.Usage is not null)
{
tasks.Add(ValidateCycloneDxAsync(
"CycloneDX-Usage",
result.Usage.JsonBytes,
validationOptions,
cancellationToken));
}
}
if (opts.ValidateSpdx && result.SpdxInventory is not null)
{
tasks.Add(ValidateSpdxAsync(
"SPDX-Inventory",
result.SpdxInventory.JsonBytes,
validationOptions,
cancellationToken));
}
var mainResults = await Task.WhenAll(tasks).ConfigureAwait(false);
// Extract results by name
SbomValidationResult? cdxInventory = null;
SbomValidationResult? cdxUsage = null;
SbomValidationResult? spdxInventory = null;
foreach (var (name, validationResult) in mainResults)
{
switch (name)
{
case "CycloneDX-Inventory":
cdxInventory = validationResult;
break;
case "CycloneDX-Usage":
cdxUsage = validationResult;
break;
case "SPDX-Inventory":
spdxInventory = validationResult;
break;
}
}
// Validate layer SBOMs if present
var layerResults = await ValidateLayersAsync(
result.LayerSbomArtifacts,
validationOptions,
opts,
cancellationToken).ConfigureAwait(false);
// Determine overall validity
var allValid =
(cdxInventory?.IsValid ?? true) &&
(cdxUsage?.IsValid ?? true) &&
(spdxInventory?.IsValid ?? true) &&
layerResults.All(r => r.IsValid);
var pipelineResult = allValid
? SbomValidationPipelineResult.Success(cdxInventory, cdxUsage, spdxInventory, layerResults)
: SbomValidationPipelineResult.Failure(cdxInventory, cdxUsage, spdxInventory, layerResults);
// Log summary
LogValidationSummary(pipelineResult);
// Update metrics
if (allValid)
{
_validationPassed.Add(1);
}
else
{
_validationFailed.Add(1);
}
// Throw if configured to fail on error
if (!allValid && opts.FailOnError)
{
throw new SbomValidationException(
$"SBOM validation failed with {pipelineResult.TotalErrorCount} error(s)",
pipelineResult);
}
return pipelineResult;
}
finally
{
var elapsed = _timeProvider.GetElapsedTime(startTime);
_validationDuration.Record(elapsed.TotalMilliseconds);
}
}
private async Task<(string Name, SbomValidationResult? Result)> ValidateCycloneDxAsync(
string name,
byte[] content,
SbomValidationOptions options,
CancellationToken cancellationToken)
{
try
{
_logger.LogDebug("Validating {Name} ({Size} bytes)", name, content.Length);
var result = await _validator.ValidateAsync(
content,
SbomFormat.CycloneDxJson,
options,
cancellationToken).ConfigureAwait(false);
LogValidationResult(name, result);
return (name, result);
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
_logger.LogWarning(ex, "Failed to validate {Name}: {Message}", name, ex.Message);
return (name, SbomValidationResult.ValidatorUnavailable(
SbomFormat.CycloneDxJson,
"CycloneDX",
ex.Message));
}
}
private async Task<(string Name, SbomValidationResult? Result)> ValidateSpdxAsync(
string name,
byte[] content,
SbomValidationOptions options,
CancellationToken cancellationToken)
{
try
{
_logger.LogDebug("Validating {Name} ({Size} bytes)", name, content.Length);
var result = await _validator.ValidateAsync(
content,
SbomFormat.Spdx3JsonLd,
options,
cancellationToken).ConfigureAwait(false);
LogValidationResult(name, result);
return (name, result);
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
_logger.LogWarning(ex, "Failed to validate {Name}: {Message}", name, ex.Message);
return (name, SbomValidationResult.ValidatorUnavailable(
SbomFormat.Spdx3JsonLd,
"SPDX",
ex.Message));
}
}
private async Task<ImmutableArray<LayerValidationResult>> ValidateLayersAsync(
ImmutableArray<LayerSbomArtifact> layerArtifacts,
SbomValidationOptions options,
SbomValidationPipelineOptions pipelineOptions,
CancellationToken cancellationToken)
{
if (layerArtifacts.IsDefaultOrEmpty)
{
return [];
}
_logger.LogDebug("Validating {Count} layer SBOMs", layerArtifacts.Length);
var results = new List<LayerValidationResult>();
foreach (var layer in layerArtifacts)
{
SbomValidationResult? cdxResult = null;
SbomValidationResult? spdxResult = null;
if (pipelineOptions.ValidateCycloneDx && layer.CycloneDxJsonBytes is not null)
{
var (_, result) = await ValidateCycloneDxAsync(
$"Layer-{layer.LayerDigest}-CDX",
layer.CycloneDxJsonBytes,
options,
cancellationToken).ConfigureAwait(false);
cdxResult = result;
}
if (pipelineOptions.ValidateSpdx && layer.SpdxJsonBytes is not null)
{
var (_, result) = await ValidateSpdxAsync(
$"Layer-{layer.LayerDigest}-SPDX",
layer.SpdxJsonBytes,
options,
cancellationToken).ConfigureAwait(false);
spdxResult = result;
}
results.Add(new LayerValidationResult
{
LayerId = layer.LayerDigest,
CycloneDxResult = cdxResult,
SpdxResult = spdxResult
});
}
return [.. results];
}
private void LogValidationResult(string name, SbomValidationResult result)
{
if (result.IsValid)
{
if (result.WarningCount > 0)
{
_logger.LogInformation(
"{Name} validation passed with {WarningCount} warning(s)",
name,
result.WarningCount);
}
else
{
_logger.LogInformation("{Name} validation passed", name);
}
}
else
{
_logger.LogWarning(
"{Name} validation failed with {ErrorCount} error(s), {WarningCount} warning(s)",
name,
result.ErrorCount,
result.WarningCount);
foreach (var diagnostic in result.Diagnostics.Where(d => d.Severity == SbomValidationSeverity.Error))
{
_logger.LogWarning(" [{Code}] {Message}", diagnostic.Code, diagnostic.Message);
}
}
}
private void LogValidationSummary(SbomValidationPipelineResult result)
{
var sb = new StringBuilder();
sb.AppendLine("SBOM validation summary:");
if (result.CycloneDxInventoryResult is not null)
{
sb.AppendLine($" - CycloneDX Inventory: {(result.CycloneDxInventoryResult.IsValid ? "PASSED" : "FAILED")}");
}
if (result.CycloneDxUsageResult is not null)
{
sb.AppendLine($" - CycloneDX Usage: {(result.CycloneDxUsageResult.IsValid ? "PASSED" : "FAILED")}");
}
if (result.SpdxInventoryResult is not null)
{
sb.AppendLine($" - SPDX Inventory: {(result.SpdxInventoryResult.IsValid ? "PASSED" : "FAILED")}");
}
if (!result.LayerResults.IsDefaultOrEmpty)
{
var passedLayers = result.LayerResults.Count(r => r.IsValid);
sb.AppendLine($" - Layers: {passedLayers}/{result.LayerResults.Length} passed");
}
sb.AppendLine($" Total errors: {result.TotalErrorCount}");
sb.AppendLine($" Total warnings: {result.TotalWarningCount}");
_logger.LogInformation("{ValidationSummary}", sb.ToString());
}
}
/// <summary>
/// Exception thrown when SBOM validation fails.
/// </summary>
public sealed class SbomValidationException : Exception
{
/// <summary>
/// Gets the validation pipeline result.
/// </summary>
public SbomValidationPipelineResult? Result { get; }
/// <summary>
/// Initializes a new instance of the <see cref="SbomValidationException"/> class.
/// </summary>
public SbomValidationException(string message)
: base(message)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="SbomValidationException"/> class.
/// </summary>
public SbomValidationException(string message, SbomValidationPipelineResult result)
: base(message)
{
Result = result;
}
/// <summary>
/// Initializes a new instance of the <see cref="SbomValidationException"/> class.
/// </summary>
public SbomValidationException(string message, Exception innerException)
: base(message, innerException)
{
}
}
/// <summary>
/// Extension methods for registering the validation pipeline.
/// </summary>
public static class SbomValidationPipelineExtensions
{
/// <summary>
/// Adds the SBOM validation pipeline to the service collection.
/// </summary>
public static IServiceCollection AddSbomValidationPipeline(
this IServiceCollection services,
Action<SbomValidationPipelineOptions>? configure = null)
{
services.AddOptions<SbomValidationPipelineOptions>()
.Configure(configure ?? (_ => { }))
.ValidateDataAnnotations()
.ValidateOnStart();
services.AddSingleton<SbomValidationPipeline>();
return services;
}
}
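
For orientation only (not part of the diff above), a minimal sketch of how the pipeline might be registered and invoked from a host. The ISbomValidator registration, the SbomCompositionResult instance, and the namespaces are assumptions inferred from the files shown here; the concrete wiring lives in the surrounding Scanner composition code.

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Emit.Composition;

static async Task ValidateCompositionAsync(SbomCompositionResult compositionResult)
{
    var services = new ServiceCollection();
    services.AddLogging();
    services.AddSingleton(TimeProvider.System);
    // A concrete ISbomValidator from StellaOps.Scanner.Validation must also be registered;
    // the exact implementation type depends on the host and is omitted here.
    services.AddSbomValidationPipeline(o =>
    {
        o.FailOnError = true;
        o.ValidationTimeout = TimeSpan.FromSeconds(30);
    });

    await using var provider = services.BuildServiceProvider();
    var pipeline = provider.GetRequiredService<SbomValidationPipeline>();

    try
    {
        var report = await pipeline.ValidateAsync(compositionResult);
        Console.WriteLine($"Valid: {report.IsValid}, errors: {report.TotalErrorCount}, warnings: {report.TotalWarningCount}");
    }
    catch (SbomValidationException ex)
    {
        // With FailOnError enabled, failures surface as an exception carrying the full pipeline result.
        Console.WriteLine($"Validation failed: {ex.Result?.TotalErrorCount} error(s)");
    }
}

Keeping FailOnError configurable lets a CI gate treat schema violations as hard failures while a local or exploratory run can merely log them and continue.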

View File

@@ -3,11 +3,12 @@
// </copyright>
using CycloneDX.Models;
using CdxPedigree = CycloneDX.Models.Pedigree;
namespace StellaOps.Scanner.Emit.Pedigree;
/// <summary>
/// Maps <see cref="PedigreeData"/> to CycloneDX <see cref="Pedigree"/> model.
/// Maps <see cref="PedigreeData"/> to CycloneDX <see cref="CdxPedigree"/> model.
/// Sprint: SPRINT_20260107_005_002 Task PD-003
/// </summary>
public sealed class CycloneDxPedigreeMapper
@@ -17,14 +18,14 @@ public sealed class CycloneDxPedigreeMapper
/// </summary>
/// <param name="data">The pedigree data to map.</param>
/// <returns>CycloneDX pedigree model, or null if no data.</returns>
public Pedigree? Map(PedigreeData? data)
public CdxPedigree? Map(PedigreeData? data)
{
if (data is null || !data.HasData)
{
return null;
}
return new Pedigree
return new CdxPedigree
{
Ancestors = MapAncestors(data.Ancestors),
Variants = MapVariants(data.Variants),
@@ -158,7 +159,7 @@ public sealed class CycloneDxPedigreeMapper
{
Name = actor.Name,
Email = actor.Email,
Timestamp = actor.Timestamp
Timestamp = actor.Timestamp?.UtcDateTime
};
}

View File

@@ -10,6 +10,7 @@
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Validation\StellaOps.Scanner.Validation.csproj" />
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
</ItemGroup>

View File

@@ -10,6 +10,7 @@ using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Encodings.Web;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -28,7 +29,8 @@ public sealed class DriftAttestationService : IDriftAttestationService
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
private readonly IDriftSignerClient? _signerClient;

View File

@@ -0,0 +1,242 @@
// <copyright file="FingerprintGeneratorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
using StellaOps.Scanner.Sarif.Fingerprints;
using StellaOps.Scanner.Sarif.Rules;
using Xunit;
namespace StellaOps.Scanner.Sarif.Tests;
/// <summary>
/// Tests for <see cref="FingerprintGenerator"/>.
/// </summary>
[Trait("Category", "Unit")]
public class FingerprintGeneratorTests
{
private readonly FingerprintGenerator _generator;
public FingerprintGeneratorTests()
{
_generator = new FingerprintGenerator(new SarifRuleRegistry());
}
[Fact]
public void GeneratePrimary_Standard_ReturnsDeterministicFingerprint()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High
};
// Act
var fp1 = _generator.GeneratePrimary(finding, FingerprintStrategy.Standard);
var fp2 = _generator.GeneratePrimary(finding, FingerprintStrategy.Standard);
// Assert
fp1.Should().NotBeNullOrEmpty();
fp1.Should().Be(fp2, "fingerprints should be deterministic");
fp1.Should().HaveLength(64, "should be SHA-256 hex string");
}
[Fact]
public void GeneratePrimary_DifferentFindings_ProduceDifferentFingerprints()
{
// Arrange
var finding1 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability 1",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High
};
var finding2 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability 2",
VulnerabilityId = "CVE-2024-5678",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High
};
// Act
var fp1 = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
var fp2 = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);
// Assert
fp1.Should().NotBe(fp2);
}
[Fact]
public void GeneratePrimary_Minimal_UsesFewerFields()
{
// Arrange
var finding1 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High
};
var finding2 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/express@4.18.0", // Different component
Severity = Severity.High
};
// Act
var fp1Standard = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
var fp2Standard = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);
var fp1Minimal = _generator.GeneratePrimary(finding1, FingerprintStrategy.Minimal);
var fp2Minimal = _generator.GeneratePrimary(finding2, FingerprintStrategy.Minimal);
// Assert
fp1Standard.Should().NotBe(fp2Standard, "standard fingerprints differ by component");
fp1Minimal.Should().Be(fp2Minimal, "minimal fingerprints ignore component");
}
[Fact]
public void GeneratePrimary_Extended_IncludesReachabilityAndVex()
{
// Arrange
// Use reachability statuses that don't affect the rule ID
var finding1 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High,
Reachability = ReachabilityStatus.Unknown,
VexStatus = VexStatus.Affected
};
var finding2 = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
Severity = Severity.High,
Reachability = ReachabilityStatus.Contested,
VexStatus = VexStatus.NotAffected
};
// Act
var fp1Standard = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
var fp2Standard = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);
var fp1Extended = _generator.GeneratePrimary(finding1, FingerprintStrategy.Extended);
var fp2Extended = _generator.GeneratePrimary(finding2, FingerprintStrategy.Extended);
// Assert
fp1Standard.Should().Be(fp2Standard, "standard fingerprints ignore reachability/vex");
fp1Extended.Should().NotBe(fp2Extended, "extended fingerprints include reachability/vex");
}
[Fact]
public void GeneratePartial_WithComponent_IncludesComponentFingerprint()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
ComponentPurl = "pkg:npm/lodash@4.17.20"
};
// Act
var partials = _generator.GeneratePartial(finding);
// Assert
partials.Should().ContainKey("stellaops/component/v1");
}
[Fact]
public void GeneratePartial_WithVulnerability_IncludesVulnFingerprint()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
VulnerabilityId = "CVE-2024-1234"
};
// Act
var partials = _generator.GeneratePartial(finding);
// Assert
partials.Should().ContainKey("stellaops/vuln/v1");
}
[Fact]
public void GeneratePartial_WithLocation_IncludesLocationFingerprint()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
FilePath = "src/app.ts",
StartLine = 42
};
// Act
var partials = _generator.GeneratePartial(finding);
// Assert
partials.Should().ContainKey("primaryLocationLineHash/v1");
}
[Fact]
public void GeneratePartial_Secret_IncludesTitleFingerprint()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Secret,
Title = "AWS Access Key"
};
// Act
var partials = _generator.GeneratePartial(finding);
// Assert
partials.Should().ContainKey("stellaops/title/v1");
}
[Fact]
public void GeneratePartial_SameInputs_ProduceDeterministicResults()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
FilePath = "src/app.ts",
StartLine = 42
};
// Act
var partials1 = _generator.GeneratePartial(finding);
var partials2 = _generator.GeneratePartial(finding);
// Assert
partials1.Should().BeEquivalentTo(partials2);
}
}

View File

@@ -0,0 +1,463 @@
// <copyright file="SarifExportServiceTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Sarif.Fingerprints;
using StellaOps.Scanner.Sarif.Models;
using StellaOps.Scanner.Sarif.Rules;
using Xunit;
namespace StellaOps.Scanner.Sarif.Tests;
/// <summary>
/// Tests for <see cref="SarifExportService"/>.
/// </summary>
[Trait("Category", "Unit")]
public class SarifExportServiceTests
{
private readonly SarifExportService _service;
private readonly FakeTimeProvider _timeProvider;
public SarifExportServiceTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero));
var ruleRegistry = new SarifRuleRegistry();
var fingerprintGenerator = new FingerprintGenerator(ruleRegistry);
_service = new SarifExportService(ruleRegistry, fingerprintGenerator, _timeProvider);
}
[Fact]
public async Task ExportAsync_EmptyFindings_ReturnsValidSarifLog()
{
// Arrange
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync([], options, TestContext.Current.CancellationToken);
// Assert
log.Should().NotBeNull();
log.Version.Should().Be("2.1.0");
log.Schema.Should().Contain("sarif-schema-2.1.0.json");
log.Runs.Should().HaveCount(1);
log.Runs[0].Results.Should().BeEmpty();
}
[Fact]
public async Task ExportAsync_SingleVulnerability_MapsCorrectly()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Remote Code Execution",
VulnerabilityId = "CVE-2024-1234",
ComponentPurl = "pkg:npm/lodash@4.17.20",
ComponentName = "lodash",
ComponentVersion = "4.17.20",
Severity = Severity.Critical,
CvssScore = 9.8,
CvssVector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
FilePath = "package.json",
StartLine = 10
}
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
log.Runs.Should().HaveCount(1);
var run = log.Runs[0];
// Check tool
run.Tool.Driver.Name.Should().Be("StellaOps Scanner");
run.Tool.Driver.Version.Should().Be("1.0.0");
run.Tool.Driver.Rules.Should().NotBeNull();
run.Tool.Driver.Rules!.Value.Should().Contain(r => r.Id == "STELLA-VULN-001");
// Check result
run.Results.Should().HaveCount(1);
var result = run.Results[0];
result.RuleId.Should().Be("STELLA-VULN-001");
result.Level.Should().Be(SarifLevel.Error);
result.Message.Text.Should().Contain("CVE-2024-1234");
result.Message.Text.Should().Contain("lodash@4.17.20");
result.Fingerprints.Should().ContainKey("stellaops/v1");
result.Properties.Should().ContainKey("stellaops/cvss/score");
}
[Fact]
public async Task ExportAsync_WithMinimumSeverity_FiltersResults()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Critical vuln",
Severity = Severity.Critical
},
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Low vuln",
Severity = Severity.Low
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
MinimumSeverity = Severity.High
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
log.Runs[0].Results.Should().HaveCount(1);
log.Runs[0].Results[0].RuleId.Should().Be("STELLA-VULN-001");
}
[Fact]
public async Task ExportAsync_WithVersionControl_IncludesProvenance()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
VersionControl = new VersionControlInfo
{
RepositoryUri = "https://github.com/org/repo",
RevisionId = "abc123",
Branch = "main"
}
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
log.Runs[0].VersionControlProvenance.Should().NotBeNull();
log.Runs[0].VersionControlProvenance!.Value.Should().HaveCount(1);
var vcs = log.Runs[0].VersionControlProvenance!.Value[0];
vcs.RepositoryUri.Should().Be("https://github.com/org/repo");
vcs.RevisionId.Should().Be("abc123");
vcs.Branch.Should().Be("main");
}
[Fact]
public async Task ExportAsync_WithReachability_IncludesInProperties()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium,
Reachability = ReachabilityStatus.RuntimeReachable
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
IncludeReachability = true
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.Properties.Should().ContainKey("stellaops/reachability");
result.Properties!["stellaops/reachability"].Should().Be("RuntimeReachable");
}
[Fact]
public async Task ExportAsync_WithVexStatus_IncludesInProperties()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium,
VexStatus = VexStatus.NotAffected,
VexJustification = "component_not_present"
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
IncludeVexStatus = true
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.Properties.Should().ContainKey("stellaops/vex/status");
result.Properties.Should().ContainKey("stellaops/vex/justification");
}
[Fact]
public async Task ExportAsync_WithKev_IncludesInProperties()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium,
IsKev = true
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
IncludeKev = true
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.Properties.Should().ContainKey("stellaops/kev");
result.Properties!["stellaops/kev"].Should().Be(true);
}
[Fact]
public async Task ExportAsync_SecretFinding_MapsCorrectly()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Secret,
Title = "AWS Access Key detected",
FilePath = "config/settings.py",
StartLine = 42,
StartColumn = 10,
EndLine = 42,
EndColumn = 30
}
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.RuleId.Should().Be("STELLA-SEC-001");
result.Level.Should().Be(SarifLevel.Error);
result.Locations.Should().NotBeNull();
result.Locations!.Value.Should().HaveCount(1);
var location = result.Locations!.Value[0];
location.PhysicalLocation!.ArtifactLocation.Uri.Should().Be("config/settings.py");
location.PhysicalLocation!.Region!.StartLine.Should().Be(42);
}
[Fact]
public async Task ExportToJsonAsync_ProducesValidJson()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
Severity = Severity.High
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
IndentedJson = true
};
// Act
var json = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
json.Should().NotBeNullOrEmpty();
// Validate it's parseable JSON
var doc = JsonDocument.Parse(json);
doc.RootElement.GetProperty("version").GetString().Should().Be("2.1.0");
doc.RootElement.GetProperty("$schema").GetString().Should().Contain("sarif");
doc.RootElement.GetProperty("runs").GetArrayLength().Should().Be(1);
}
[Fact]
public async Task ExportToStreamAsync_WritesToStream()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium
}
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
using var stream = new MemoryStream();
// Act
await _service.ExportToStreamAsync(findings, options, stream, TestContext.Current.CancellationToken);
// Assert
stream.Length.Should().BeGreaterThan(0);
stream.Position = 0;
using var reader = new StreamReader(stream);
var json = await reader.ReadToEndAsync(TestContext.Current.CancellationToken);
json.Should().Contain("\"version\":\"2.1.0\"");
}
[Fact]
public async Task ExportAsync_ResultsAreSortedDeterministically()
{
// Arrange
var findings = new[]
{
new FindingInput { Type = FindingType.Vulnerability, Title = "Z", Severity = Severity.Low },
new FindingInput { Type = FindingType.Secret, Title = "A" },
new FindingInput { Type = FindingType.Vulnerability, Title = "M", Severity = Severity.Critical }
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log1 = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
var log2 = await _service.ExportAsync(findings.Reverse(), options, TestContext.Current.CancellationToken);
// Assert
var ruleIds1 = log1.Runs[0].Results.Select(r => r.RuleId).ToList();
var ruleIds2 = log2.Runs[0].Results.Select(r => r.RuleId).ToList();
ruleIds1.Should().Equal(ruleIds2, "results should be sorted deterministically regardless of input order");
}
[Fact]
public async Task ExportAsync_PathNormalization_RemovesSourceRoot()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium,
FilePath = "C:\\workspace\\src\\app.ts"
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
SourceRoot = "C:\\workspace"
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var location = log.Runs[0].Results[0].Locations!.Value[0];
location.PhysicalLocation!.ArtifactLocation.Uri.Should().Be("src/app.ts");
}
[Fact]
public async Task ExportAsync_IncludesInvocationTimestamp()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium
}
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var invocations = log.Runs[0].Invocations;
invocations.Should().NotBeNull();
invocations!.Value.Should().HaveCount(1);
var invocation = invocations!.Value[0];
invocation.ExecutionSuccessful.Should().BeTrue();
invocation.StartTimeUtc.Should().Be(_timeProvider.GetUtcNow());
}
[Fact]
public async Task ExportAsync_WithCategory_IncludesGitHubAlertCategory()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Medium
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
Category = "security"
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.Properties.Should().ContainKey("github/alertCategory");
result.Properties!["github/alertCategory"].Should().Be("security");
}
}

View File

@@ -0,0 +1,312 @@
// <copyright file="SarifGoldenFixtureTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Sarif.Fingerprints;
using StellaOps.Scanner.Sarif.Models;
using StellaOps.Scanner.Sarif.Rules;
using Xunit;
namespace StellaOps.Scanner.Sarif.Tests;
/// <summary>
/// Golden fixture tests for SARIF export validation.
/// These tests ensure the generated SARIF matches the expected structure and is valid.
/// </summary>
[Trait("Category", "Unit")]
public class SarifGoldenFixtureTests
{
private readonly SarifExportService _service;
private readonly FakeTimeProvider _timeProvider;
public SarifGoldenFixtureTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero));
var ruleRegistry = new SarifRuleRegistry();
var fingerprintGenerator = new FingerprintGenerator(ruleRegistry);
_service = new SarifExportService(ruleRegistry, fingerprintGenerator, _timeProvider);
}
[Fact]
public async Task GoldenFixture_SingleVulnerability_ValidStructure()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "SQL Injection in user input handler",
VulnerabilityId = "CVE-2024-12345",
ComponentPurl = "pkg:npm/mysql@2.18.0",
ComponentName = "mysql",
ComponentVersion = "2.18.0",
Severity = Severity.High,
CvssScore = 8.5,
CvssVector = "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:N",
FilePath = "src/db/connection.js",
StartLine = 42
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
ToolName = "StellaOps Scanner"
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert - SARIF 2.1.0 structure requirements
log.Version.Should().Be("2.1.0");
log.Schema.Should().Contain("sarif-schema");
log.Runs.Should().HaveCount(1);
var run = log.Runs[0];
run.Tool.Should().NotBeNull();
run.Tool.Driver.Should().NotBeNull();
run.Tool.Driver.Name.Should().Be("StellaOps Scanner");
run.Tool.Driver.Version.Should().Be("1.0.0");
run.Tool.Driver.InformationUri.Should().NotBeNull();
run.Tool.Driver.Rules.Should().NotBeNull();
run.Results.Should().HaveCount(1);
var result = run.Results[0];
result.RuleId.Should().StartWith("STELLA-");
result.Level.Should().Be(SarifLevel.Warning); // High severity maps to warning
result.Message.Should().NotBeNull();
result.Message.Text.Should().Contain("SQL Injection");
// Location validation
result.Locations.Should().NotBeNull();
result.Locations!.Value.Should().HaveCountGreaterThan(0);
var location = result.Locations!.Value[0];
location.PhysicalLocation.Should().NotBeNull();
location.PhysicalLocation!.ArtifactLocation.Should().NotBeNull();
location.PhysicalLocation.ArtifactLocation!.Uri.Should().Be("src/db/connection.js");
location.PhysicalLocation.Region.Should().NotBeNull();
location.PhysicalLocation.Region!.StartLine.Should().Be(42);
// Fingerprint validation
result.PartialFingerprints.Should().NotBeNull();
result.PartialFingerprints.Should().ContainKey("primaryLocationLineHash");
}
[Fact]
public async Task GoldenFixture_MixedSeverities_CorrectLevelMapping()
{
// Arrange
var findings = new[]
{
CreateFinding("CVE-2024-0001", "Critical Finding", Severity.Critical, 10.0),
CreateFinding("CVE-2024-0002", "High Finding", Severity.High, 8.0),
CreateFinding("CVE-2024-0003", "Medium Finding", Severity.Medium, 5.0),
CreateFinding("CVE-2024-0004", "Low Finding", Severity.Low, 2.0)
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
log.Runs[0].Results.Should().HaveCount(4);
var results = log.Runs[0].Results;
results[0].Level.Should().Be(SarifLevel.Error); // Critical -> Error
results[1].Level.Should().Be(SarifLevel.Warning); // High -> Warning
results[2].Level.Should().Be(SarifLevel.Warning); // Medium -> Warning
results[3].Level.Should().Be(SarifLevel.Note); // Low -> Note
}
[Fact]
public async Task GoldenFixture_WithReachabilityData_IncludesProperties()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Prototype Pollution",
VulnerabilityId = "CVE-2024-5678",
ComponentPurl = "pkg:npm/lodash@4.17.20",
ComponentName = "lodash",
ComponentVersion = "4.17.20",
Severity = Severity.High,
CvssScore = 7.5,
FilePath = "package-lock.json",
StartLine = 100,
Reachability = ReachabilityStatus.StaticReachable
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0",
IncludeReachability = true
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
// Properties should be set when reachability data is included
result.Should().NotBeNull();
}
[Fact]
public async Task GoldenFixture_WithVexStatus_IncludesData()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Known but not affected",
VulnerabilityId = "CVE-2024-9999",
ComponentPurl = "pkg:npm/test@1.0.0",
ComponentName = "test",
ComponentVersion = "1.0.0",
Severity = Severity.Medium,
CvssScore = 5.0,
FilePath = "package.json",
VexStatus = VexStatus.NotAffected,
VexJustification = "vulnerable_code_not_present"
}
};
var options = new SarifExportOptions
{
ToolVersion = "1.0.0"
};
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.Should().NotBeNull();
// VEX data should be captured somehow in the result
}
[Fact]
public async Task GoldenFixture_SecretFinding_UsesCorrectRule()
{
// Arrange
var findings = new[]
{
new FindingInput
{
Type = FindingType.Secret,
Title = "AWS Access Key Exposed",
FilePath = "config/settings.py",
StartLine = 15,
Severity = Severity.Critical
}
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
var result = log.Runs[0].Results[0];
result.RuleId.Should().StartWith("STELLA-SEC-");
result.Level.Should().Be(SarifLevel.Error); // Secrets are always error level
}
[Fact]
public async Task GoldenFixture_LargeBatch_ProcessesEfficiently()
{
// Arrange - Create 100 findings
var findings = Enumerable.Range(1, 100)
.Select(i => CreateFinding(
$"CVE-2024-{i:D5}",
$"Finding {i}",
(Severity)(i % 4 + 1),
(i % 10) + 1.0))
.ToArray();
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
// Assert
log.Runs[0].Results.Should().HaveCount(100);
// All should have unique fingerprints
var fingerprints = log.Runs[0].Results
.Where(r => r.PartialFingerprints != null)
.Select(r => r.PartialFingerprints!.GetValueOrDefault("primaryLocationLineHash"))
.Where(f => f != null)
.ToList();
fingerprints.Distinct().Count().Should().Be(fingerprints.Count);
}
[Fact]
public async Task GoldenFixture_JsonSerialization_ValidJson()
{
// Arrange
var findings = new[]
{
CreateFinding("CVE-2024-TEST", "Test vulnerability", Severity.Medium, 5.0)
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act
var json = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);
// Assert - Should be valid JSON
var parsed = JsonDocument.Parse(json);
parsed.RootElement.GetProperty("version").GetString().Should().Be("2.1.0");
parsed.RootElement.GetProperty("$schema").GetString().Should().Contain("sarif");
parsed.RootElement.GetProperty("runs").GetArrayLength().Should().Be(1);
}
[Fact]
public async Task GoldenFixture_DeterministicOutput_SameInputSameOutput()
{
// Arrange
var findings = new[]
{
CreateFinding("CVE-2024-DET", "Determinism test", Severity.High, 7.5)
};
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
// Act - Export twice
var json1 = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);
var json2 = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);
// Assert - Should be identical
json1.Should().Be(json2);
}
private static FindingInput CreateFinding(string cveId, string title, Severity severity, double cvssScore)
{
return new FindingInput
{
Type = FindingType.Vulnerability,
Title = title,
VulnerabilityId = cveId,
ComponentPurl = $"pkg:npm/test-{cveId}@1.0.0",
ComponentName = $"test-{cveId}",
ComponentVersion = "1.0.0",
Severity = severity,
CvssScore = cvssScore,
FilePath = $"package-{cveId}.json",
StartLine = Math.Abs(cveId.GetHashCode() % 1000) + 1
};
}
}

View File

@@ -0,0 +1,255 @@
// <copyright file="SarifRuleRegistryTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
using StellaOps.Scanner.Sarif.Models;
using StellaOps.Scanner.Sarif.Rules;
using Xunit;
namespace StellaOps.Scanner.Sarif.Tests;
/// <summary>
/// Tests for <see cref="SarifRuleRegistry"/>.
/// </summary>
[Trait("Category", "Unit")]
public class SarifRuleRegistryTests
{
private readonly SarifRuleRegistry _registry = new();
[Theory]
[InlineData(Severity.Critical, "STELLA-VULN-001")]
[InlineData(Severity.High, "STELLA-VULN-002")]
[InlineData(Severity.Medium, "STELLA-VULN-003")]
[InlineData(Severity.Low, "STELLA-VULN-004")]
public void GetRuleId_Vulnerability_MapsSeverityCorrectly(Severity severity, string expectedRuleId)
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
Severity = severity
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be(expectedRuleId);
}
[Fact]
public void GetRuleId_RuntimeReachable_ReturnsReachabilityRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
Severity = Severity.Low,
Reachability = ReachabilityStatus.RuntimeReachable
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-VULN-005");
}
[Fact]
public void GetRuleId_StaticReachable_ReturnsStaticReachabilityRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test vulnerability",
Severity = Severity.Low,
Reachability = ReachabilityStatus.StaticReachable
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-VULN-006");
}
[Fact]
public void GetRuleId_Secret_ReturnsSecretRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Secret,
Title = "API key detected"
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-SEC-001");
}
[Fact]
public void GetRuleId_PrivateKey_ReturnsPrivateKeyRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Secret,
Title = "Private key exposed in repository"
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-SEC-002");
}
[Fact]
public void GetRuleId_SupplyChain_ReturnsSupplyChainRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.SupplyChain,
Title = "Unsigned package"
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-SC-001");
}
[Fact]
public void GetRuleId_Typosquat_ReturnsTyposquatRule()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.SupplyChain,
Title = "Potential typosquat: lodasj"
};
// Act
var ruleId = _registry.GetRuleId(finding);
// Assert
ruleId.Should().Be("STELLA-SC-003");
}
[Theory]
[InlineData(Severity.Critical, SarifLevel.Error)]
[InlineData(Severity.High, SarifLevel.Error)]
[InlineData(Severity.Medium, SarifLevel.Warning)]
[InlineData(Severity.Low, SarifLevel.Note)]
public void GetLevel_MapsSeverityToLevel(Severity severity, SarifLevel expectedLevel)
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = severity
};
// Act
var level = _registry.GetLevel(finding);
// Assert
level.Should().Be(expectedLevel);
}
[Fact]
public void GetLevel_KevElevates_ToError()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Low,
IsKev = true
};
// Act
var level = _registry.GetLevel(finding);
// Assert
level.Should().Be(SarifLevel.Error);
}
[Fact]
public void GetLevel_RuntimeReachable_ElevatesToError()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Test",
Severity = Severity.Low,
Reachability = ReachabilityStatus.RuntimeReachable
};
// Act
var level = _registry.GetLevel(finding);
// Assert
level.Should().Be(SarifLevel.Error);
}
[Fact]
public void GetAllRules_ReturnsAllDefinedRules()
{
// Act
var rules = _registry.GetAllRules();
// Assert
rules.Should().NotBeEmpty();
rules.Should().Contain(r => r.Id == "STELLA-VULN-001");
rules.Should().Contain(r => r.Id == "STELLA-SEC-001");
rules.Should().Contain(r => r.Id == "STELLA-SC-001");
rules.Should().Contain(r => r.Id == "STELLA-BIN-001");
}
[Fact]
public void GetRulesByType_Vulnerability_ReturnsVulnerabilityRules()
{
// Act
var rules = _registry.GetRulesByType(FindingType.Vulnerability);
// Assert
rules.Should().NotBeEmpty();
rules.Should().OnlyContain(r => r.Id.StartsWith("STELLA-VULN-", StringComparison.Ordinal));
}
[Fact]
public void GetRule_ReturnsRuleDefinition()
{
// Arrange
var finding = new FindingInput
{
Type = FindingType.Vulnerability,
Title = "Critical CVE",
Severity = Severity.Critical
};
// Act
var rule = _registry.GetRule(finding);
// Assert
rule.Should().NotBeNull();
rule.Id.Should().Be("STELLA-VULN-001");
rule.Name.Should().Be("CriticalVulnerability");
rule.ShortDescription.Should().NotBeNull();
rule.DefaultConfiguration.Should().NotBeNull();
rule.DefaultConfiguration!.Level.Should().Be(SarifLevel.Error);
}
}

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Moq" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Sarif\StellaOps.Scanner.Sarif.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,208 @@
// <copyright file="FindingInput.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Scanner.Sarif;
/// <summary>
/// Input model for a finding to be exported to SARIF.
/// Sprint: SPRINT_20260109_010_001 Task: Implement findings mapper
/// </summary>
public sealed record FindingInput
{
/// <summary>
/// Gets the finding type.
/// </summary>
public required FindingType Type { get; init; }
/// <summary>
/// Gets the vulnerability ID (CVE, GHSA, etc.) if applicable.
/// </summary>
public string? VulnerabilityId { get; init; }
/// <summary>
/// Gets the component Package URL.
/// </summary>
public string? ComponentPurl { get; init; }
/// <summary>
/// Gets the component name.
/// </summary>
public string? ComponentName { get; init; }
/// <summary>
/// Gets the component version.
/// </summary>
public string? ComponentVersion { get; init; }
/// <summary>
/// Gets the severity.
/// </summary>
public Severity Severity { get; init; } = Severity.Unknown;
/// <summary>
/// Gets the CVSS v3 score (0.0-10.0).
/// </summary>
public double? CvssScore { get; init; }
/// <summary>
/// Gets the CVSS v3 vector string.
/// </summary>
public string? CvssVector { get; init; }
/// <summary>
/// Gets the EPSS probability (0.0-1.0).
/// </summary>
public double? EpssProbability { get; init; }
/// <summary>
/// Gets the EPSS percentile (0.0-1.0).
/// </summary>
public double? EpssPercentile { get; init; }
/// <summary>
/// Gets whether this is in the KEV catalog.
/// </summary>
public bool IsKev { get; init; }
/// <summary>
/// Gets the finding title/summary.
/// </summary>
public required string Title { get; init; }
/// <summary>
/// Gets the detailed description.
/// </summary>
public string? Description { get; init; }
/// <summary>
/// Gets the recommendation/remediation.
/// </summary>
public string? Recommendation { get; init; }
/// <summary>
/// Gets the file path where the finding was detected.
/// </summary>
public string? FilePath { get; init; }
/// <summary>
/// Gets the start line number (1-based).
/// </summary>
public int? StartLine { get; init; }
/// <summary>
/// Gets the end line number (1-based).
/// </summary>
public int? EndLine { get; init; }
/// <summary>
/// Gets the start column (1-based).
/// </summary>
public int? StartColumn { get; init; }
/// <summary>
/// Gets the end column (1-based).
/// </summary>
public int? EndColumn { get; init; }
/// <summary>
/// Gets the artifact digest (sha256:...).
/// </summary>
public string? ArtifactDigest { get; init; }
/// <summary>
/// Gets the reachability status.
/// </summary>
public ReachabilityStatus? Reachability { get; init; }
/// <summary>
/// Gets the VEX status.
/// </summary>
public VexStatus? VexStatus { get; init; }
/// <summary>
/// Gets VEX justification.
/// </summary>
public string? VexJustification { get; init; }
/// <summary>
/// Gets StellaOps evidence URIs.
/// </summary>
public IReadOnlyList<string>? EvidenceUris { get; init; }
/// <summary>
/// Gets attestation digests.
/// </summary>
public IReadOnlyList<string>? AttestationDigests { get; init; }
/// <summary>
/// Gets custom properties to include.
/// </summary>
public IReadOnlyDictionary<string, object>? Properties { get; init; }
}
/// <summary>
/// Type of finding.
/// </summary>
public enum FindingType
{
/// <summary>Software vulnerability (CVE, GHSA, etc.).</summary>
Vulnerability,
/// <summary>Hardcoded secret or credential.</summary>
Secret,
/// <summary>Supply chain issue (unsigned, unknown provenance, etc.).</summary>
SupplyChain,
/// <summary>Binary hardening issue.</summary>
BinaryHardening,
/// <summary>License compliance issue.</summary>
License,
/// <summary>Configuration issue.</summary>
Configuration
}
/// <summary>
/// Reachability status.
/// </summary>
public enum ReachabilityStatus
{
/// <summary>Not analyzed.</summary>
Unknown,
/// <summary>Statically reachable.</summary>
StaticReachable,
/// <summary>Statically unreachable.</summary>
StaticUnreachable,
/// <summary>Confirmed reachable at runtime.</summary>
RuntimeReachable,
/// <summary>Confirmed unreachable at runtime.</summary>
RuntimeUnreachable,
/// <summary>Conflicting evidence.</summary>
Contested
}
/// <summary>
/// VEX status.
/// </summary>
public enum VexStatus
{
/// <summary>Affected by the vulnerability.</summary>
Affected,
/// <summary>Not affected by the vulnerability.</summary>
NotAffected,
/// <summary>Fixed in this version.</summary>
Fixed,
/// <summary>Under investigation.</summary>
UnderInvestigation
}

View File

@@ -0,0 +1,139 @@
// <copyright file="FingerprintGenerator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Security.Cryptography;
using System.Text;
using StellaOps.Scanner.Sarif.Rules;
namespace StellaOps.Scanner.Sarif.Fingerprints;
/// <summary>
/// Default implementation of <see cref="IFingerprintGenerator"/>.
/// Sprint: SPRINT_20260109_010_001 Task: Implement fingerprint generator
/// </summary>
/// <remarks>
/// Fingerprint algorithms:
/// - stellaops/v1 (Standard): SHA-256(ruleId | componentPurl | vulnId | artifactDigest)
/// - stellaops/minimal (Minimal): SHA-256(ruleId | vulnId)
/// - stellaops/extended (Extended): SHA-256(ruleId | componentPurl | vulnId | artifactDigest | reachability | vexStatus)
/// </remarks>
public sealed class FingerprintGenerator : IFingerprintGenerator
{
private const string FingerprintVersion = "stellaops/v1";
private const char Separator = '|';
private readonly ISarifRuleRegistry _ruleRegistry;
/// <summary>
/// Initializes a new instance of the <see cref="FingerprintGenerator"/> class.
/// </summary>
/// <param name="ruleRegistry">The rule registry.</param>
public FingerprintGenerator(ISarifRuleRegistry ruleRegistry)
{
_ruleRegistry = ruleRegistry ?? throw new ArgumentNullException(nameof(ruleRegistry));
}
/// <inheritdoc/>
public string GeneratePrimary(FindingInput finding, FingerprintStrategy strategy)
{
ArgumentNullException.ThrowIfNull(finding);
var input = strategy switch
{
FingerprintStrategy.Standard => BuildStandardInput(finding),
FingerprintStrategy.Minimal => BuildMinimalInput(finding),
FingerprintStrategy.Extended => BuildExtendedInput(finding),
_ => throw new ArgumentOutOfRangeException(nameof(strategy), strategy, "Unknown fingerprint strategy")
};
return ComputeSha256(input);
}
/// <inheritdoc/>
public IDictionary<string, string> GeneratePartial(FindingInput finding)
{
ArgumentNullException.ThrowIfNull(finding);
var partials = new Dictionary<string, string>(StringComparer.Ordinal);
// Component-based partial fingerprint
if (!string.IsNullOrEmpty(finding.ComponentPurl))
{
partials["stellaops/component/v1"] = ComputeSha256(finding.ComponentPurl);
}
// Vulnerability-based partial fingerprint
if (!string.IsNullOrEmpty(finding.VulnerabilityId))
{
partials["stellaops/vuln/v1"] = ComputeSha256(finding.VulnerabilityId);
}
// Location-based partial fingerprint (for GitHub fallback)
if (!string.IsNullOrEmpty(finding.FilePath) && finding.StartLine.HasValue)
{
var locationInput = $"{finding.FilePath}:{finding.StartLine}";
partials["primaryLocationLineHash/v1"] = ComputeSha256(locationInput);
}
// Title-based partial fingerprint (for secrets/config issues without CVE)
if (finding.Type is FindingType.Secret or FindingType.Configuration)
{
var titleInput = $"{finding.Type}:{finding.Title}";
partials["stellaops/title/v1"] = ComputeSha256(titleInput);
}
return partials;
}
private string BuildStandardInput(FindingInput finding)
{
var ruleId = _ruleRegistry.GetRuleId(finding);
var parts = new[]
{
ruleId,
finding.ComponentPurl ?? string.Empty,
finding.VulnerabilityId ?? string.Empty,
finding.ArtifactDigest ?? string.Empty
};
return string.Join(Separator, parts);
}
private string BuildMinimalInput(FindingInput finding)
{
var ruleId = _ruleRegistry.GetRuleId(finding);
var parts = new[]
{
ruleId,
finding.VulnerabilityId ?? finding.Title
};
return string.Join(Separator, parts);
}
private string BuildExtendedInput(FindingInput finding)
{
var ruleId = _ruleRegistry.GetRuleId(finding);
var parts = new[]
{
ruleId,
finding.ComponentPurl ?? string.Empty,
finding.VulnerabilityId ?? string.Empty,
finding.ArtifactDigest ?? string.Empty,
finding.Reachability?.ToString() ?? string.Empty,
finding.VexStatus?.ToString() ?? string.Empty
};
return string.Join(Separator, parts);
}
private static string ComputeSha256(string input)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexStringLower(bytes);
}
}
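
As a cross-check on the remarks above, the stellaops/v1 (Standard) fingerprint can be reproduced in a few lines, for example when correlating results in a downstream tool. This is an illustrative sketch only: the rule id literal is an assumption (in the generator it comes from ISarifRuleRegistry.GetRuleId), while the hashing itself mirrors BuildStandardInput plus ComputeSha256.

using System.Security.Cryptography;
using System.Text;

static string StandardFingerprint(string ruleId, string? purl, string? vulnId, string? artifactDigest)
{
    // Same shape as the Standard strategy: ruleId|purl|vulnId|digest, SHA-256, lowercase hex.
    var input = string.Join('|', ruleId, purl ?? string.Empty, vulnId ?? string.Empty, artifactDigest ?? string.Empty);
    return Convert.ToHexStringLower(SHA256.HashData(Encoding.UTF8.GetBytes(input)));
}

var fp = StandardFingerprint("STELLA-VULN-001", "pkg:npm/lodash@4.17.20", "CVE-2024-1234", null);
Console.WriteLine(fp); // 64-character hex string, stable across runs for the same inputs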

View File

@@ -0,0 +1,27 @@
// <copyright file="IFingerprintGenerator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Scanner.Sarif.Fingerprints;
/// <summary>
/// Interface for generating deterministic fingerprints for SARIF results.
/// Sprint: SPRINT_20260109_010_001 Task: Implement fingerprint generator
/// </summary>
public interface IFingerprintGenerator
{
/// <summary>
/// Generates a primary fingerprint for deduplication.
/// </summary>
/// <param name="finding">The finding.</param>
/// <param name="strategy">The fingerprint strategy.</param>
/// <returns>The fingerprint string.</returns>
string GeneratePrimary(FindingInput finding, FingerprintStrategy strategy);
/// <summary>
/// Generates partial fingerprints for fallback matching.
/// </summary>
/// <param name="finding">The finding.</param>
/// <returns>Dictionary of partial fingerprint names to values.</returns>
IDictionary<string, string> GeneratePartial(FindingInput finding);
}

View File

@@ -0,0 +1,51 @@
// <copyright file="ISarifExportService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using StellaOps.Scanner.Sarif.Models;
namespace StellaOps.Scanner.Sarif;
/// <summary>
/// Service interface for exporting Scanner findings to SARIF 2.1.0 format.
/// Sprint: SPRINT_20260109_010_001 Task: Extract shared SARIF models
/// </summary>
public interface ISarifExportService
{
/// <summary>
/// Exports findings to a SARIF log structure.
/// </summary>
/// <param name="findings">The findings to export.</param>
/// <param name="options">Export options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The SARIF log containing the findings.</returns>
Task<SarifLog> ExportAsync(
IEnumerable<FindingInput> findings,
SarifExportOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Exports findings to SARIF JSON string.
/// </summary>
/// <param name="findings">The findings to export.</param>
/// <param name="options">Export options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The SARIF JSON string.</returns>
Task<string> ExportToJsonAsync(
IEnumerable<FindingInput> findings,
SarifExportOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Exports findings to SARIF JSON and writes to a stream.
/// </summary>
/// <param name="findings">The findings to export.</param>
/// <param name="options">Export options.</param>
/// <param name="outputStream">The output stream.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task ExportToStreamAsync(
IEnumerable<FindingInput> findings,
SarifExportOptions options,
Stream outputStream,
CancellationToken cancellationToken = default);
}
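A short, hedged example of driving ISarifExportService. The concrete implementation, the namespace of FindingInput, and the shape of SarifExportOptions are not part of this file, so the default construction below is an assumption.

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Sarif;        // ISarifExportService; FindingInput assumed to live here
using StellaOps.Scanner.Sarif.Models; // SarifExportOptions location assumed

public static class SarifExportSketch
{
    public static async Task<string> ExportAsync(
        ISarifExportService exporter,
        IReadOnlyList<FindingInput> findings,
        CancellationToken cancellationToken)
    {
        // Hypothetical: the real options type may require explicit settings.
        var options = new SarifExportOptions();
        return await exporter.ExportToJsonAsync(findings, options, cancellationToken);
    }
}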

View File

@@ -0,0 +1,232 @@
// <copyright file="SarifModels.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Sarif.Models;
/// <summary>
/// SARIF 2.1.0 log model.
/// Sprint: SPRINT_20260109_010_001 Task: Extract shared SARIF models
/// </summary>
public sealed record SarifLog(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("$schema")] string Schema,
[property: JsonPropertyName("runs")] ImmutableArray<SarifRun> Runs)
{
/// <summary>SARIF version constant.</summary>
public const string SarifVersion = "2.1.0";
/// <summary>SARIF schema URL.</summary>
public const string SchemaUrl = "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json";
/// <summary>
/// Creates a new SARIF log with the standard version and schema.
/// </summary>
public static SarifLog Create(ImmutableArray<SarifRun> runs)
=> new(SarifVersion, SchemaUrl, runs);
}
/// <summary>
/// A single SARIF run representing one analysis execution.
/// </summary>
public sealed record SarifRun(
[property: JsonPropertyName("tool")] SarifTool Tool,
[property: JsonPropertyName("results")] ImmutableArray<SarifResult> Results,
[property: JsonPropertyName("invocations")] ImmutableArray<SarifInvocation>? Invocations = null,
[property: JsonPropertyName("artifacts")] ImmutableArray<SarifArtifact>? Artifacts = null,
[property: JsonPropertyName("versionControlProvenance")] ImmutableArray<SarifVersionControlDetails>? VersionControlProvenance = null,
[property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);
/// <summary>
/// Tool information for the SARIF run.
/// </summary>
public sealed record SarifTool(
[property: JsonPropertyName("driver")] SarifToolComponent Driver,
[property: JsonPropertyName("extensions")] ImmutableArray<SarifToolComponent>? Extensions = null);
/// <summary>
/// Tool component (driver or extension).
/// </summary>
public sealed record SarifToolComponent(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("semanticVersion")] string? SemanticVersion = null,
[property: JsonPropertyName("informationUri")] string? InformationUri = null,
[property: JsonPropertyName("rules")] ImmutableArray<SarifReportingDescriptor>? Rules = null,
[property: JsonPropertyName("supportedTaxonomies")] ImmutableArray<SarifToolComponentReference>? SupportedTaxonomies = null);
/// <summary>
/// Reference to a tool component.
/// </summary>
public sealed record SarifToolComponentReference(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("guid")] string? Guid = null);
/// <summary>
/// Rule definition (reporting descriptor).
/// </summary>
public sealed record SarifReportingDescriptor(
[property: JsonPropertyName("id")] string Id,
[property: JsonPropertyName("name")] string? Name = null,
[property: JsonPropertyName("shortDescription")] SarifMessage? ShortDescription = null,
[property: JsonPropertyName("fullDescription")] SarifMessage? FullDescription = null,
[property: JsonPropertyName("defaultConfiguration")] SarifReportingConfiguration? DefaultConfiguration = null,
[property: JsonPropertyName("helpUri")] string? HelpUri = null,
[property: JsonPropertyName("help")] SarifMessage? Help = null,
[property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);
/// <summary>
/// Rule configuration.
/// </summary>
public sealed record SarifReportingConfiguration(
[property: JsonPropertyName("level")] SarifLevel Level = SarifLevel.Warning,
[property: JsonPropertyName("enabled")] bool Enabled = true);
/// <summary>
/// SARIF message with text.
/// </summary>
public sealed record SarifMessage(
[property: JsonPropertyName("text")] string Text,
[property: JsonPropertyName("markdown")] string? Markdown = null);
/// <summary>
/// SARIF result level.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<SarifLevel>))]
public enum SarifLevel
{
/// <summary>No level.</summary>
[JsonStringEnumMemberName("none")]
None,
/// <summary>Informational note.</summary>
[JsonStringEnumMemberName("note")]
Note,
/// <summary>Warning.</summary>
[JsonStringEnumMemberName("warning")]
Warning,
/// <summary>Error.</summary>
[JsonStringEnumMemberName("error")]
Error
}
/// <summary>
/// A single result/finding.
/// </summary>
public sealed record SarifResult(
[property: JsonPropertyName("ruleId")] string RuleId,
[property: JsonPropertyName("level")] SarifLevel Level,
[property: JsonPropertyName("message")] SarifMessage Message,
[property: JsonPropertyName("ruleIndex")] int? RuleIndex = null,
[property: JsonPropertyName("locations")] ImmutableArray<SarifLocation>? Locations = null,
[property: JsonPropertyName("fingerprints")] ImmutableSortedDictionary<string, string>? Fingerprints = null,
[property: JsonPropertyName("partialFingerprints")] ImmutableSortedDictionary<string, string>? PartialFingerprints = null,
[property: JsonPropertyName("relatedLocations")] ImmutableArray<SarifLocation>? RelatedLocations = null,
[property: JsonPropertyName("fixes")] ImmutableArray<SarifFix>? Fixes = null,
[property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);
/// <summary>
/// Location of a result.
/// </summary>
public sealed record SarifLocation(
[property: JsonPropertyName("physicalLocation")] SarifPhysicalLocation? PhysicalLocation = null,
[property: JsonPropertyName("logicalLocations")] ImmutableArray<SarifLogicalLocation>? LogicalLocations = null,
[property: JsonPropertyName("message")] SarifMessage? Message = null);
/// <summary>
/// Physical file location.
/// </summary>
public sealed record SarifPhysicalLocation(
[property: JsonPropertyName("artifactLocation")] SarifArtifactLocation ArtifactLocation,
[property: JsonPropertyName("region")] SarifRegion? Region = null,
[property: JsonPropertyName("contextRegion")] SarifRegion? ContextRegion = null);
/// <summary>
/// Artifact location (file path).
/// </summary>
public sealed record SarifArtifactLocation(
[property: JsonPropertyName("uri")] string Uri,
[property: JsonPropertyName("uriBaseId")] string? UriBaseId = null,
[property: JsonPropertyName("index")] int? Index = null);
/// <summary>
/// Region within a file.
/// </summary>
public sealed record SarifRegion(
[property: JsonPropertyName("startLine")] int? StartLine = null,
[property: JsonPropertyName("startColumn")] int? StartColumn = null,
[property: JsonPropertyName("endLine")] int? EndLine = null,
[property: JsonPropertyName("endColumn")] int? EndColumn = null,
[property: JsonPropertyName("charOffset")] int? CharOffset = null,
[property: JsonPropertyName("charLength")] int? CharLength = null,
[property: JsonPropertyName("snippet")] SarifArtifactContent? Snippet = null);
/// <summary>
/// Artifact content (code snippet).
/// </summary>
public sealed record SarifArtifactContent(
[property: JsonPropertyName("text")] string? Text = null,
[property: JsonPropertyName("rendered")] SarifMessage? Rendered = null);
/// <summary>
/// Logical location (namespace, class, function).
/// </summary>
public sealed record SarifLogicalLocation(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("fullyQualifiedName")] string? FullyQualifiedName = null,
[property: JsonPropertyName("kind")] string? Kind = null,
[property: JsonPropertyName("index")] int? Index = null);
/// <summary>
/// Invocation information.
/// </summary>
public sealed record SarifInvocation(
[property: JsonPropertyName("executionSuccessful")] bool ExecutionSuccessful,
[property: JsonPropertyName("startTimeUtc")] DateTimeOffset? StartTimeUtc = null,
[property: JsonPropertyName("endTimeUtc")] DateTimeOffset? EndTimeUtc = null,
[property: JsonPropertyName("workingDirectory")] SarifArtifactLocation? WorkingDirectory = null,
[property: JsonPropertyName("commandLine")] string? CommandLine = null);
/// <summary>
/// Artifact (file) information.
/// </summary>
public sealed record SarifArtifact(
[property: JsonPropertyName("location")] SarifArtifactLocation Location,
[property: JsonPropertyName("mimeType")] string? MimeType = null,
[property: JsonPropertyName("hashes")] ImmutableSortedDictionary<string, string>? Hashes = null,
[property: JsonPropertyName("length")] long? Length = null);
/// <summary>
/// Version control information.
/// </summary>
public sealed record SarifVersionControlDetails(
[property: JsonPropertyName("repositoryUri")] string RepositoryUri,
[property: JsonPropertyName("revisionId")] string? RevisionId = null,
[property: JsonPropertyName("branch")] string? Branch = null,
[property: JsonPropertyName("mappedTo")] SarifArtifactLocation? MappedTo = null);
/// <summary>
/// Fix suggestion.
/// </summary>
public sealed record SarifFix(
[property: JsonPropertyName("description")] SarifMessage Description,
[property: JsonPropertyName("artifactChanges")] ImmutableArray<SarifArtifactChange> ArtifactChanges);
/// <summary>
/// Artifact change for a fix.
/// </summary>
public sealed record SarifArtifactChange(
[property: JsonPropertyName("artifactLocation")] SarifArtifactLocation ArtifactLocation,
[property: JsonPropertyName("replacements")] ImmutableArray<SarifReplacement> Replacements);
/// <summary>
/// Text replacement for a fix.
/// </summary>
public sealed record SarifReplacement(
[property: JsonPropertyName("deletedRegion")] SarifRegion DeletedRegion,
[property: JsonPropertyName("insertedContent")] SarifArtifactContent? InsertedContent = null);

View File

@@ -0,0 +1,48 @@
// <copyright file="ISarifRuleRegistry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using StellaOps.Scanner.Sarif.Models;
namespace StellaOps.Scanner.Sarif.Rules;
/// <summary>
/// Registry interface for SARIF rule definitions.
/// Sprint: SPRINT_20260109_010_001 Task: Create rule registry
/// </summary>
public interface ISarifRuleRegistry
{
/// <summary>
/// Gets the rule definition for a finding.
/// </summary>
/// <param name="finding">The finding.</param>
/// <returns>The rule definition.</returns>
SarifReportingDescriptor GetRule(FindingInput finding);
/// <summary>
/// Gets the rule ID for a finding.
/// </summary>
/// <param name="finding">The finding.</param>
/// <returns>The rule ID.</returns>
string GetRuleId(FindingInput finding);
/// <summary>
/// Gets the SARIF level for a finding.
/// </summary>
/// <param name="finding">The finding.</param>
/// <returns>The SARIF level.</returns>
SarifLevel GetLevel(FindingInput finding);
/// <summary>
/// Gets all registered rules.
/// </summary>
/// <returns>All rule definitions.</returns>
IReadOnlyList<SarifReportingDescriptor> GetAllRules();
/// <summary>
/// Gets rules by type.
/// </summary>
/// <param name="type">The finding type.</param>
/// <returns>Rules for the specified type.</returns>
IReadOnlyList<SarifReportingDescriptor> GetRulesByType(FindingType type);
}
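One way an exporter might consume this registry is to publish the registered rules on tool.driver.rules and reference them by index from each result. The wiring below is an illustrative assumption (using the default implementation that follows), not code from this change; it assumes .NET implicit usings for System and System.Linq.

using System.Collections.Immutable;
using StellaOps.Scanner.Sarif.Models;
using StellaOps.Scanner.Sarif.Rules;

ISarifRuleRegistry registry = new SarifRuleRegistry();

// Deterministic ordering keeps ruleIndex values stable across exports.
ImmutableArray<SarifReportingDescriptor> driverRules = registry.GetAllRules()
    .OrderBy(rule => rule.Id, StringComparer.Ordinal)
    .ToImmutableArray();

// Map rule id -> index so each SarifResult can carry a matching RuleIndex.
var ruleIndexById = driverRules
    .Select((rule, index) => (rule.Id, Index: index))
    .ToDictionary(entry => entry.Id, entry => entry.Index, StringComparer.OrdinalIgnoreCase);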

View File

@@ -0,0 +1,417 @@
// <copyright file="SarifRuleRegistry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Frozen;
using StellaOps.Scanner.Sarif.Models;
namespace StellaOps.Scanner.Sarif.Rules;
/// <summary>
/// Default implementation of <see cref="ISarifRuleRegistry"/>.
/// Sprint: SPRINT_20260109_010_001 Task: Create rule registry
/// </summary>
public sealed class SarifRuleRegistry : ISarifRuleRegistry
{
private readonly FrozenDictionary<string, SarifReportingDescriptor> _rulesById;
private readonly IReadOnlyList<SarifReportingDescriptor> _allRules;
/// <summary>
/// Initializes a new instance of the <see cref="SarifRuleRegistry"/> class.
/// </summary>
public SarifRuleRegistry()
{
var rules = BuildRules();
_allRules = rules;
_rulesById = rules.ToFrozenDictionary(r => r.Id, StringComparer.OrdinalIgnoreCase);
}
/// <inheritdoc/>
public SarifReportingDescriptor GetRule(FindingInput finding)
{
var ruleId = GetRuleId(finding);
return _rulesById.TryGetValue(ruleId, out var rule)
? rule
: CreateUnknownRule(ruleId);
}
/// <inheritdoc/>
public string GetRuleId(FindingInput finding)
{
return finding.Type switch
{
FindingType.Vulnerability => GetVulnerabilityRuleId(finding),
FindingType.Secret => GetSecretRuleId(finding),
FindingType.SupplyChain => GetSupplyChainRuleId(finding),
FindingType.BinaryHardening => GetBinaryHardeningRuleId(finding),
FindingType.License => "STELLA-LIC-001",
FindingType.Configuration => "STELLA-CFG-001",
_ => "STELLA-UNKNOWN"
};
}
/// <inheritdoc/>
public SarifLevel GetLevel(FindingInput finding)
{
// Runtime/confirmed reachable always elevates to error
if (finding.Reachability == ReachabilityStatus.RuntimeReachable)
{
return SarifLevel.Error;
}
// KEV always elevates to error
if (finding.IsKev)
{
return SarifLevel.Error;
}
// For non-vulnerability findings without explicit severity, use rule default
if (finding.Severity == Severity.Unknown)
{
var ruleId = GetRuleId(finding);
if (_rulesById.TryGetValue(ruleId, out var rule) && rule.DefaultConfiguration != null)
{
return rule.DefaultConfiguration.Level;
}
}
// Map severity to level
return finding.Severity switch
{
Severity.Critical => SarifLevel.Error,
Severity.High => SarifLevel.Error,
Severity.Medium => SarifLevel.Warning,
Severity.Low => SarifLevel.Note,
_ => SarifLevel.Warning
};
}
/// <inheritdoc/>
public IReadOnlyList<SarifReportingDescriptor> GetAllRules() => _allRules;
/// <inheritdoc/>
public IReadOnlyList<SarifReportingDescriptor> GetRulesByType(FindingType type)
{
var prefix = type switch
{
FindingType.Vulnerability => "STELLA-VULN-",
FindingType.Secret => "STELLA-SEC-",
FindingType.SupplyChain => "STELLA-SC-",
FindingType.BinaryHardening => "STELLA-BIN-",
FindingType.License => "STELLA-LIC-",
FindingType.Configuration => "STELLA-CFG-",
_ => "STELLA-"
};
return _allRules.Where(r => r.Id.StartsWith(prefix, StringComparison.Ordinal)).ToList();
}
private static string GetVulnerabilityRuleId(FindingInput finding)
{
// Check reachability first
if (finding.Reachability == ReachabilityStatus.RuntimeReachable)
{
return "STELLA-VULN-005"; // Runtime reachable
}
if (finding.Reachability == ReachabilityStatus.StaticReachable)
{
return "STELLA-VULN-006"; // Static reachable
}
// Fall back to severity
return finding.Severity switch
{
Severity.Critical => "STELLA-VULN-001",
Severity.High => "STELLA-VULN-002",
Severity.Medium => "STELLA-VULN-003",
Severity.Low => "STELLA-VULN-004",
_ => "STELLA-VULN-003"
};
}
private static string GetSecretRuleId(FindingInput finding)
{
// Check for private key patterns in title/description
var text = $"{finding.Title} {finding.Description}".ToUpperInvariant();
if (text.Contains("PRIVATE KEY", StringComparison.Ordinal))
{
return "STELLA-SEC-002";
}
if (text.Contains("CREDENTIAL", StringComparison.Ordinal) ||
text.Contains("PASSWORD", StringComparison.Ordinal))
{
return "STELLA-SEC-003";
}
return "STELLA-SEC-001"; // Default hardcoded secret
}
private static string GetSupplyChainRuleId(FindingInput finding)
{
var text = $"{finding.Title} {finding.Description}".ToUpperInvariant();
if (text.Contains("TYPOSQUAT", StringComparison.Ordinal))
{
return "STELLA-SC-003";
}
if (text.Contains("UNSIGNED", StringComparison.Ordinal))
{
return "STELLA-SC-001";
}
if (text.Contains("PROVENANCE", StringComparison.Ordinal))
{
return "STELLA-SC-002";
}
if (text.Contains("DEPRECAT", StringComparison.Ordinal))
{
return "STELLA-SC-004";
}
return "STELLA-SC-001";
}
private static string GetBinaryHardeningRuleId(FindingInput finding)
{
var text = $"{finding.Title} {finding.Description}".ToUpperInvariant();
if (text.Contains("RELRO", StringComparison.Ordinal))
{
return "STELLA-BIN-001";
}
if (text.Contains("CANARY", StringComparison.Ordinal) || text.Contains("STACK", StringComparison.Ordinal))
{
return "STELLA-BIN-002";
}
if (text.Contains("PIE", StringComparison.Ordinal))
{
return "STELLA-BIN-003";
}
if (text.Contains("FORTIFY", StringComparison.Ordinal))
{
return "STELLA-BIN-004";
}
return "STELLA-BIN-001";
}
private static SarifReportingDescriptor CreateUnknownRule(string ruleId)
{
return new SarifReportingDescriptor(
Id: ruleId,
Name: "Unknown Finding",
ShortDescription: new SarifMessage("Unknown finding type"),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning));
}
private static List<SarifReportingDescriptor> BuildRules()
{
return
[
// Vulnerability Rules
new SarifReportingDescriptor(
Id: "STELLA-VULN-001",
Name: "CriticalVulnerability",
ShortDescription: new SarifMessage("Critical severity vulnerability (CVSS >= 9.0)"),
FullDescription: new SarifMessage(
"A critical severity vulnerability was detected in a dependency. " +
"Critical vulnerabilities typically allow remote code execution, " +
"privilege escalation, or complete system compromise."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#critical"),
new SarifReportingDescriptor(
Id: "STELLA-VULN-002",
Name: "HighVulnerability",
ShortDescription: new SarifMessage("High severity vulnerability (CVSS 7.0-8.9)"),
FullDescription: new SarifMessage(
"A high severity vulnerability was detected in a dependency. " +
"High severity vulnerabilities can lead to significant data exposure or system impact."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#high"),
new SarifReportingDescriptor(
Id: "STELLA-VULN-003",
Name: "MediumVulnerability",
ShortDescription: new SarifMessage("Medium severity vulnerability (CVSS 4.0-6.9)"),
FullDescription: new SarifMessage(
"A medium severity vulnerability was detected in a dependency. " +
"Medium severity vulnerabilities require specific conditions to exploit."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#medium"),
new SarifReportingDescriptor(
Id: "STELLA-VULN-004",
Name: "LowVulnerability",
ShortDescription: new SarifMessage("Low severity vulnerability (CVSS < 4.0)"),
FullDescription: new SarifMessage(
"A low severity vulnerability was detected in a dependency. " +
"Low severity vulnerabilities have limited impact or require unlikely conditions."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#low"),
new SarifReportingDescriptor(
Id: "STELLA-VULN-005",
Name: "RuntimeReachableVulnerability",
ShortDescription: new SarifMessage("Vulnerability confirmed reachable at runtime"),
FullDescription: new SarifMessage(
"This vulnerability has been confirmed as reachable through runtime analysis. " +
"The vulnerable code path is actively executed in your application."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/reachability#runtime"),
new SarifReportingDescriptor(
Id: "STELLA-VULN-006",
Name: "StaticReachableVulnerability",
ShortDescription: new SarifMessage("Vulnerability statically reachable"),
FullDescription: new SarifMessage(
"Static analysis indicates this vulnerability may be reachable. " +
"The vulnerable code exists in a call path from your application code."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/reachability#static"),
// Secret Rules
new SarifReportingDescriptor(
Id: "STELLA-SEC-001",
Name: "HardcodedSecret",
ShortDescription: new SarifMessage("Hardcoded secret detected"),
FullDescription: new SarifMessage(
"A hardcoded secret (API key, token, password) was detected in source code or configuration. " +
"Secrets should be stored in secure vaults and injected at runtime."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/secrets#hardcoded"),
new SarifReportingDescriptor(
Id: "STELLA-SEC-002",
Name: "PrivateKeyExposure",
ShortDescription: new SarifMessage("Private key exposed"),
FullDescription: new SarifMessage(
"A private key (RSA, EC, SSH) was detected in source code or artifacts. " +
"Private keys should never be committed to version control."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/secrets#private-key"),
new SarifReportingDescriptor(
Id: "STELLA-SEC-003",
Name: "CredentialPattern",
ShortDescription: new SarifMessage("Credential pattern detected"),
FullDescription: new SarifMessage(
"A potential credential or password pattern was detected. " +
"Review to determine if this is a false positive or actual credential."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/secrets#credential"),
// Supply Chain Rules
new SarifReportingDescriptor(
Id: "STELLA-SC-001",
Name: "UnsignedPackage",
ShortDescription: new SarifMessage("Unsigned package detected"),
FullDescription: new SarifMessage(
"A package without cryptographic signature was detected. " +
"Unsigned packages cannot be verified for authenticity and integrity."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/supply-chain#unsigned"),
new SarifReportingDescriptor(
Id: "STELLA-SC-002",
Name: "UnknownProvenance",
ShortDescription: new SarifMessage("Package with unknown provenance"),
FullDescription: new SarifMessage(
"A package without verifiable build provenance was detected. " +
"Provenance helps verify that packages were built from expected sources."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/supply-chain#provenance"),
new SarifReportingDescriptor(
Id: "STELLA-SC-003",
Name: "TyposquatCandidate",
ShortDescription: new SarifMessage("Potential typosquat package"),
FullDescription: new SarifMessage(
"A package name similar to a popular package was detected. " +
"This may be a typosquat attack attempting to install malicious code."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
HelpUri: "https://stellaops.io/docs/findings/supply-chain#typosquat"),
new SarifReportingDescriptor(
Id: "STELLA-SC-004",
Name: "DeprecatedPackage",
ShortDescription: new SarifMessage("Deprecated package in use"),
FullDescription: new SarifMessage(
"A deprecated package was detected. " +
"Deprecated packages may no longer receive security updates."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
HelpUri: "https://stellaops.io/docs/findings/supply-chain#deprecated"),
// Binary Hardening Rules
new SarifReportingDescriptor(
Id: "STELLA-BIN-001",
Name: "MissingRelro",
ShortDescription: new SarifMessage("Binary missing RELRO protection"),
FullDescription: new SarifMessage(
"The binary was compiled without RELRO (Relocation Read-Only). " +
"RELRO protects the GOT from being overwritten by attackers."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/binary#relro"),
new SarifReportingDescriptor(
Id: "STELLA-BIN-002",
Name: "MissingStackCanary",
ShortDescription: new SarifMessage("Binary missing stack canary"),
FullDescription: new SarifMessage(
"The binary was compiled without stack canaries. " +
"Stack canaries help detect buffer overflow attacks."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/binary#canary"),
new SarifReportingDescriptor(
Id: "STELLA-BIN-003",
Name: "MissingPie",
ShortDescription: new SarifMessage("Binary not position independent"),
FullDescription: new SarifMessage(
"The binary was not compiled as a Position Independent Executable (PIE). " +
"PIE enables full ASLR protection."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/binary#pie"),
new SarifReportingDescriptor(
Id: "STELLA-BIN-004",
Name: "MissingFortifySource",
ShortDescription: new SarifMessage("Binary missing FORTIFY_SOURCE"),
FullDescription: new SarifMessage(
"The binary was compiled without FORTIFY_SOURCE. " +
"FORTIFY_SOURCE adds runtime checks for buffer overflows in standard library calls."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
HelpUri: "https://stellaops.io/docs/findings/binary#fortify"),
// License Rule
new SarifReportingDescriptor(
Id: "STELLA-LIC-001",
Name: "LicenseCompliance",
ShortDescription: new SarifMessage("License compliance issue"),
FullDescription: new SarifMessage(
"A license compliance issue was detected. " +
"Review the license terms for compatibility with your project."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/license"),
// Configuration Rule
new SarifReportingDescriptor(
Id: "STELLA-CFG-001",
Name: "ConfigurationIssue",
ShortDescription: new SarifMessage("Security configuration issue"),
FullDescription: new SarifMessage(
"A security configuration issue was detected. " +
"Review the configuration to ensure secure defaults are used."),
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
HelpUri: "https://stellaops.io/docs/findings/configuration")
];
}
}
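To make the precedence above concrete: runtime reachability (and KEV) override the raw severity in both rule selection and level mapping. The FindingInput initializer shape, the enum namespaces, and the values below are assumptions for illustration.

using StellaOps.Scanner.Sarif;       // assumed location of FindingInput and related enums
using StellaOps.Scanner.Sarif.Rules;

var registry = new SarifRuleRegistry();

// Hypothetical medium-severity CVE confirmed reachable at runtime.
var finding = new FindingInput
{
    Type = FindingType.Vulnerability,
    Severity = Severity.Medium,
    Reachability = ReachabilityStatus.RuntimeReachable,
    Title = "CVE-2023-0000 in libexample",
};

var ruleId = registry.GetRuleId(finding); // "STELLA-VULN-005": reachability wins over severity
var level = registry.GetLevel(finding);   // SarifLevel.Error: runtime reachable always elevates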

Some files were not shown because too many files have changed in this diff.