more audit work

This commit is contained in: master
2026-01-08 10:21:51 +02:00
parent 43c02081ef
commit 51cf4bc16c
546 changed files with 36721 additions and 4003 deletions

@@ -0,0 +1,289 @@
// <copyright file="ActionProposalParser.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Parses model output for proposed actions.
/// Sprint: SPRINT_20260107_006_003 Task CH-004
/// </summary>
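/// <example>
/// Minimal usage sketch; the model output string and the "approver" role are illustrative assumptions:
/// <code>
/// var parser = new ActionProposalParser();
/// var output = "Consider [Accept risk]{action:approve,cve_id=CVE-2026-0001,expiry=30d}";
/// var result = parser.Parse(output, ImmutableArray.Create("approver"));
/// // result.Proposals contains one allowed "approve" proposal; result.HasBlockedActions is false.
/// var display = parser.StripActionMarkers(output); // "Consider Accept risk"
/// </code>
/// </example>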
public sealed partial class ActionProposalParser
{
private static readonly ImmutableDictionary<string, ActionDefinition> ActionDefinitions =
new Dictionary<string, ActionDefinition>
{
["approve"] = new ActionDefinition
{
Type = "approve",
Description = "Accept risk with expiry",
RequiredRole = "approver",
RequiredParams = ImmutableArray.Create("cve_id"),
OptionalParams = ImmutableArray.Create("expiry", "rationale", "component")
},
["quarantine"] = new ActionDefinition
{
Type = "quarantine",
Description = "Block deployment",
RequiredRole = "operator",
RequiredParams = ImmutableArray.Create("image_digest"),
OptionalParams = ImmutableArray.Create("reason", "duration")
},
["defer"] = new ActionDefinition
{
Type = "defer",
Description = "Mark as under investigation",
RequiredRole = "triage",
RequiredParams = ImmutableArray.Create("cve_id"),
OptionalParams = ImmutableArray.Create("until", "assignee", "notes")
},
["generate_manifest"] = new ActionDefinition
{
Type = "generate_manifest",
Description = "Create integration manifest",
RequiredRole = "admin",
RequiredParams = ImmutableArray.Create("integration_type"),
OptionalParams = ImmutableArray.Create("name", "scopes")
},
["create_vex"] = new ActionDefinition
{
Type = "create_vex",
Description = "Draft VEX statement",
RequiredRole = "issuer",
RequiredParams = ImmutableArray.Create("product", "vulnerability"),
OptionalParams = ImmutableArray.Create("status", "justification", "statement")
}
}.ToImmutableDictionary();
/// <summary>
/// Parses model output for action proposals.
/// </summary>
/// <param name="modelOutput">The raw model output.</param>
/// <param name="userPermissions">The user's permissions/roles.</param>
/// <returns>Parsed action proposals.</returns>
public ActionParseResult Parse(string modelOutput, ImmutableArray<string> userPermissions)
{
var proposals = new List<ParsedActionProposal>();
var warnings = new List<string>();
// Match action button format: [Label]{action:type,param1=value1}
var matches = ActionButtonRegex().Matches(modelOutput);
foreach (Match match in matches)
{
var label = match.Groups["label"].Value;
var actionSpec = match.Groups["spec"].Value;
var parseResult = ParseActionSpec(actionSpec, label, userPermissions);
if (parseResult.Proposal is not null)
{
proposals.Add(parseResult.Proposal);
}
if (parseResult.Warning is not null)
{
warnings.Add(parseResult.Warning);
}
}
// Also check for inline action markers
var inlineMatches = InlineActionRegex().Matches(modelOutput);
foreach (Match match in inlineMatches)
{
var actionType = match.Groups["type"].Value.ToLowerInvariant();
var paramsStr = match.Groups["params"].Value;
var parseResult = ParseActionSpec($"action:{actionType},{paramsStr}", actionType, userPermissions);
if (parseResult.Proposal is not null &&
!proposals.Any(p => p.ActionType == parseResult.Proposal.ActionType))
{
proposals.Add(parseResult.Proposal);
}
if (parseResult.Warning is not null)
{
warnings.Add(parseResult.Warning);
}
}
return new ActionParseResult
{
Proposals = proposals.ToImmutableArray(),
Warnings = warnings.ToImmutableArray(),
HasBlockedActions = proposals.Any(p => !p.IsAllowed)
};
}
/// <summary>
/// Strips action markers from model output for display.
/// </summary>
public string StripActionMarkers(string modelOutput)
{
var result = ActionButtonRegex().Replace(modelOutput, m => m.Groups["label"].Value);
result = InlineActionRegex().Replace(result, string.Empty);
return result.Trim();
}
private (ParsedActionProposal? Proposal, string? Warning) ParseActionSpec(
string actionSpec,
string label,
ImmutableArray<string> userPermissions)
{
// Parse "action:type,param1=value1,param2=value2"
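// e.g. "action:approve,cve_id=CVE-2026-0001,expiry=30d" (illustrative values)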
if (!actionSpec.StartsWith("action:", StringComparison.OrdinalIgnoreCase))
{
return (null, $"Invalid action format: {actionSpec}");
}
var parts = actionSpec[7..].Split(',');
if (parts.Length == 0 || string.IsNullOrWhiteSpace(parts[0]))
{
return (null, "Action type not specified");
}
var actionType = parts[0].Trim().ToLowerInvariant();
// Parse parameters
var parameters = new Dictionary<string, string>();
for (int i = 1; i < parts.Length; i++)
{
var paramParts = parts[i].Split('=', 2);
if (paramParts.Length == 2)
{
parameters[paramParts[0].Trim()] = paramParts[1].Trim();
}
}
// Validate action type
if (!ActionDefinitions.TryGetValue(actionType, out var definition))
{
return (null, $"Unknown action type: {actionType}");
}
// Check permissions
var isAllowed = userPermissions.Contains(definition.RequiredRole, StringComparer.OrdinalIgnoreCase);
string? blockedReason = null;
if (!isAllowed)
{
blockedReason = $"Requires '{definition.RequiredRole}' role";
}
// Validate required parameters
var missingParams = definition.RequiredParams
.Where(p => !parameters.ContainsKey(p))
.ToList();
if (missingParams.Count > 0)
{
return (null, $"Missing required parameters: {string.Join(", ", missingParams)}");
}
var proposal = new ParsedActionProposal
{
ActionType = actionType,
Label = label,
Parameters = parameters.ToImmutableDictionary(),
IsAllowed = isAllowed,
BlockedReason = blockedReason,
RequiredRole = definition.RequiredRole,
Description = definition.Description
};
return (proposal, null);
}
[GeneratedRegex(@"\[(?<label>[^\]]+)\]\{(?<spec>action:[^}]+)\}", RegexOptions.Compiled)]
private static partial Regex ActionButtonRegex();
[GeneratedRegex(@"<!--\s*ACTION:\s*(?<type>\w+)\s*(?<params>[^>]*)\s*-->", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
private static partial Regex InlineActionRegex();
}
/// <summary>
/// Definition of an action type.
/// </summary>
internal sealed record ActionDefinition
{
public required string Type { get; init; }
public required string Description { get; init; }
public required string RequiredRole { get; init; }
public ImmutableArray<string> RequiredParams { get; init; } = ImmutableArray<string>.Empty;
public ImmutableArray<string> OptionalParams { get; init; } = ImmutableArray<string>.Empty;
}
/// <summary>
/// Result of parsing action proposals.
/// </summary>
public sealed record ActionParseResult
{
/// <summary>
/// Gets the parsed action proposals.
/// </summary>
public ImmutableArray<ParsedActionProposal> Proposals { get; init; } =
ImmutableArray<ParsedActionProposal>.Empty;
/// <summary>
/// Gets any warnings from parsing.
/// </summary>
public ImmutableArray<string> Warnings { get; init; } =
ImmutableArray<string>.Empty;
/// <summary>
/// Gets whether any actions were blocked due to permissions.
/// </summary>
public bool HasBlockedActions { get; init; }
/// <summary>
/// Gets the allowed proposals only.
/// </summary>
public ImmutableArray<ParsedActionProposal> AllowedProposals =>
Proposals.Where(p => p.IsAllowed).ToImmutableArray();
}
/// <summary>
/// A parsed action proposal.
/// </summary>
public sealed record ParsedActionProposal
{
/// <summary>
/// Gets the action type.
/// </summary>
public required string ActionType { get; init; }
/// <summary>
/// Gets the display label.
/// </summary>
public required string Label { get; init; }
/// <summary>
/// Gets the action parameters.
/// </summary>
public ImmutableDictionary<string, string> Parameters { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// Gets whether this action is allowed for the user.
/// </summary>
public bool IsAllowed { get; init; }
/// <summary>
/// Gets the reason the action is blocked (if not allowed).
/// </summary>
public string? BlockedReason { get; init; }
/// <summary>
/// Gets the required role for this action.
/// </summary>
public required string RequiredRole { get; init; }
/// <summary>
/// Gets the action description.
/// </summary>
public required string Description { get; init; }
}

@@ -0,0 +1,270 @@
// <copyright file="ChatPromptAssembler.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text;
using Microsoft.Extensions.Options;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Assembles multi-turn prompts for AdvisoryAI chat.
/// Sprint: SPRINT_20260107_006_003 Task CH-003
/// </summary>
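/// <example>
/// Minimal sketch, assuming a <see cref="Conversation"/> instance already exists:
/// <code>
/// var assembler = new ChatPromptAssembler(
/// Options.Create(new ChatPromptOptions()),
/// new ConversationContextBuilder());
/// var prompt = assembler.Assemble(conversation, "Is CVE-2026-0001 reachable in api-gateway?");
/// // prompt.Messages starts with the system prompt, replays prior turns, then ends with the new user message.
/// </code>
/// </example>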
public sealed class ChatPromptAssembler
{
private readonly ChatPromptOptions _options;
private readonly ConversationContextBuilder _contextBuilder;
/// <summary>
/// Initializes a new instance of the <see cref="ChatPromptAssembler"/> class.
/// </summary>
public ChatPromptAssembler(
IOptions<ChatPromptOptions> options,
ConversationContextBuilder contextBuilder)
{
_options = options.Value;
_contextBuilder = contextBuilder;
}
/// <summary>
/// Assembles a complete prompt for the LLM.
/// </summary>
/// <param name="conversation">The conversation to build prompt from.</param>
/// <param name="userMessage">The new user message.</param>
/// <returns>The assembled prompt.</returns>
public AssembledPrompt Assemble(Conversation conversation, string userMessage)
{
var messages = new List<ChatMessage>();
// Add system prompt
var systemPrompt = BuildSystemPrompt(conversation.Context);
messages.Add(new ChatMessage(ChatMessageRole.System, systemPrompt));
// Build the trimmed context (focus items, evidence, truncated history); its history is replayed below and the full context is returned on the AssembledPrompt
var context = _contextBuilder.Build(conversation, _options.MaxContextTokens);
// Add conversation history
foreach (var turn in context.History)
{
var role = turn.Role switch
{
TurnRole.User => ChatMessageRole.User,
TurnRole.Assistant => ChatMessageRole.Assistant,
TurnRole.System => ChatMessageRole.System,
_ => ChatMessageRole.User
};
var content = turn.Content;
// Include evidence links as footnotes for assistant messages
if (turn.Role == TurnRole.Assistant && !turn.EvidenceLinks.IsEmpty)
{
content = AppendEvidenceFootnotes(content, turn.EvidenceLinks);
}
messages.Add(new ChatMessage(role, content));
}
// Add the new user message
messages.Add(new ChatMessage(ChatMessageRole.User, userMessage));
// Calculate token estimate
var totalTokens = messages.Sum(m => EstimateTokens(m.Content));
return new AssembledPrompt
{
Messages = messages.ToImmutableArray(),
Context = context,
EstimatedTokens = totalTokens,
SystemPromptVersion = _options.SystemPromptVersion
};
}
private string BuildSystemPrompt(ConversationContext conversationContext)
{
var sb = new StringBuilder();
// Core identity
sb.AppendLine(_options.BaseSystemPrompt);
sb.AppendLine();
// Grounding rules
sb.AppendLine("## GROUNDING RULES");
sb.AppendLine();
sb.AppendLine("1. ALWAYS cite internal object links for claims about vulnerabilities, components, or security status.");
sb.AppendLine("2. Use the link format: [type:path] for deep links to evidence.");
sb.AppendLine("3. NEVER make claims about security status without evidence backing.");
sb.AppendLine("4. For actions, present action buttons; do not execute actions directly.");
sb.AppendLine("5. If uncertain, clearly state limitations and ask for clarification.");
sb.AppendLine();
// Object link formats
sb.AppendLine("## OBJECT LINK FORMATS");
sb.AppendLine();
sb.AppendLine("When referencing internal objects, use these formats:");
sb.AppendLine();
sb.AppendLine("| Type | Format | Example |");
sb.AppendLine("|------|--------|---------|");
sb.AppendLine("| SBOM | `[sbom:{id}]` | `[sbom:abc123]` |");
sb.AppendLine("| Reachability | `[reach:{service}:{function}]` | `[reach:api-gateway:grpc.Server]` |");
sb.AppendLine("| Runtime | `[runtime:{service}:traces]` | `[runtime:api-gateway:traces]` |");
sb.AppendLine("| VEX | `[vex:{issuer}:{digest}]` | `[vex:stellaops:sha256:abc]` |");
sb.AppendLine("| Attestation | `[attest:dsse:{digest}]` | `[attest:dsse:sha256:xyz]` |");
sb.AppendLine("| Authority Key | `[auth:keys/{keyId}]` | `[auth:keys/gitlab-oidc]` |");
sb.AppendLine("| Documentation | `[docs:{path}]` | `[docs:scopes/ci-webhook]` |");
sb.AppendLine();
// Action proposal format
sb.AppendLine("## ACTION PROPOSALS");
sb.AppendLine();
sb.AppendLine("When suggesting actions, use this button format:");
sb.AppendLine();
sb.AppendLine("```");
sb.AppendLine("[Action Label]{{action:type,param1=value1,param2=value2}}");
sb.AppendLine("```");
sb.AppendLine();
sb.AppendLine("Available actions:");
sb.AppendLine("- `approve` - Accept risk (requires approver role)");
sb.AppendLine("- `quarantine` - Block deployment (requires operator role)");
sb.AppendLine("- `defer` - Mark under investigation (requires triage role)");
sb.AppendLine("- `generate_manifest` - Create integration manifest (requires admin role)");
sb.AppendLine("- `create_vex` - Draft VEX statement (requires issuer role)");
sb.AppendLine();
// Context-specific rules
if (conversationContext.CurrentCveId is not null)
{
sb.AppendLine("## CURRENT FOCUS");
sb.AppendLine();
sb.AppendLine($"The user is currently investigating **{conversationContext.CurrentCveId}**.");
sb.AppendLine("Prioritize information relevant to this vulnerability.");
sb.AppendLine();
}
if (conversationContext.Policy is not null)
{
sb.AppendLine("## USER PERMISSIONS");
sb.AppendLine();
if (conversationContext.Policy.AutomationAllowed)
{
sb.AppendLine("- Automation is ALLOWED for this user");
}
else
{
sb.AppendLine("- Automation is DISABLED - only suggest actions, don't offer execution");
}
if (!conversationContext.Policy.Permissions.IsEmpty)
{
sb.AppendLine($"- Roles: {string.Join(", ", conversationContext.Policy.Permissions)}");
}
sb.AppendLine();
}
return sb.ToString();
}
private static string AppendEvidenceFootnotes(string content, ImmutableArray<EvidenceLink> links)
{
if (links.IsEmpty)
{
return content;
}
var sb = new StringBuilder(content);
sb.AppendLine();
sb.AppendLine();
sb.AppendLine("---");
sb.AppendLine("**Evidence:**");
foreach (var link in links.Take(5))
{
var label = link.Label ?? link.Uri;
sb.AppendLine($"- [{label}]({link.Uri})");
}
return sb.ToString();
}
private static int EstimateTokens(string text)
{
// Rough estimate: ~4 characters per token for English
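// e.g. a 1,000-character message is estimated at 250 tokens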
return (text.Length + 3) / 4;
}
}
/// <summary>
/// An assembled prompt ready for LLM invocation.
/// </summary>
public sealed record AssembledPrompt
{
/// <summary>
/// Gets the messages to send to the LLM.
/// </summary>
public ImmutableArray<ChatMessage> Messages { get; init; } =
ImmutableArray<ChatMessage>.Empty;
/// <summary>
/// Gets the built context.
/// </summary>
public required BuiltContext Context { get; init; }
/// <summary>
/// Gets the estimated token count.
/// </summary>
public int EstimatedTokens { get; init; }
/// <summary>
/// Gets the system prompt version used.
/// </summary>
public string? SystemPromptVersion { get; init; }
}
/// <summary>
/// A chat message for the LLM.
/// </summary>
public sealed record ChatMessage(ChatMessageRole Role, string Content);
/// <summary>
/// Chat message roles.
/// </summary>
public enum ChatMessageRole
{
/// <summary>System message.</summary>
System,
/// <summary>User message.</summary>
User,
/// <summary>Assistant message.</summary>
Assistant
}
/// <summary>
/// Configuration options for chat prompts.
/// </summary>
public sealed class ChatPromptOptions
{
/// <summary>
/// Gets or sets the base system prompt.
/// </summary>
public string BaseSystemPrompt { get; set; } =
"You are AdvisoryAI, an AI assistant for StellaOps, a sovereign container security platform. " +
"You help users understand vulnerabilities, navigate security evidence, and make informed decisions. " +
"Your responses are grounded in internal evidence and you always cite your sources.";
/// <summary>
/// Gets or sets the maximum tokens for context.
/// </summary>
public int MaxContextTokens { get; set; } = 4000;
/// <summary>
/// Gets or sets the maximum tokens for history.
/// </summary>
public int MaxHistoryTokens { get; set; } = 2000;
/// <summary>
/// Gets or sets the system prompt version for tracking.
/// </summary>
public string SystemPromptVersion { get; set; } = "v1.0.0";
}

@@ -0,0 +1,488 @@
// <copyright file="ChatResponseStreamer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Streams chat responses as Server-Sent Events.
/// Sprint: SPRINT_20260107_006_003 Task CH-006
/// </summary>
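/// <example>
/// Illustrative sketch; the token source, HTTP response writer, and cancellation token are assumed to exist:
/// <code>
/// var streamer = new ChatResponseStreamer(logger);
/// await foreach (var evt in streamer.StreamResponseAsync(tokenSource, conversationId, turnId, ct))
/// {
/// await response.WriteAsync(ChatResponseStreamer.FormatAsSSE(evt), ct);
/// }
/// </code>
/// </example>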
public sealed class ChatResponseStreamer
{
private readonly ILogger<ChatResponseStreamer> _logger;
private readonly StreamingOptions _options;
/// <summary>
/// Initializes a new instance of the <see cref="ChatResponseStreamer"/> class.
/// </summary>
public ChatResponseStreamer(
ILogger<ChatResponseStreamer> logger,
StreamingOptions? options = null)
{
_logger = logger;
_options = options ?? new StreamingOptions();
}
/// <summary>
/// Streams response tokens from an LLM as Server-Sent Events.
/// </summary>
/// <param name="tokenSource">The source of tokens from the LLM.</param>
/// <param name="conversationId">The conversation ID.</param>
/// <param name="turnId">The turn ID being generated.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Async enumerable of SSE events.</returns>
public async IAsyncEnumerable<StreamEvent> StreamResponseAsync(
IAsyncEnumerable<TokenChunk> tokenSource,
string conversationId,
string turnId,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var contentBuilder = new StringBuilder();
var citations = new List<CitationEvent>();
var actions = new List<ActionEvent>();
var tokenCount = 0;
var startTime = DateTimeOffset.UtcNow;
// Send start event
yield return new StreamEvent(StreamEventType.Start, new StartEventData
{
ConversationId = conversationId,
TurnId = turnId,
Timestamp = startTime.ToString("O", CultureInfo.InvariantCulture)
});
await foreach (var chunk in tokenSource.WithCancellation(cancellationToken).ConfigureAwait(false))
{
tokenCount++;
contentBuilder.Append(chunk.Content);
// Yield token event
yield return new StreamEvent(StreamEventType.Token, new TokenEventData
{
Content = chunk.Content,
Index = tokenCount
});
// Check for citations in the accumulated content
var newCitations = ExtractNewCitations(contentBuilder.ToString(), citations.Count);
foreach (var citation in newCitations)
{
citations.Add(citation);
yield return new StreamEvent(StreamEventType.Citation, citation);
}
// Check for action proposals
var newActions = ExtractNewActions(contentBuilder.ToString(), actions.Count);
foreach (var action in newActions)
{
actions.Add(action);
yield return new StreamEvent(StreamEventType.Action, action);
}
// Periodically send progress events
if (tokenCount % _options.ProgressInterval == 0)
{
yield return new StreamEvent(StreamEventType.Progress, new ProgressEventData
{
TokensGenerated = tokenCount,
ElapsedMs = (int)(DateTimeOffset.UtcNow - startTime).TotalMilliseconds
});
}
}
// Send completion event
var endTime = DateTimeOffset.UtcNow;
var groundingScore = CalculateGroundingScore(citations.Count, contentBuilder.Length);
yield return new StreamEvent(StreamEventType.Done, new DoneEventData
{
TurnId = turnId,
TotalTokens = tokenCount,
CitationCount = citations.Count,
ActionCount = actions.Count,
GroundingScore = groundingScore,
DurationMs = (int)(endTime - startTime).TotalMilliseconds,
Timestamp = endTime.ToString("O", CultureInfo.InvariantCulture)
});
_logger.LogInformation(
"Stream completed: conversation={ConversationId}, turn={TurnId}, tokens={Tokens}, grounding={Grounding:F2}",
conversationId, turnId, tokenCount, groundingScore);
}
/// <summary>
/// Formats a stream event as an SSE string.
/// </summary>
public static string FormatAsSSE(StreamEvent evt)
{
var sb = new StringBuilder();
sb.Append("event: ");
sb.AppendLine(evt.Type.ToString().ToLowerInvariant());
var json = JsonSerializer.Serialize(evt.Data, JsonOptions);
sb.Append("data: ");
sb.AppendLine(json);
sb.AppendLine(); // Empty line to end the event
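// Example frame (illustrative):
//   event: token
//   data: {"content":"api-","index":12}
//   (blank line terminates the event)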
return sb.ToString();
}
/// <summary>
/// Handles connection drops by checkpointing.
/// </summary>
public StreamCheckpoint CreateCheckpoint(
string conversationId,
string turnId,
int tokenIndex,
string partialContent)
{
return new StreamCheckpoint
{
ConversationId = conversationId,
TurnId = turnId,
TokenIndex = tokenIndex,
PartialContent = partialContent,
CreatedAt = DateTimeOffset.UtcNow
};
}
/// <summary>
/// Resumes streaming from a checkpoint.
/// </summary>
public async IAsyncEnumerable<StreamEvent> ResumeFromCheckpointAsync(
StreamCheckpoint checkpoint,
IAsyncEnumerable<TokenChunk> tokenSource,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Send resume event
yield return new StreamEvent(StreamEventType.Resume, new ResumeEventData
{
ConversationId = checkpoint.ConversationId,
TurnId = checkpoint.TurnId,
ResumedFromToken = checkpoint.TokenIndex
});
// Skip tokens we already have
var skipCount = checkpoint.TokenIndex;
var skipped = 0;
await foreach (var chunk in tokenSource.WithCancellation(cancellationToken).ConfigureAwait(false))
{
if (skipped < skipCount)
{
skipped++;
continue;
}
// Continue numbering from the checkpointed index.
skipped++;
yield return new StreamEvent(StreamEventType.Token, new TokenEventData
{
Content = chunk.Content,
Index = skipped
});
}
}
private List<CitationEvent> ExtractNewCitations(string content, int existingCount)
{
var citations = new List<CitationEvent>();
// Pattern: [type:path]
var matches = System.Text.RegularExpressions.Regex.Matches(
content,
@"\[(?<type>sbom|reach|runtime|vex|attest|auth|docs):(?<path>[^\]]+)\]");
for (int i = existingCount; i < matches.Count; i++)
{
var match = matches[i];
citations.Add(new CitationEvent
{
Type = match.Groups["type"].Value,
Path = match.Groups["path"].Value,
Index = i + 1,
Verified = false // Will be verified by GroundingValidator
});
}
return citations;
}
private List<ActionEvent> ExtractNewActions(string content, int existingCount)
{
var actions = new List<ActionEvent>();
// Pattern: [Label]{action:type,params}
var matches = System.Text.RegularExpressions.Regex.Matches(
content,
@"\[(?<label>[^\]]+)\]\{action:(?<type>\w+)(?:,(?<params>[^}]*))?\}");
for (int i = existingCount; i < matches.Count; i++)
{
var match = matches[i];
actions.Add(new ActionEvent
{
Type = match.Groups["type"].Value,
Label = match.Groups["label"].Value,
Params = match.Groups["params"].Value,
Index = i + 1,
Enabled = true // Will be validated by ActionProposalParser
});
}
return actions;
}
private static double CalculateGroundingScore(int citationCount, int contentLength)
{
if (contentLength == 0)
{
return 0;
}
// Rough heuristic: expect ~1 citation per 200 characters
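// e.g. 2 citations in a 600-character response: expected = 3.0, score = min(1.0, 2 / 3.0) ≈ 0.67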
var expectedCitations = contentLength / 200.0;
if (expectedCitations < 1)
{
expectedCitations = 1;
}
var ratio = citationCount / expectedCitations;
return Math.Min(1.0, ratio);
}
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
}
/// <summary>
/// A token chunk from the LLM.
/// </summary>
public sealed record TokenChunk
{
/// <summary>Gets the token content.</summary>
public required string Content { get; init; }
/// <summary>Gets optional metadata.</summary>
public ImmutableDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Types of stream events.
/// </summary>
public enum StreamEventType
{
/// <summary>Stream starting.</summary>
Start,
/// <summary>Token generated.</summary>
Token,
/// <summary>Citation extracted.</summary>
Citation,
/// <summary>Action proposal detected.</summary>
Action,
/// <summary>Progress update.</summary>
Progress,
/// <summary>Stream completed.</summary>
Done,
/// <summary>Error occurred.</summary>
Error,
/// <summary>Stream resumed.</summary>
Resume
}
/// <summary>
/// A stream event with type and data.
/// </summary>
public sealed record StreamEvent(StreamEventType Type, object Data);
/// <summary>
/// Start event data.
/// </summary>
public sealed record StartEventData
{
/// <summary>Gets the conversation ID.</summary>
public required string ConversationId { get; init; }
/// <summary>Gets the turn ID.</summary>
public required string TurnId { get; init; }
/// <summary>Gets the timestamp.</summary>
public required string Timestamp { get; init; }
}
/// <summary>
/// Token event data.
/// </summary>
public sealed record TokenEventData
{
/// <summary>Gets the token content.</summary>
public required string Content { get; init; }
/// <summary>Gets the token index.</summary>
public required int Index { get; init; }
}
/// <summary>
/// Citation event data.
/// </summary>
public sealed record CitationEvent
{
/// <summary>Gets the citation type.</summary>
public required string Type { get; init; }
/// <summary>Gets the citation path.</summary>
public required string Path { get; init; }
/// <summary>Gets the citation index.</summary>
public required int Index { get; init; }
/// <summary>Gets whether the citation is verified.</summary>
public bool Verified { get; init; }
}
/// <summary>
/// Action event data.
/// </summary>
public sealed record ActionEvent
{
/// <summary>Gets the action type.</summary>
public required string Type { get; init; }
/// <summary>Gets the action label.</summary>
public required string Label { get; init; }
/// <summary>Gets the action parameters.</summary>
public required string Params { get; init; }
/// <summary>Gets the action index.</summary>
public required int Index { get; init; }
/// <summary>Gets whether the action is enabled.</summary>
public bool Enabled { get; init; }
}
/// <summary>
/// Progress event data.
/// </summary>
public sealed record ProgressEventData
{
/// <summary>Gets tokens generated so far.</summary>
public required int TokensGenerated { get; init; }
/// <summary>Gets elapsed milliseconds.</summary>
public required int ElapsedMs { get; init; }
}
/// <summary>
/// Done event data.
/// </summary>
public sealed record DoneEventData
{
/// <summary>Gets the turn ID.</summary>
public required string TurnId { get; init; }
/// <summary>Gets total tokens.</summary>
public required int TotalTokens { get; init; }
/// <summary>Gets citation count.</summary>
public required int CitationCount { get; init; }
/// <summary>Gets action count.</summary>
public required int ActionCount { get; init; }
/// <summary>Gets the grounding score.</summary>
public required double GroundingScore { get; init; }
/// <summary>Gets duration in milliseconds.</summary>
public required int DurationMs { get; init; }
/// <summary>Gets the timestamp.</summary>
public required string Timestamp { get; init; }
}
/// <summary>
/// Error event data.
/// </summary>
public sealed record ErrorEventData
{
/// <summary>Gets the error code.</summary>
public required string Code { get; init; }
/// <summary>Gets the error message.</summary>
public required string Message { get; init; }
/// <summary>Gets tokens generated before error.</summary>
public int TokensGenerated { get; init; }
}
/// <summary>
/// Resume event data.
/// </summary>
public sealed record ResumeEventData
{
/// <summary>Gets the conversation ID.</summary>
public required string ConversationId { get; init; }
/// <summary>Gets the turn ID.</summary>
public required string TurnId { get; init; }
/// <summary>Gets the token index resumed from.</summary>
public required int ResumedFromToken { get; init; }
}
/// <summary>
/// Checkpoint for resuming streams.
/// </summary>
public sealed record StreamCheckpoint
{
/// <summary>Gets the conversation ID.</summary>
public required string ConversationId { get; init; }
/// <summary>Gets the turn ID.</summary>
public required string TurnId { get; init; }
/// <summary>Gets the token index.</summary>
public required int TokenIndex { get; init; }
/// <summary>Gets partial content accumulated.</summary>
public required string PartialContent { get; init; }
/// <summary>Gets when checkpoint was created.</summary>
public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Options for streaming.
/// </summary>
public sealed class StreamingOptions
{
/// <summary>
/// Gets or sets the interval for progress events (in tokens).
/// Default: 50 tokens.
/// </summary>
public int ProgressInterval { get; set; } = 50;
/// <summary>
/// Gets or sets the timeout for idle streams.
/// Default: 30 seconds.
/// </summary>
public TimeSpan IdleTimeout { get; set; } = TimeSpan.FromSeconds(30);
}

@@ -0,0 +1,377 @@
// <copyright file="ConversationContextBuilder.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Builds context from conversation history for LLM prompts.
/// Sprint: SPRINT_20260107_006_003 Task CH-002
/// </summary>
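/// <example>
/// Minimal sketch, assuming an existing <see cref="Conversation"/> instance:
/// <code>
/// var builder = new ConversationContextBuilder();
/// var context = builder.Build(conversation, tokenBudget: 2000);
/// var promptSection = context.FormatForPrompt();
/// </code>
/// </example>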
public sealed class ConversationContextBuilder
{
private readonly ConversationContextOptions _options;
/// <summary>
/// Initializes a new instance of the <see cref="ConversationContextBuilder"/> class.
/// </summary>
public ConversationContextBuilder(ConversationContextOptions? options = null)
{
_options = options ?? new ConversationContextOptions();
}
/// <summary>
/// Builds context from a conversation for use in LLM prompts.
/// </summary>
/// <param name="conversation">The conversation to build context from.</param>
/// <param name="tokenBudget">The maximum token budget for context.</param>
/// <returns>The built context.</returns>
public BuiltContext Build(Conversation conversation, int? tokenBudget = null)
{
var budget = tokenBudget ?? _options.DefaultTokenBudget;
var builder = new BuiltContextBuilder();
// Add conversation context (CVE, component, scan, etc.)
AddConversationContext(builder, conversation.Context);
// Add policy context
if (conversation.Context.Policy is not null)
{
AddPolicyContext(builder, conversation.Context.Policy);
}
// Add evidence links
AddEvidenceContext(builder, conversation.Context.EvidenceLinks);
// Add conversation history (truncated to fit budget)
var historyTokens = Math.Max(0, budget - builder.EstimatedTokens);
AddConversationHistory(builder, conversation.Turns, historyTokens);
return builder.Build();
}
/// <summary>
/// Merges evidence links from a new turn into the conversation context.
/// </summary>
public ConversationContext MergeEvidence(
ConversationContext existing,
IEnumerable<EvidenceLink> newLinks)
{
var allLinks = existing.EvidenceLinks
.Concat(newLinks)
.DistinctBy(l => l.Uri)
.Take(_options.MaxEvidenceLinks)
.ToImmutableArray();
return existing with { EvidenceLinks = allLinks };
}
/// <summary>
/// Updates the conversation context with a new focus (CVE, component, etc.).
/// </summary>
public ConversationContext UpdateFocus(
ConversationContext existing,
string? cveId = null,
string? component = null,
string? imageDigest = null,
string? scanId = null,
string? sbomId = null)
{
return existing with
{
CurrentCveId = cveId ?? existing.CurrentCveId,
CurrentComponent = component ?? existing.CurrentComponent,
CurrentImageDigest = imageDigest ?? existing.CurrentImageDigest,
ScanId = scanId ?? existing.ScanId,
SbomId = sbomId ?? existing.SbomId
};
}
private void AddConversationContext(BuiltContextBuilder builder, ConversationContext context)
{
if (context.CurrentCveId is not null)
{
builder.AddContextItem("Current CVE", context.CurrentCveId);
}
if (context.CurrentComponent is not null)
{
builder.AddContextItem("Current Component", context.CurrentComponent);
}
if (context.CurrentImageDigest is not null)
{
builder.AddContextItem("Image Digest", context.CurrentImageDigest);
}
if (context.ScanId is not null)
{
builder.AddContextItem("Scan ID", context.ScanId);
}
if (context.SbomId is not null)
{
builder.AddContextItem("SBOM ID", context.SbomId);
}
}
private static void AddPolicyContext(BuiltContextBuilder builder, PolicyContext policy)
{
if (policy.PolicyIds.Length > 0)
{
builder.AddContextItem("Policies", string.Join(", ", policy.PolicyIds));
}
if (policy.Permissions.Length > 0)
{
builder.AddContextItem("User Permissions", string.Join(", ", policy.Permissions));
}
builder.AddContextItem("Automation Allowed", policy.AutomationAllowed ? "Yes" : "No");
}
private static void AddEvidenceContext(BuiltContextBuilder builder, ImmutableArray<EvidenceLink> links)
{
if (links.IsEmpty)
{
return;
}
var evidenceByType = links.GroupBy(l => l.Type);
foreach (var group in evidenceByType)
{
var uris = group.Select(l => l.Uri).ToList();
builder.AddEvidenceReference(group.Key, uris);
}
}
private void AddConversationHistory(
BuiltContextBuilder builder,
ImmutableArray<ConversationTurn> turns,
int tokenBudget)
{
if (turns.IsEmpty)
{
return;
}
// Walk turns from newest to oldest; inserting at index 0 keeps the output in chronological order
var selectedTurns = new List<ConversationTurn>();
var currentTokens = 0;
// Always include the most recent turns within budget
for (int i = turns.Length - 1; i >= 0 && currentTokens < tokenBudget; i--)
{
var turn = turns[i];
var turnTokens = EstimateTokens(turn.Content);
if (currentTokens + turnTokens <= tokenBudget)
{
selectedTurns.Insert(0, turn);
currentTokens += turnTokens;
}
else if (selectedTurns.Count == 0)
{
// Always include at least the last turn, truncated if needed
var truncatedContent = TruncateToTokens(turn.Content, tokenBudget);
selectedTurns.Add(turn with { Content = truncatedContent });
break;
}
else
{
break;
}
}
// Add summary indicator if we truncated
var wasTruncated = selectedTurns.Count < turns.Length;
builder.AddHistory(selectedTurns, wasTruncated, turns.Length - selectedTurns.Count);
}
private static int EstimateTokens(string text)
{
// Rough estimate: ~4 characters per token for English
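// e.g. 1,000 characters of history text is estimated at 250 tokens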
return (text.Length + 3) / 4;
}
private static string TruncateToTokens(string text, int maxTokens)
{
var maxChars = maxTokens * 4;
if (text.Length <= maxChars)
{
return text;
}
return text[..(maxChars - 3)] + "...";
}
}
/// <summary>
/// Builder for constructing context output.
/// </summary>
internal sealed class BuiltContextBuilder
{
private readonly List<(string Key, string Value)> _contextItems = new();
private readonly Dictionary<EvidenceLinkType, List<string>> _evidence = new();
private readonly List<ConversationTurn> _history = new();
private bool _historyTruncated;
private int _omittedTurnCount;
public int EstimatedTokens { get; private set; }
public void AddContextItem(string key, string value)
{
_contextItems.Add((key, value));
EstimatedTokens += (key.Length + value.Length + 4) / 4;
}
public void AddEvidenceReference(EvidenceLinkType type, List<string> uris)
{
_evidence[type] = uris;
EstimatedTokens += uris.Sum(u => u.Length) / 4;
}
public void AddHistory(List<ConversationTurn> turns, bool truncated, int omittedCount)
{
_history.AddRange(turns);
_historyTruncated = truncated;
_omittedTurnCount = omittedCount;
EstimatedTokens += turns.Sum(t => t.Content.Length) / 4;
}
public BuiltContext Build()
{
return new BuiltContext
{
ContextItems = _contextItems.ToImmutableArray(),
EvidenceReferences = _evidence.ToImmutableDictionary(
kv => kv.Key,
kv => (IReadOnlyList<string>)kv.Value),
History = _history.ToImmutableArray(),
HistoryTruncated = _historyTruncated,
OmittedTurnCount = _omittedTurnCount,
EstimatedTokenCount = EstimatedTokens
};
}
}
/// <summary>
/// The built context for LLM prompts.
/// </summary>
public sealed record BuiltContext
{
/// <summary>
/// Gets the context items (key-value pairs).
/// </summary>
public ImmutableArray<(string Key, string Value)> ContextItems { get; init; } =
ImmutableArray<(string, string)>.Empty;
/// <summary>
/// Gets evidence references grouped by type.
/// </summary>
public ImmutableDictionary<EvidenceLinkType, IReadOnlyList<string>> EvidenceReferences { get; init; } =
ImmutableDictionary<EvidenceLinkType, IReadOnlyList<string>>.Empty;
/// <summary>
/// Gets the conversation history.
/// </summary>
public ImmutableArray<ConversationTurn> History { get; init; } =
ImmutableArray<ConversationTurn>.Empty;
/// <summary>
/// Gets whether the history was truncated.
/// </summary>
public bool HistoryTruncated { get; init; }
/// <summary>
/// Gets the number of omitted turns.
/// </summary>
public int OmittedTurnCount { get; init; }
/// <summary>
/// Gets the estimated token count.
/// </summary>
public int EstimatedTokenCount { get; init; }
/// <summary>
/// Formats the context as a string for prompt injection.
/// </summary>
public string FormatForPrompt()
{
var sb = new StringBuilder();
// Context section
if (ContextItems.Length > 0)
{
sb.AppendLine("## Current Context");
foreach (var (key, value) in ContextItems)
{
sb.AppendLine($"- **{key}**: {value}");
}
sb.AppendLine();
}
// Evidence section
if (EvidenceReferences.Count > 0)
{
sb.AppendLine("## Available Evidence");
foreach (var (type, uris) in EvidenceReferences)
{
sb.AppendLine($"### {type}");
foreach (var uri in uris.Take(5))
{
sb.AppendLine($"- [{uri}]");
}
if (uris.Count > 5)
{
sb.AppendLine($"- ... and {uris.Count - 5} more");
}
}
sb.AppendLine();
}
// History section
if (History.Length > 0)
{
sb.AppendLine("## Conversation History");
if (HistoryTruncated)
{
sb.AppendLine($"*({OmittedTurnCount} earlier messages omitted)*");
}
foreach (var turn in History)
{
var role = turn.Role switch
{
TurnRole.User => "User",
TurnRole.Assistant => "Assistant",
TurnRole.System => "System",
_ => "Unknown"
};
sb.AppendLine($"**{role}**: {turn.Content}");
}
}
return sb.ToString();
}
}
/// <summary>
/// Options for conversation context building.
/// </summary>
public sealed class ConversationContextOptions
{
/// <summary>
/// Gets or sets the default token budget.
/// Default: 4000 tokens.
/// </summary>
public int DefaultTokenBudget { get; set; } = 4000;
/// <summary>
/// Gets or sets the maximum evidence links to include.
/// Default: 20.
/// </summary>
public int MaxEvidenceLinks { get; set; } = 20;
}

@@ -0,0 +1,648 @@
// <copyright file="ConversationService.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Service for managing AdvisoryAI conversation sessions.
/// Sprint: SPRINT_20260107_006_003 Task CH-001
/// </summary>
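/// <example>
/// Illustrative sketch; the service instance (e.g. resolved from DI) and the IDs are assumptions:
/// <code>
/// var conversation = await service.CreateAsync(new ConversationRequest
/// {
/// TenantId = "tenant-1",
/// UserId = "user-1"
/// });
/// await service.AddTurnAsync(conversation.ConversationId, new TurnRequest
/// {
/// Role = TurnRole.User,
/// Content = "What is the status of CVE-2026-0001?"
/// });
/// </code>
/// </example>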
public sealed class ConversationService : IConversationService
{
private readonly ConcurrentDictionary<string, Conversation> _conversations = new();
private readonly ConversationOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ConversationService> _logger;
private readonly IGuidGenerator _guidGenerator;
/// <summary>
/// Initializes a new instance of the <see cref="ConversationService"/> class.
/// </summary>
public ConversationService(
IOptions<ConversationOptions> options,
TimeProvider timeProvider,
IGuidGenerator guidGenerator,
ILogger<ConversationService> logger)
{
_options = options.Value;
_timeProvider = timeProvider;
_guidGenerator = guidGenerator;
_logger = logger;
}
/// <inheritdoc/>
public Task<Conversation> CreateAsync(
ConversationRequest request,
CancellationToken cancellationToken = default)
{
var conversationId = GenerateConversationId(request);
var now = _timeProvider.GetUtcNow();
var conversation = new Conversation
{
ConversationId = conversationId,
TenantId = request.TenantId,
UserId = request.UserId,
CreatedAt = now,
UpdatedAt = now,
Context = request.InitialContext ?? new ConversationContext(),
Turns = ImmutableArray<ConversationTurn>.Empty,
Metadata = request.Metadata ?? ImmutableDictionary<string, string>.Empty
};
_conversations[conversationId] = conversation;
_logger.LogDebug(
"Created conversation {ConversationId} for user {UserId}",
conversationId, request.UserId);
return Task.FromResult(conversation);
}
/// <inheritdoc/>
public Task<Conversation?> GetAsync(
string conversationId,
CancellationToken cancellationToken = default)
{
_conversations.TryGetValue(conversationId, out var conversation);
return Task.FromResult(conversation);
}
/// <inheritdoc/>
public Task<ConversationTurn> AddTurnAsync(
string conversationId,
TurnRequest request,
CancellationToken cancellationToken = default)
{
if (!_conversations.TryGetValue(conversationId, out var conversation))
{
throw new ConversationNotFoundException(conversationId);
}
var now = _timeProvider.GetUtcNow();
var turnId = $"{conversationId}-{conversation.Turns.Length + 1}";
var turn = new ConversationTurn
{
TurnId = turnId,
Role = request.Role,
Content = request.Content,
Timestamp = now,
EvidenceLinks = request.EvidenceLinks ?? ImmutableArray<EvidenceLink>.Empty,
ProposedActions = request.ProposedActions ?? ImmutableArray<ProposedAction>.Empty,
Metadata = request.Metadata ?? ImmutableDictionary<string, string>.Empty
};
// Enforce max turns limit
var turns = conversation.Turns;
if (turns.Length >= _options.MaxTurnsPerConversation)
{
// Remove oldest turn to make room
turns = turns.RemoveAt(0);
_logger.LogDebug(
"Conversation {ConversationId} exceeded max turns, removed oldest",
conversationId);
}
var updatedConversation = conversation with
{
Turns = turns.Add(turn),
UpdatedAt = now
};
_conversations[conversationId] = updatedConversation;
return Task.FromResult(turn);
}
/// <inheritdoc/>
public Task<bool> DeleteAsync(
string conversationId,
CancellationToken cancellationToken = default)
{
var removed = _conversations.TryRemove(conversationId, out _);
if (removed)
{
_logger.LogDebug("Deleted conversation {ConversationId}", conversationId);
}
return Task.FromResult(removed);
}
/// <inheritdoc/>
public Task<IReadOnlyList<Conversation>> ListAsync(
string tenantId,
string? userId = null,
int? limit = null,
CancellationToken cancellationToken = default)
{
var query = _conversations.Values
.Where(c => c.TenantId == tenantId);
if (userId is not null)
{
query = query.Where(c => c.UserId == userId);
}
var result = query
.OrderByDescending(c => c.UpdatedAt)
.Take(limit ?? 50)
.ToList();
return Task.FromResult<IReadOnlyList<Conversation>>(result);
}
/// <inheritdoc/>
public Task<Conversation?> UpdateContextAsync(
string conversationId,
ConversationContext context,
CancellationToken cancellationToken = default)
{
if (!_conversations.TryGetValue(conversationId, out var conversation))
{
return Task.FromResult<Conversation?>(null);
}
var updatedConversation = conversation with
{
Context = context,
UpdatedAt = _timeProvider.GetUtcNow()
};
_conversations[conversationId] = updatedConversation;
return Task.FromResult<Conversation?>(updatedConversation);
}
/// <summary>
/// Removes stale conversations older than the retention period.
/// </summary>
public int PruneStaleConversations()
{
var cutoff = _timeProvider.GetUtcNow() - _options.ConversationRetention;
var staleIds = _conversations
.Where(kv => kv.Value.UpdatedAt < cutoff)
.Select(kv => kv.Key)
.ToList();
foreach (var id in staleIds)
{
_conversations.TryRemove(id, out _);
}
if (staleIds.Count > 0)
{
_logger.LogInformation(
"Pruned {Count} stale conversations older than {Cutoff}",
staleIds.Count, cutoff);
}
return staleIds.Count;
}
private string GenerateConversationId(ConversationRequest request)
{
// Derive the conversation ID by hashing tenant, user, timestamp, and a fresh GUID
var input = $"{request.TenantId}:{request.UserId}:{_timeProvider.GetUtcNow():O}:{_guidGenerator.NewGuid()}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
// Format as UUID
var guidBytes = new byte[16];
Array.Copy(hash, guidBytes, 16);
// Set the version (5) and RFC 4122 variant bits so the hash formats as a valid UUID
guidBytes[6] = (byte)((guidBytes[6] & 0x0F) | 0x50);
guidBytes[8] = (byte)((guidBytes[8] & 0x3F) | 0x80);
return new Guid(guidBytes).ToString("N");
}
}
/// <summary>
/// Interface for conversation session management.
/// </summary>
public interface IConversationService
{
/// <summary>
/// Creates a new conversation session.
/// </summary>
Task<Conversation> CreateAsync(ConversationRequest request, CancellationToken cancellationToken = default);
/// <summary>
/// Gets a conversation by ID.
/// </summary>
Task<Conversation?> GetAsync(string conversationId, CancellationToken cancellationToken = default);
/// <summary>
/// Adds a turn (message) to a conversation.
/// </summary>
Task<ConversationTurn> AddTurnAsync(string conversationId, TurnRequest request, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a conversation.
/// </summary>
Task<bool> DeleteAsync(string conversationId, CancellationToken cancellationToken = default);
/// <summary>
/// Lists conversations for a tenant/user.
/// </summary>
Task<IReadOnlyList<Conversation>> ListAsync(string tenantId, string? userId = null, int? limit = null, CancellationToken cancellationToken = default);
/// <summary>
/// Updates the context for a conversation.
/// </summary>
Task<Conversation?> UpdateContextAsync(string conversationId, ConversationContext context, CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for GUID generation (for testability).
/// </summary>
public interface IGuidGenerator
{
/// <summary>
/// Generates a new GUID.
/// </summary>
Guid NewGuid();
}
/// <summary>
/// Default GUID generator.
/// </summary>
public sealed class DefaultGuidGenerator : IGuidGenerator
{
/// <inheritdoc/>
public Guid NewGuid() => Guid.NewGuid();
}
/// <summary>
/// A conversation session.
/// </summary>
public sealed record Conversation
{
/// <summary>
/// Gets the conversation identifier.
/// </summary>
public required string ConversationId { get; init; }
/// <summary>
/// Gets the tenant identifier.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Gets the user identifier.
/// </summary>
public required string UserId { get; init; }
/// <summary>
/// Gets when the conversation was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Gets when the conversation was last updated.
/// </summary>
public required DateTimeOffset UpdatedAt { get; init; }
/// <summary>
/// Gets the conversation context.
/// </summary>
public required ConversationContext Context { get; init; }
/// <summary>
/// Gets the conversation turns (messages).
/// </summary>
public ImmutableArray<ConversationTurn> Turns { get; init; } = ImmutableArray<ConversationTurn>.Empty;
/// <summary>
/// Gets additional metadata.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// Gets the turn count.
/// </summary>
public int TurnCount => Turns.Length;
}
/// <summary>
/// Context information for a conversation.
/// </summary>
public sealed record ConversationContext
{
/// <summary>
/// Gets the tenant identifier for resolution.
/// </summary>
public string? TenantId { get; init; }
/// <summary>
/// Gets the current CVE being discussed.
/// </summary>
public string? CurrentCveId { get; init; }
/// <summary>
/// Gets the current component PURL.
/// </summary>
public string? CurrentComponent { get; init; }
/// <summary>
/// Gets the current image digest.
/// </summary>
public string? CurrentImageDigest { get; init; }
/// <summary>
/// Gets the scan ID in context.
/// </summary>
public string? ScanId { get; init; }
/// <summary>
/// Gets the SBOM ID in context.
/// </summary>
public string? SbomId { get; init; }
/// <summary>
/// Gets accumulated evidence links.
/// </summary>
public ImmutableArray<EvidenceLink> EvidenceLinks { get; init; } =
ImmutableArray<EvidenceLink>.Empty;
/// <summary>
/// Gets the policy context.
/// </summary>
public PolicyContext? Policy { get; init; }
}
/// <summary>
/// Policy context for a conversation.
/// </summary>
public sealed record PolicyContext
{
/// <summary>
/// Gets the policy IDs in scope.
/// </summary>
public ImmutableArray<string> PolicyIds { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Gets the user's permissions.
/// </summary>
public ImmutableArray<string> Permissions { get; init; } = ImmutableArray<string>.Empty;
/// <summary>
/// Gets whether automation is allowed.
/// </summary>
public bool AutomationAllowed { get; init; }
}
/// <summary>
/// A single turn in a conversation.
/// </summary>
public sealed record ConversationTurn
{
/// <summary>
/// Gets the turn identifier.
/// </summary>
public required string TurnId { get; init; }
/// <summary>
/// Gets the role (user/assistant/system).
/// </summary>
public required TurnRole Role { get; init; }
/// <summary>
/// Gets the message content.
/// </summary>
public required string Content { get; init; }
/// <summary>
/// Gets the timestamp.
/// </summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Gets evidence links referenced in this turn.
/// </summary>
public ImmutableArray<EvidenceLink> EvidenceLinks { get; init; } =
ImmutableArray<EvidenceLink>.Empty;
/// <summary>
/// Gets proposed actions in this turn.
/// </summary>
public ImmutableArray<ProposedAction> ProposedActions { get; init; } =
ImmutableArray<ProposedAction>.Empty;
/// <summary>
/// Gets additional metadata.
/// </summary>
public ImmutableDictionary<string, string> Metadata { get; init; } =
ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Turn role (who is speaking).
/// </summary>
public enum TurnRole
{
/// <summary>User message.</summary>
User,
/// <summary>Assistant (AdvisoryAI) response.</summary>
Assistant,
/// <summary>System message.</summary>
System
}
/// <summary>
/// A link to evidence (SBOM, DSSE, call-graph, etc.).
/// </summary>
public sealed record EvidenceLink
{
/// <summary>
/// Gets the link type.
/// </summary>
public required EvidenceLinkType Type { get; init; }
/// <summary>
/// Gets the URI (e.g., "sbom:abc123", "dsse:xyz789").
/// </summary>
public required string Uri { get; init; }
/// <summary>
/// Gets the display label.
/// </summary>
public string? Label { get; init; }
/// <summary>
/// Gets the confidence score (if applicable).
/// </summary>
public double? Confidence { get; init; }
}
/// <summary>
/// Types of evidence links.
/// </summary>
public enum EvidenceLinkType
{
/// <summary>SBOM reference.</summary>
Sbom,
/// <summary>DSSE envelope.</summary>
Dsse,
/// <summary>Call graph node.</summary>
CallGraph,
/// <summary>Reachability analysis.</summary>
Reachability,
/// <summary>Runtime trace.</summary>
RuntimeTrace,
/// <summary>VEX statement.</summary>
Vex,
/// <summary>Documentation link.</summary>
Documentation,
/// <summary>Authority key.</summary>
AuthorityKey,
/// <summary>Other evidence.</summary>
Other
}
/// <summary>
/// A proposed action from AdvisoryAI.
/// </summary>
public sealed record ProposedAction
{
/// <summary>
/// Gets the action type.
/// </summary>
public required string ActionType { get; init; }
/// <summary>
/// Gets the action label for display.
/// </summary>
public required string Label { get; init; }
/// <summary>
/// Gets the action payload (JSON).
/// </summary>
public string? Payload { get; init; }
/// <summary>
/// Gets whether this action requires confirmation.
/// </summary>
public bool RequiresConfirmation { get; init; } = true;
/// <summary>
/// Gets the policy gate for this action.
/// </summary>
public string? PolicyGate { get; init; }
}
/// <summary>
/// Request to create a conversation.
/// </summary>
public sealed record ConversationRequest
{
/// <summary>
/// Gets the tenant ID.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Gets the user ID.
/// </summary>
public required string UserId { get; init; }
/// <summary>
/// Gets the initial context.
/// </summary>
public ConversationContext? InitialContext { get; init; }
/// <summary>
/// Gets additional metadata.
/// </summary>
public ImmutableDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Request to add a turn to a conversation.
/// </summary>
public sealed record TurnRequest
{
/// <summary>
/// Gets the role.
/// </summary>
public required TurnRole Role { get; init; }
/// <summary>
/// Gets the content.
/// </summary>
public required string Content { get; init; }
/// <summary>
/// Gets evidence links in this turn.
/// </summary>
public ImmutableArray<EvidenceLink>? EvidenceLinks { get; init; }
/// <summary>
/// Gets proposed actions in this turn.
/// </summary>
public ImmutableArray<ProposedAction>? ProposedActions { get; init; }
/// <summary>
/// Gets additional metadata.
/// </summary>
public ImmutableDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Configuration options for conversations.
/// </summary>
public sealed class ConversationOptions
{
/// <summary>
/// Gets or sets the maximum turns per conversation.
/// Default: 50.
/// </summary>
public int MaxTurnsPerConversation { get; set; } = 50;
/// <summary>
/// Gets or sets the conversation retention period.
/// Default: 7 days.
/// </summary>
public TimeSpan ConversationRetention { get; set; } = TimeSpan.FromDays(7);
}
/// <summary>
/// Exception thrown when a conversation is not found.
/// </summary>
public sealed class ConversationNotFoundException : Exception
{
/// <summary>
/// Initializes a new instance of the <see cref="ConversationNotFoundException"/> class.
/// </summary>
public ConversationNotFoundException(string conversationId)
: base($"Conversation '{conversationId}' not found")
{
ConversationId = conversationId;
}
/// <summary>
/// Gets the conversation ID that was not found.
/// </summary>
public string ConversationId { get; }
}

@@ -0,0 +1,601 @@
// <copyright file="GroundingValidator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Globalization;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.AdvisoryAI.Chat;
/// <summary>
/// Validates that AI responses are properly grounded with citations.
/// Sprint: SPRINT_20260107_006_003 Task CH-007
/// </summary>
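/// <example>
/// Minimal sketch, assuming an <see cref="IObjectLinkResolver"/> implementation is available:
/// <code>
/// var validator = new GroundingValidator(linkResolver, logger);
/// var result = await validator.ValidateAsync(responseText, conversation.Context, ct);
/// if (!result.IsAcceptable)
/// {
/// var rejection = validator.RejectResponse(result);
/// // rejection.Reason lists the failing issues and the score shortfall.
/// }
/// </code>
/// </example>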
public sealed partial class GroundingValidator
{
private readonly IObjectLinkResolver _linkResolver;
private readonly ILogger<GroundingValidator> _logger;
private readonly GroundingOptions _options;
/// <summary>
/// Initializes a new instance of the <see cref="GroundingValidator"/> class.
/// </summary>
public GroundingValidator(
IObjectLinkResolver linkResolver,
ILogger<GroundingValidator> logger,
GroundingOptions? options = null)
{
_linkResolver = linkResolver;
_logger = logger;
_options = options ?? new GroundingOptions();
}
/// <summary>
/// Validates a response for proper grounding.
/// </summary>
/// <param name="response">The AI response to validate.</param>
/// <param name="context">The conversation context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Validation result with grounding score.</returns>
public async Task<GroundingValidationResult> ValidateAsync(
string response,
ConversationContext context,
CancellationToken cancellationToken = default)
{
var extractedLinks = ExtractObjectLinks(response);
var claims = ExtractClaims(response);
var issues = new List<GroundingIssue>();
// Validate each link resolves to a real object
var validatedLinks = new List<ValidatedLink>();
foreach (var link in extractedLinks)
{
var resolution = await _linkResolver.ResolveAsync(
link.Type, link.Path, context.TenantId, cancellationToken).ConfigureAwait(false);
var validated = new ValidatedLink
{
Type = link.Type,
Path = link.Path,
Position = link.Position,
IsValid = resolution.Exists,
ResolvedUri = resolution.Uri,
ObjectType = resolution.ObjectType
};
validatedLinks.Add(validated);
if (!resolution.Exists)
{
issues.Add(new GroundingIssue
{
Type = GroundingIssueType.InvalidLink,
Message = $"Object link does not resolve: [{link.Type}:{link.Path}]",
Position = link.Position,
Severity = IssueSeverity.Error
});
}
}
// Check for ungrounded claims
var groundedClaims = 0;
var ungroundedClaims = new List<UngroundedClaim>();
foreach (var claim in claims)
{
var hasNearbyLink = validatedLinks.Any(l =>
l.IsValid &&
Math.Abs(l.Position - claim.Position) < _options.MaxLinkDistance);
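// A claim is considered grounded when a valid link starts within MaxLinkDistance characters of it.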
if (hasNearbyLink)
{
groundedClaims++;
}
else
{
ungroundedClaims.Add(claim);
issues.Add(new GroundingIssue
{
Type = GroundingIssueType.UngroundedClaim,
Message = $"Claim without nearby citation: \"{TruncateClaim(claim.Text)}\"",
Position = claim.Position,
Severity = IssueSeverity.Warning
});
}
}
// Calculate grounding score
var score = CalculateGroundingScore(
validatedLinks.Count(l => l.IsValid),
validatedLinks.Count,
groundedClaims,
claims.Count,
response.Length);
// Check if response should be rejected
var isAcceptable = score >= _options.MinGroundingScore;
if (!isAcceptable)
{
issues.Insert(0, new GroundingIssue
{
Type = GroundingIssueType.BelowThreshold,
Message = string.Format(
CultureInfo.InvariantCulture,
"Grounding score {0:F2} is below threshold {1:F2}",
score,
_options.MinGroundingScore),
Position = 0,
Severity = IssueSeverity.Critical
});
}
_logger.LogInformation(
"Grounding validation: score={Score:F2}, links={ValidLinks}/{TotalLinks}, claims={GroundedClaims}/{TotalClaims}, acceptable={IsAcceptable}",
score, validatedLinks.Count(l => l.IsValid), validatedLinks.Count, groundedClaims, claims.Count, isAcceptable);
return new GroundingValidationResult
{
GroundingScore = score,
IsAcceptable = isAcceptable,
ValidatedLinks = validatedLinks.ToImmutableArray(),
TotalClaims = claims.Count,
GroundedClaims = groundedClaims,
UngroundedClaims = ungroundedClaims.ToImmutableArray(),
Issues = issues.ToImmutableArray()
};
}
/// <summary>
/// Rejects a response that fails grounding validation.
/// </summary>
public RejectionResult RejectResponse(GroundingValidationResult validation)
{
var reason = new System.Text.StringBuilder();
reason.AppendLine("Response rejected due to insufficient grounding:");
reason.AppendLine();
foreach (var issue in validation.Issues.Where(i => i.Severity >= IssueSeverity.Error))
{
reason.AppendLine($"- {issue.Message}");
}
reason.AppendLine();
reason.AppendLine(string.Format(
CultureInfo.InvariantCulture,
"Grounding score: {0:P0} (minimum required: {1:P0})",
validation.GroundingScore,
_options.MinGroundingScore));
return new RejectionResult
{
Reason = reason.ToString(),
GroundingScore = validation.GroundingScore,
RequiredScore = _options.MinGroundingScore,
Issues = validation.Issues
};
}
/// <summary>
/// Suggests improvements for a poorly grounded response.
/// </summary>
public ImmutableArray<GroundingSuggestion> SuggestImprovements(GroundingValidationResult validation)
{
var suggestions = new List<GroundingSuggestion>();
if (validation.UngroundedClaims.Length > 0)
{
suggestions.Add(new GroundingSuggestion
{
Type = SuggestionType.AddCitations,
Message = $"Add citations for {validation.UngroundedClaims.Length} ungrounded claim(s)",
Examples = validation.UngroundedClaims
.Take(3)
.Select(c => $"Claim: \"{TruncateClaim(c.Text)}\" - needs evidence link")
.ToImmutableArray()
});
}
var invalidLinks = validation.ValidatedLinks.Where(l => !l.IsValid).ToList();
if (invalidLinks.Count > 0)
{
suggestions.Add(new GroundingSuggestion
{
Type = SuggestionType.FixLinks,
Message = $"Fix {invalidLinks.Count} invalid object link(s)",
Examples = invalidLinks
.Take(3)
.Select(l => $"Invalid: [{l.Type}:{l.Path}]")
.ToImmutableArray()
});
}
if (validation.ValidatedLinks.Length == 0 && validation.TotalClaims > 0)
{
suggestions.Add(new GroundingSuggestion
{
Type = SuggestionType.AddEvidence,
Message = "Response contains claims but no evidence links",
Examples = ImmutableArray.Create(
"Use [sbom:id] for SBOM references",
"Use [vex:issuer:digest] for VEX statements",
"Use [reach:service:function] for reachability data")
});
}
return suggestions.ToImmutableArray();
}
private List<ExtractedLink> ExtractObjectLinks(string response)
{
var links = new List<ExtractedLink>();
var matches = ObjectLinkRegex().Matches(response);
foreach (Match match in matches)
{
links.Add(new ExtractedLink
{
Type = match.Groups["type"].Value,
Path = match.Groups["path"].Value,
Position = match.Index
});
}
return links;
}
private List<UngroundedClaim> ExtractClaims(string response)
{
var claims = new List<UngroundedClaim>();
// Look for claim patterns: "is affected", "is vulnerable", "is not affected", etc.
var claimPatterns = ClaimPatternRegex().Matches(response);
foreach (Match match in claimPatterns)
{
claims.Add(new UngroundedClaim
{
Text = match.Value,
Position = match.Index,
ClaimType = DetermineClaimType(match.Value)
});
}
// Also look for severity/score statements
var severityMatches = SeverityClaimRegex().Matches(response);
foreach (Match match in severityMatches)
{
claims.Add(new UngroundedClaim
{
Text = match.Value,
Position = match.Index,
ClaimType = ClaimType.SeverityAssessment
});
}
return claims;
}
private static ClaimType DetermineClaimType(string text)
{
var lower = text.ToLowerInvariant();
if (lower.Contains("not affected") || lower.Contains("not vulnerable"))
{
return ClaimType.NotAffected;
}
if (lower.Contains("affected") || lower.Contains("vulnerable"))
{
return ClaimType.Affected;
}
if (lower.Contains("fixed") || lower.Contains("patched"))
{
return ClaimType.Fixed;
}
if (lower.Contains("under investigation"))
{
return ClaimType.UnderInvestigation;
}
return ClaimType.General;
}
private double CalculateGroundingScore(
int validLinks,
int totalLinks,
int groundedClaims,
int totalClaims,
int responseLength)
{
// Weight factors
const double linkValidityWeight = 0.4;
const double claimGroundingWeight = 0.4;
const double densityWeight = 0.2;
// Link validity score
var linkScore = totalLinks > 0 ? (double)validLinks / totalLinks : 0;
// Claim grounding score
var claimScore = totalClaims > 0 ? (double)groundedClaims / totalClaims : 1.0;
// Density score (links per 500 chars)
var expectedLinks = responseLength / 500.0;
if (expectedLinks < 1)
{
expectedLinks = 1;
}
var densityScore = Math.Min(1.0, validLinks / expectedLinks);
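// Worked example (illustrative): 3 of 4 links valid (0.75), 5 of 6 claims grounded (~0.83),
// and 3 valid links in a 1,200-char response (expected 2.4 links, so density caps at 1.0)
// give 0.75*0.4 + 0.83*0.4 + 1.0*0.2 ≈ 0.83.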
return (linkScore * linkValidityWeight) +
(claimScore * claimGroundingWeight) +
(densityScore * densityWeight);
}
private static string TruncateClaim(string claim)
{
const int maxLength = 50;
if (claim.Length <= maxLength)
{
return claim;
}
return claim[..(maxLength - 3)] + "...";
}
[GeneratedRegex(@"\[(?<type>sbom|reach|runtime|vex|attest|auth|docs):(?<path>[^\]]+)\]", RegexOptions.Compiled)]
private static partial Regex ObjectLinkRegex();
[GeneratedRegex(@"(?:is|are|was|were|has been|have been)\s+(?:not\s+)?(?:affected|vulnerable|exploitable|fixed|patched|mitigated|under investigation)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
private static partial Regex ClaimPatternRegex();
[GeneratedRegex(@"(?:severity|CVSS|EPSS|score|rating)\s*(?:is|of|:)?\s*(?:\d+\.?\d*|critical|high|medium|low)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
private static partial Regex SeverityClaimRegex();
}
/// <summary>
/// Interface for resolving object links.
/// </summary>
public interface IObjectLinkResolver
{
/// <summary>Resolves an object link to verify it exists.</summary>
Task<LinkResolution> ResolveAsync(string type, string path, string? tenantId, CancellationToken cancellationToken);
}
/// <summary>
/// Result of link resolution.
/// </summary>
public sealed record LinkResolution
{
/// <summary>Gets whether the object exists.</summary>
public bool Exists { get; init; }
/// <summary>Gets the resolved URI.</summary>
public string? Uri { get; init; }
/// <summary>Gets the object type.</summary>
public string? ObjectType { get; init; }
/// <summary>Gets resolution metadata.</summary>
public ImmutableDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Result of grounding validation.
/// </summary>
public sealed record GroundingValidationResult
{
/// <summary>Gets the grounding score (0.0-1.0).</summary>
public double GroundingScore { get; init; }
/// <summary>Gets whether the response is acceptable.</summary>
public bool IsAcceptable { get; init; }
/// <summary>Gets validated links.</summary>
public ImmutableArray<ValidatedLink> ValidatedLinks { get; init; } =
ImmutableArray<ValidatedLink>.Empty;
/// <summary>Gets total claims found.</summary>
public int TotalClaims { get; init; }
/// <summary>Gets grounded claims count.</summary>
public int GroundedClaims { get; init; }
/// <summary>Gets ungrounded claims.</summary>
public ImmutableArray<UngroundedClaim> UngroundedClaims { get; init; } =
ImmutableArray<UngroundedClaim>.Empty;
/// <summary>Gets validation issues.</summary>
public ImmutableArray<GroundingIssue> Issues { get; init; } =
ImmutableArray<GroundingIssue>.Empty;
}
/// <summary>
/// A validated object link.
/// </summary>
public sealed record ValidatedLink
{
/// <summary>Gets the link type.</summary>
public required string Type { get; init; }
/// <summary>Gets the link path.</summary>
public required string Path { get; init; }
/// <summary>Gets the position in response.</summary>
public int Position { get; init; }
/// <summary>Gets whether the link is valid.</summary>
public bool IsValid { get; init; }
/// <summary>Gets the resolved URI.</summary>
public string? ResolvedUri { get; init; }
/// <summary>Gets the object type.</summary>
public string? ObjectType { get; init; }
}
/// <summary>
/// An extracted link before validation.
/// </summary>
internal sealed record ExtractedLink
{
public required string Type { get; init; }
public required string Path { get; init; }
public int Position { get; init; }
}
/// <summary>
/// An ungrounded claim.
/// </summary>
public sealed record UngroundedClaim
{
/// <summary>Gets the claim text.</summary>
public required string Text { get; init; }
/// <summary>Gets the position in response.</summary>
public int Position { get; init; }
/// <summary>Gets the claim type.</summary>
public ClaimType ClaimType { get; init; }
}
/// <summary>
/// Types of claims.
/// </summary>
public enum ClaimType
{
/// <summary>General claim.</summary>
General,
/// <summary>Claims something is affected.</summary>
Affected,
/// <summary>Claims something is not affected.</summary>
NotAffected,
/// <summary>Claims something is fixed.</summary>
Fixed,
/// <summary>Claims something is under investigation.</summary>
UnderInvestigation,
/// <summary>Severity or score assessment.</summary>
SeverityAssessment
}
/// <summary>
/// A grounding issue.
/// </summary>
public sealed record GroundingIssue
{
/// <summary>Gets the issue type.</summary>
public required GroundingIssueType Type { get; init; }
/// <summary>Gets the issue message.</summary>
public required string Message { get; init; }
/// <summary>Gets the position in response.</summary>
public int Position { get; init; }
/// <summary>Gets the severity.</summary>
public IssueSeverity Severity { get; init; }
}
/// <summary>
/// Types of grounding issues.
/// </summary>
public enum GroundingIssueType
{
/// <summary>Link does not resolve.</summary>
InvalidLink,
/// <summary>Claim without citation.</summary>
UngroundedClaim,
/// <summary>Score below threshold.</summary>
BelowThreshold
}
/// <summary>
/// Issue severity.
/// </summary>
public enum IssueSeverity
{
/// <summary>Informational.</summary>
Info,
/// <summary>Warning.</summary>
Warning,
/// <summary>Error.</summary>
Error,
/// <summary>Critical.</summary>
Critical
}
/// <summary>
/// Result of rejecting a response.
/// </summary>
public sealed record RejectionResult
{
/// <summary>Gets the rejection reason.</summary>
public required string Reason { get; init; }
/// <summary>Gets the grounding score.</summary>
public double GroundingScore { get; init; }
/// <summary>Gets the required score.</summary>
public double RequiredScore { get; init; }
/// <summary>Gets the issues.</summary>
public ImmutableArray<GroundingIssue> Issues { get; init; } =
ImmutableArray<GroundingIssue>.Empty;
}
/// <summary>
/// A suggestion for improving grounding.
/// </summary>
public sealed record GroundingSuggestion
{
/// <summary>Gets the suggestion type.</summary>
public required SuggestionType Type { get; init; }
/// <summary>Gets the suggestion message.</summary>
public required string Message { get; init; }
/// <summary>Gets example improvements.</summary>
public ImmutableArray<string> Examples { get; init; } =
ImmutableArray<string>.Empty;
}
/// <summary>
/// Types of grounding suggestions.
/// </summary>
public enum SuggestionType
{
/// <summary>Add citations.</summary>
AddCitations,
/// <summary>Fix invalid links.</summary>
FixLinks,
/// <summary>Add evidence.</summary>
AddEvidence
}
/// <summary>
/// Options for grounding validation.
/// </summary>
public sealed class GroundingOptions
{
/// <summary>
/// Gets or sets the minimum grounding score.
/// Default: 0.5.
/// </summary>
public double MinGroundingScore { get; set; } = 0.5;
/// <summary>
/// Gets or sets the maximum distance between claim and link.
/// Default: 200 characters.
/// </summary>
public int MaxLinkDistance { get; set; } = 200;
}

View File

@@ -11,6 +11,7 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Microsoft.Extensions.Http" />
<PackageReference Include="Npgsql" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />

View File

@@ -0,0 +1,373 @@
// <copyright file="ConversationStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.AdvisoryAI.Chat;
namespace StellaOps.AdvisoryAI.Storage;
/// <summary>
/// PostgreSQL-backed conversation storage.
/// Sprint: SPRINT_20260107_006_003 Task CH-008
/// </summary>
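/// <remarks>
/// Column names follow the SQL in this class; the shape below is an illustrative
/// assumption (types and constraints are not taken from the authoritative migration):
/// <code>
/// CREATE TABLE advisoryai.conversations (
///     conversation_id text PRIMARY KEY,
///     tenant_id text NOT NULL,
///     user_id text NOT NULL,
///     created_at timestamptz NOT NULL,
///     updated_at timestamptz NOT NULL,
///     context jsonb,
///     metadata jsonb);
///
/// CREATE TABLE advisoryai.turns (
///     turn_id text PRIMARY KEY,
///     conversation_id text NOT NULL REFERENCES advisoryai.conversations (conversation_id) ON DELETE CASCADE,
///     role text NOT NULL,
///     content text NOT NULL,
///     timestamp timestamptz NOT NULL,
///     evidence_links jsonb,
///     proposed_actions jsonb,
///     metadata jsonb);
/// </code>
/// </remarks>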
public sealed class ConversationStore : IConversationStore, IAsyncDisposable
{
private readonly NpgsqlDataSource _dataSource;
private readonly ILogger<ConversationStore> _logger;
private readonly ConversationStoreOptions _options;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Initializes a new instance of the <see cref="ConversationStore"/> class.
/// </summary>
public ConversationStore(
NpgsqlDataSource dataSource,
ILogger<ConversationStore> logger,
ConversationStoreOptions? options = null)
{
_dataSource = dataSource;
_logger = logger;
_options = options ?? new ConversationStoreOptions();
}
/// <inheritdoc />
public async Task<Conversation> CreateAsync(
Conversation conversation,
CancellationToken cancellationToken = default)
{
const string sql = """
INSERT INTO advisoryai.conversations (
conversation_id, tenant_id, user_id, created_at, updated_at,
context, metadata
) VALUES (
@conversationId, @tenantId, @userId, @createdAt, @updatedAt,
@context::jsonb, @metadata::jsonb
)
""";
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("conversationId", conversation.ConversationId);
cmd.Parameters.AddWithValue("tenantId", conversation.TenantId);
cmd.Parameters.AddWithValue("userId", conversation.UserId);
cmd.Parameters.AddWithValue("createdAt", conversation.CreatedAt);
cmd.Parameters.AddWithValue("updatedAt", conversation.UpdatedAt);
cmd.Parameters.AddWithValue("context", JsonSerializer.Serialize(conversation.Context, JsonOptions));
cmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(conversation.Metadata, JsonOptions));
await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Created conversation {ConversationId} for user {UserId}",
conversation.ConversationId, conversation.UserId);
return conversation;
}
/// <inheritdoc />
public async Task<Conversation?> GetByIdAsync(
string conversationId,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM advisoryai.conversations
WHERE conversation_id = @conversationId
""";
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("conversationId", conversationId);
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var conversation = await MapConversationAsync(reader, cancellationToken).ConfigureAwait(false);
// Load turns
var turns = await GetTurnsAsync(conversationId, cancellationToken).ConfigureAwait(false);
return conversation with { Turns = turns };
}
/// <inheritdoc />
public async Task<IReadOnlyList<Conversation>> GetByUserAsync(
string tenantId,
string userId,
int limit = 20,
CancellationToken cancellationToken = default)
{
const string sql = """
SELECT * FROM advisoryai.conversations
WHERE tenant_id = @tenantId AND user_id = @userId
ORDER BY updated_at DESC
LIMIT @limit
""";
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("tenantId", tenantId);
cmd.Parameters.AddWithValue("userId", userId);
cmd.Parameters.AddWithValue("limit", limit);
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var conversations = new List<Conversation>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
conversations.Add(await MapConversationAsync(reader, cancellationToken).ConfigureAwait(false));
}
return conversations;
}
/// <inheritdoc />
public async Task<Conversation> AddTurnAsync(
string conversationId,
ConversationTurn turn,
CancellationToken cancellationToken = default)
{
const string insertSql = """
INSERT INTO advisoryai.turns (
turn_id, conversation_id, role, content, timestamp,
evidence_links, proposed_actions, metadata
) VALUES (
@turnId, @conversationId, @role, @content, @timestamp,
@evidenceLinks::jsonb, @proposedActions::jsonb, @metadata::jsonb
)
""";
const string updateSql = """
UPDATE advisoryai.conversations
SET updated_at = @updatedAt
WHERE conversation_id = @conversationId
""";
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);
// Insert turn within the transaction
await using (var insertCmd = new NpgsqlCommand(insertSql, connection, transaction))
{
insertCmd.Parameters.AddWithValue("turnId", turn.TurnId);
insertCmd.Parameters.AddWithValue("conversationId", conversationId);
insertCmd.Parameters.AddWithValue("role", turn.Role.ToString());
insertCmd.Parameters.AddWithValue("content", turn.Content);
insertCmd.Parameters.AddWithValue("timestamp", turn.Timestamp);
insertCmd.Parameters.AddWithValue("evidenceLinks", JsonSerializer.Serialize(turn.EvidenceLinks, JsonOptions));
insertCmd.Parameters.AddWithValue("proposedActions", JsonSerializer.Serialize(turn.ProposedActions, JsonOptions));
insertCmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(turn.Metadata, JsonOptions));
await insertCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
// Update conversation timestamp within the same transaction
await using (var updateCmd = new NpgsqlCommand(updateSql, connection, transaction))
{
updateCmd.Parameters.AddWithValue("conversationId", conversationId);
updateCmd.Parameters.AddWithValue("updatedAt", turn.Timestamp);
await updateCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
_logger.LogDebug(
"Added turn {TurnId} to conversation {ConversationId}",
turn.TurnId, conversationId);
return (await GetByIdAsync(conversationId, cancellationToken).ConfigureAwait(false))!;
}
/// <inheritdoc />
public async Task<bool> DeleteAsync(
string conversationId,
CancellationToken cancellationToken = default)
{
const string sql = """
DELETE FROM advisoryai.conversations
WHERE conversation_id = @conversationId
""";
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("conversationId", conversationId);
var rowsAffected = await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
if (rowsAffected > 0)
{
_logger.LogInformation("Deleted conversation {ConversationId}", conversationId);
}
return rowsAffected > 0;
}
/// <inheritdoc />
public async Task CleanupExpiredAsync(
TimeSpan maxAge,
CancellationToken cancellationToken = default)
{
const string sql = """
DELETE FROM advisoryai.conversations
WHERE updated_at < @cutoff
""";
var cutoff = DateTimeOffset.UtcNow - maxAge;
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("cutoff", cutoff);
var rowsDeleted = await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
if (rowsDeleted > 0)
{
_logger.LogInformation(
"Cleaned up {Count} expired conversations older than {MaxAge}",
rowsDeleted, maxAge);
}
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
// NpgsqlDataSource is typically managed by DI, so we don't dispose it here
await Task.CompletedTask;
}
private async Task<ImmutableArray<ConversationTurn>> GetTurnsAsync(
string conversationId,
CancellationToken cancellationToken)
{
const string sql = """
SELECT * FROM advisoryai.turns
WHERE conversation_id = @conversationId
ORDER BY timestamp ASC
""";
await using var cmd = _dataSource.CreateCommand(sql);
cmd.Parameters.AddWithValue("conversationId", conversationId);
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var turns = new List<ConversationTurn>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
turns.Add(MapTurn(reader));
}
return turns.ToImmutableArray();
}
private async Task<Conversation> MapConversationAsync(
NpgsqlDataReader reader,
CancellationToken cancellationToken)
{
_ = cancellationToken; // Suppress unused parameter warning
var contextJson = reader.IsDBNull(reader.GetOrdinal("context"))
? null : reader.GetString(reader.GetOrdinal("context"));
var metadataJson = reader.IsDBNull(reader.GetOrdinal("metadata"))
? null : reader.GetString(reader.GetOrdinal("metadata"));
var context = contextJson != null
? JsonSerializer.Deserialize<ConversationContext>(contextJson, JsonOptions) ?? new ConversationContext()
: new ConversationContext();
var metadata = metadataJson != null
? JsonSerializer.Deserialize<ImmutableDictionary<string, string>>(metadataJson, JsonOptions)
?? ImmutableDictionary<string, string>.Empty
: ImmutableDictionary<string, string>.Empty;
return new Conversation
{
ConversationId = reader.GetString(reader.GetOrdinal("conversation_id")),
TenantId = reader.GetString(reader.GetOrdinal("tenant_id")),
UserId = reader.GetString(reader.GetOrdinal("user_id")),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("created_at")),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("updated_at")),
Context = context,
Metadata = metadata,
Turns = ImmutableArray<ConversationTurn>.Empty
};
}
private static ConversationTurn MapTurn(NpgsqlDataReader reader)
{
var evidenceLinksJson = reader.IsDBNull(reader.GetOrdinal("evidence_links"))
? null : reader.GetString(reader.GetOrdinal("evidence_links"));
var proposedActionsJson = reader.IsDBNull(reader.GetOrdinal("proposed_actions"))
? null : reader.GetString(reader.GetOrdinal("proposed_actions"));
var metadataJson = reader.IsDBNull(reader.GetOrdinal("metadata"))
? null : reader.GetString(reader.GetOrdinal("metadata"));
var evidenceLinks = evidenceLinksJson != null
? JsonSerializer.Deserialize<ImmutableArray<EvidenceLink>>(evidenceLinksJson, JsonOptions)
: ImmutableArray<EvidenceLink>.Empty;
var proposedActions = proposedActionsJson != null
? JsonSerializer.Deserialize<ImmutableArray<ProposedAction>>(proposedActionsJson, JsonOptions)
: ImmutableArray<ProposedAction>.Empty;
var metadata = metadataJson != null
? JsonSerializer.Deserialize<ImmutableDictionary<string, string>>(metadataJson, JsonOptions)
?? ImmutableDictionary<string, string>.Empty
: ImmutableDictionary<string, string>.Empty;
var roleStr = reader.GetString(reader.GetOrdinal("role"));
var role = Enum.TryParse<TurnRole>(roleStr, ignoreCase: true, out var parsedRole)
? parsedRole
: TurnRole.User;
return new ConversationTurn
{
TurnId = reader.GetString(reader.GetOrdinal("turn_id")),
Role = role,
Content = reader.GetString(reader.GetOrdinal("content")),
Timestamp = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("timestamp")),
EvidenceLinks = evidenceLinks,
ProposedActions = proposedActions,
Metadata = metadata
};
}
}
/// <summary>
/// Interface for conversation storage.
/// </summary>
public interface IConversationStore
{
/// <summary>Creates a new conversation.</summary>
Task<Conversation> CreateAsync(Conversation conversation, CancellationToken cancellationToken = default);
/// <summary>Gets a conversation by ID.</summary>
Task<Conversation?> GetByIdAsync(string conversationId, CancellationToken cancellationToken = default);
/// <summary>Gets conversations for a user.</summary>
Task<IReadOnlyList<Conversation>> GetByUserAsync(string tenantId, string userId, int limit = 20, CancellationToken cancellationToken = default);
/// <summary>Adds a turn to a conversation.</summary>
Task<Conversation> AddTurnAsync(string conversationId, ConversationTurn turn, CancellationToken cancellationToken = default);
/// <summary>Deletes a conversation.</summary>
Task<bool> DeleteAsync(string conversationId, CancellationToken cancellationToken = default);
/// <summary>Cleans up expired conversations.</summary>
Task CleanupExpiredAsync(TimeSpan maxAge, CancellationToken cancellationToken = default);
}
/// <summary>
/// Options for conversation store.
/// </summary>
public sealed class ConversationStoreOptions
{
/// <summary>
/// Gets or sets the default conversation TTL.
/// Default: 24 hours.
/// </summary>
public TimeSpan DefaultTtl { get; set; } = TimeSpan.FromHours(24);
}

View File

@@ -0,0 +1,26 @@
# AirGap Sync Charter
## Mission
Provide offline job sync bundle export/import and HLC merge services.
## Responsibilities
- Maintain air-gap bundle export/import, sync, and transport logic.
- Keep outputs deterministic and offline-friendly.
- Track sprint tasks in `TASKS.md` and update the sprint tracker.
## Key Paths
- `Services/*.cs`
- `Transport/*.cs`
- `Stores/*.cs`
- `Models/*.cs`
## Required Reading
- `docs/modules/airgap/architecture.md`
- `docs/modules/scheduler/architecture.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
1. Use TimeProvider and deterministic IDs; avoid DateTime.UtcNow and Guid.NewGuid in production paths (see the sketch after this list).
2. Keep bundle outputs canonical and stable (ordering, line endings, hashing).
3. Validate file paths and inputs for offline safety.
4. Update `TASKS.md` and sprint statuses when work changes.
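Item 1 follows the injected-provider pattern used elsewhere in this commit (e.g. the Attestor submission queue); a minimal sketch with a hypothetical `BundleExporter`:

```csharp
using StellaOps.Determinism; // IGuidProvider (assumed available to this module)

public sealed class BundleExporter
{
    private readonly TimeProvider _timeProvider;
    private readonly IGuidProvider _guidProvider;

    public BundleExporter(TimeProvider timeProvider, IGuidProvider guidProvider)
    {
        _timeProvider = timeProvider;
        _guidProvider = guidProvider;
    }

    public (Guid ExportId, DateTimeOffset ExportedAt) NewExportStamp()
    {
        // Deterministic in tests: inject fixed providers instead of calling
        // Guid.NewGuid() or DateTime.UtcNow directly.
        return (_guidProvider.NewGuid(), _timeProvider.GetUtcNow());
    }
}
```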

View File

@@ -0,0 +1,10 @@
# AirGap Sync Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0792-M | DONE | Revalidated 2026-01-07. |
| AUDIT-0792-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0792-A | TODO | Open findings (TimeProvider, DSSE helper, InvariantCulture, path validation, line endings, tests). |

View File

@@ -0,0 +1,24 @@
# AirGap Sync Tests Charter
## Mission
Validate air-gap sync services, merge behavior, and signing determinism.
## Responsibilities
- Maintain unit tests for AirGap.Sync services.
- Keep fixtures deterministic and offline-friendly.
- Track sprint tasks in `TASKS.md` and update the sprint tracker.
## Key Paths
- `AirGapBundleDsseSignerTests.cs`
- `ConflictResolverTests.cs`
- `HlcMergeServiceTests.cs`
## Required Reading
- `docs/modules/airgap/architecture.md`
- `docs/modules/scheduler/architecture.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
1. Use fixed time/IDs in tests; avoid Guid.NewGuid, DateTime.UtcNow (see the sketch after this list).
2. Keep determinism tests stable across platforms.
3. Update `TASKS.md` and sprint statuses when work changes.
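Item 1 in practice, as a minimal sketch (the helper name is hypothetical):

```csharp
// Fixed-clock TimeProvider so test output never depends on wall-clock time.
internal sealed class FixedTimeProvider(DateTimeOffset now) : TimeProvider
{
    public override DateTimeOffset GetUtcNow() => now;
}

// Usage in a test:
var clock = new FixedTimeProvider(new DateTimeOffset(2026, 1, 7, 0, 0, 0, TimeSpan.Zero));
var bundleId = Guid.Parse("00000000-0000-0000-0000-000000000001"); // fixed ID instead of Guid.NewGuid()
```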

View File

@@ -0,0 +1,10 @@
# AirGap Sync Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0793-M | DONE | Revalidated 2026-01-07. |
| AUDIT-0793-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0793-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -6,7 +6,8 @@ namespace StellaOps.Attestor.Core.Rekor;
public sealed class RekorInclusionVerificationResult
{
/// <summary>
/// True if inclusion proof was successfully verified.
/// True if inclusion proof was successfully verified (Merkle path only).
/// Check <see cref="CheckpointSignatureValid"/> for checkpoint signature status.
/// </summary>
public required bool Verified { get; init; }

View File

@@ -15,6 +15,7 @@ using NpgsqlTypes;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Queue;
using StellaOps.Determinism;
namespace StellaOps.Attestor.Infrastructure.Queue;
@@ -29,6 +30,7 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
private readonly AttestorMetrics _metrics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<PostgresRekorSubmissionQueue> _logger;
private readonly IGuidProvider _guidProvider;
private const int DefaultCommandTimeoutSeconds = 30;
@@ -37,12 +39,14 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
IOptions<RekorQueueOptions> options,
AttestorMetrics metrics,
TimeProvider timeProvider,
IGuidProvider guidProvider,
ILogger<PostgresRekorSubmissionQueue> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -55,7 +59,7 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var id = Guid.NewGuid();
var id = _guidProvider.NewGuid();
const string sql = """
INSERT INTO attestor.rekor_submission_queue (
@@ -138,7 +142,7 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
await using var reader = await command.ExecuteReaderAsync(cancellationToken);
while (await reader.ReadAsync(cancellationToken))
{
var queuedAt = reader.GetDateTime(reader.GetOrdinal("created_at"));
var queuedAt = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("created_at"));
var waitTime = (now - queuedAt).TotalSeconds;
_metrics.RekorQueueWaitTime.Record(waitTime);

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
@@ -131,7 +132,15 @@ internal sealed class HttpRekorClient : IRekorClient
Origin = checkpointElement.TryGetProperty("origin", out var origin) ? origin.GetString() : null,
Size = checkpointElement.TryGetProperty("size", out var size) && size.TryGetInt64(out var sizeValue) ? sizeValue : 0,
RootHash = checkpointElement.TryGetProperty("rootHash", out var rootHash) ? rootHash.GetString() : null,
Timestamp = checkpointElement.TryGetProperty("timestamp", out var ts) && ts.ValueKind == JsonValueKind.String && DateTimeOffset.TryParse(ts.GetString(), out var dto) ? dto : null
Timestamp = checkpointElement.TryGetProperty("timestamp", out var ts)
&& ts.ValueKind == JsonValueKind.String
&& DateTimeOffset.TryParse(
ts.GetString(),
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
out var dto)
? dto
: null
}
: null,
Inclusion = inclusionElement.ValueKind == JsonValueKind.Object
@@ -269,6 +278,10 @@ internal sealed class HttpRekorClient : IRekorClient
"Successfully verified Rekor inclusion for UUID {Uuid} at index {Index}",
rekorUuid, logIndex);
_logger.LogDebug(
"Checkpoint signature verification is unavailable for UUID {Uuid}; treating checkpoint as unverified",
rekorUuid);
return RekorInclusionVerificationResult.Success(
logIndex.Value,
computedRootHex,

View File

@@ -0,0 +1,55 @@
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Rekor;
namespace StellaOps.Attestor.Infrastructure.Rekor;
internal static class RekorBackendResolver
{
public static RekorBackend ResolveBackend(
AttestorOptions options,
string? backendName,
bool allowFallbackToPrimary)
{
ArgumentNullException.ThrowIfNull(options);
var normalized = string.IsNullOrWhiteSpace(backendName)
? "primary"
: backendName.Trim();
if (string.Equals(normalized, "primary", StringComparison.OrdinalIgnoreCase))
{
return BuildBackend("primary", options.Rekor.Primary);
}
if (string.Equals(normalized, "mirror", StringComparison.OrdinalIgnoreCase))
{
return BuildBackend("mirror", options.Rekor.Mirror);
}
if (allowFallbackToPrimary)
{
return BuildBackend(normalized, options.Rekor.Primary);
}
throw new InvalidOperationException($"Unknown Rekor backend: {backendName}");
}
public static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
ArgumentException.ThrowIfNullOrWhiteSpace(name);
if (string.IsNullOrWhiteSpace(options.Url))
{
throw new InvalidOperationException($"Rekor backend '{name}' is not configured.");
}
return new RekorBackend
{
Name = name,
Url = new Uri(options.Url, UriKind.Absolute),
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
MaxAttempts = options.MaxAttempts
};
}
}

View File

@@ -30,6 +30,7 @@ using StellaOps.Attestor.Core.InToto;
using StellaOps.Attestor.Core.InToto.Layout;
using StellaOps.Attestor.Infrastructure.InToto;
using StellaOps.Attestor.Verify;
using StellaOps.Determinism;
namespace StellaOps.Attestor.Infrastructure;
@@ -39,6 +40,7 @@ public static class ServiceCollectionExtensions
{
services.AddMemoryCache();
services.AddSingleton(TimeProvider.System);
services.AddSystemGuidProvider();
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddSingleton(sp =>

View File

@@ -13,6 +13,7 @@
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.SmSoft\StellaOps.Cryptography.Plugin.SmSoft.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="..\..\..\Router/__Libraries/StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
<ItemGroup>

View File

@@ -15,6 +15,7 @@ using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Transparency;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure.Rekor;
namespace StellaOps.Attestor.Infrastructure.Submission;
@@ -384,7 +385,7 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
AttestorOptions.RekorBackendOptions backendOptions,
CancellationToken cancellationToken)
{
var backend = BuildBackend(backendName, backendOptions);
var backend = RekorBackendResolver.BuildBackend(backendName, backendOptions);
var stopwatch = Stopwatch.StartNew();
try
{
@@ -782,20 +783,4 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
new SubmissionOutcome(backend, url ?? string.Empty, null, null, null, latency, error);
}
private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
if (string.IsNullOrWhiteSpace(options.Url))
{
throw new InvalidOperationException($"Rekor backend '{name}' is not configured.");
}
return new RekorBackend
{
Name = name,
Url = new Uri(options.Url, UriKind.Absolute),
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
MaxAttempts = options.MaxAttempts
};
}
}

View File

@@ -7,5 +7,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0055-M | DONE | Revalidated 2026-01-06. |
| AUDIT-0055-T | DONE | Revalidated 2026-01-06. |
| AUDIT-0055-A | TODO | Reopened after revalidation 2026-01-06. |
| AUDIT-0055-A | DONE | Applied determinism, backend resolver, and Rekor client fixes 2026-01-08. |
| VAL-SMOKE-001 | DONE | Fixed continuation token behavior; unit tests pass. |

View File

@@ -14,6 +14,7 @@ using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Transparency;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.Attestor.Verify;
namespace StellaOps.Attestor.Infrastructure.Verification;
@@ -238,7 +239,7 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
var backendOptions = string.Equals(backendName, "mirror", StringComparison.OrdinalIgnoreCase)
? _options.Rekor.Mirror
: _options.Rekor.Primary;
var backend = BuildBackend(backendName ?? "primary", backendOptions);
var backend = RekorBackendResolver.ResolveBackend(_options, backendName, allowFallbackToPrimary: true);
using var activity = _activitySource.StartProofRefresh(backend.Name, _options.Verification.PolicyId);
@@ -354,23 +355,6 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
};
}
private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
if (string.IsNullOrWhiteSpace(options.Url))
{
throw new InvalidOperationException($"Rekor backend '{name}' is not configured.");
}
return new RekorBackend
{
Name = name,
Url = new Uri(options.Url, UriKind.Absolute),
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
MaxAttempts = options.MaxAttempts
};
}
private static string NormalizeTag(string? value) => string.IsNullOrWhiteSpace(value) ? "unknown" : value;
}

View File

@@ -17,6 +17,7 @@ using StellaOps.Attestor.Core.Queue;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Submission;
using System.Text.Json;
using StellaOps.Attestor.Infrastructure.Rekor;
namespace StellaOps.Attestor.Infrastructure.Workers;
@@ -153,7 +154,7 @@ public sealed class RekorRetryWorker : BackgroundService
try
{
var backend = ResolveBackend(item.Backend);
var backend = RekorBackendResolver.ResolveBackend(_attestorOptions, item.Backend, allowFallbackToPrimary: false);
var request = BuildSubmissionRequest(item);
var response = await _rekorClient.SubmitAsync(request, backend, ct);
@@ -188,16 +189,6 @@ public sealed class RekorRetryWorker : BackgroundService
}
}
private RekorBackend ResolveBackend(string backend)
{
return backend.ToLowerInvariant() switch
{
"primary" => BuildBackend("primary", _attestorOptions.Rekor.Primary),
"mirror" => BuildBackend("mirror", _attestorOptions.Rekor.Mirror),
_ => throw new InvalidOperationException($"Unknown Rekor backend: {backend}")
};
}
private static AttestorSubmissionRequest BuildSubmissionRequest(RekorQueueItem item)
{
var dsseEnvelope = ParseDsseEnvelope(item.DssePayload);
@@ -260,22 +251,6 @@ public sealed class RekorRetryWorker : BackgroundService
};
}
private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
if (string.IsNullOrWhiteSpace(options.Url))
{
throw new InvalidOperationException($"Rekor backend '{name}' is not configured.");
}
return new RekorBackend
{
Name = name,
Url = new Uri(options.Url, UriKind.Absolute),
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
MaxAttempts = options.MaxAttempts
};
}
}
#endif

View File

@@ -15,6 +15,7 @@ using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Queue;
using StellaOps.Attestor.Infrastructure.Queue;
using StellaOps.Determinism;
using Testcontainers.PostgreSql;
using Xunit;
@@ -63,6 +64,7 @@ public class PostgresRekorSubmissionQueueIntegrationTests : IAsyncLifetime
}),
_metrics,
_timeProvider,
SystemGuidProvider.Instance,
NullLogger<PostgresRekorSubmissionQueue>.Instance);
}
@@ -261,6 +263,7 @@ public class PostgresRekorSubmissionQueueIntegrationTests : IAsyncLifetime
Options.Create(new RekorQueueOptions { MaxAttempts = 2 }),
_metrics,
_timeProvider,
SystemGuidProvider.Instance,
NullLogger<PostgresRekorSubmissionQueue>.Instance);
var id = await queue.EnqueueAsync("tenant-1", "sha256:deadletter", new byte[] { 0x01 }, "primary");
@@ -307,6 +310,7 @@ public class PostgresRekorSubmissionQueueIntegrationTests : IAsyncLifetime
Options.Create(new RekorQueueOptions { MaxAttempts = 1 }),
_metrics,
_timeProvider,
SystemGuidProvider.Instance,
NullLogger<PostgresRekorSubmissionQueue>.Instance);
var id = await queue.EnqueueAsync("tenant-dlq", "sha256:dlq", new byte[] { 0x01 }, "primary");

View File

@@ -0,0 +1,23 @@
# Attestor SPDX3 Build Profile Charter
## Purpose & Scope
- Working directory: `src/Attestor/__Libraries/StellaOps.Attestor.Spdx3/`.
- Roles: backend engineer, QA automation.
- Focus: mapping SLSA/in-toto build attestations to SPDX 3.0.1 Build profile elements.
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/attestor/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- Preserve deterministic IDs and ordering in SPDX outputs.
- Use InvariantCulture for formatted timestamps and hashes (see the sketch after this list).
- Avoid Guid.NewGuid/DateTime.UtcNow in core logic; use injected providers.
- Update the sprint tracker and local `TASKS.md` when work changes.
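Culture-invariant formatting, as referenced above (values illustrative):

```csharp
using System.Globalization;
using System.Security.Cryptography;

// Round-trip ("O") timestamps are locale-independent and stable across machines.
string created = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero)
    .ToString("O", CultureInfo.InvariantCulture); // 2026-01-07T12:00:00.0000000+00:00

// Lower-case hex keeps hash output deterministic regardless of the current culture.
string digest = Convert.ToHexStringLower(SHA256.HashData("example"u8));
```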
## Testing
- Unit tests live in `src/Attestor/__Libraries/__Tests/StellaOps.Attestor.Spdx3.Tests/`.
- Cover mapping, deterministic ID generation, and relationship ordering.

View File

@@ -0,0 +1,147 @@
// <copyright file="BuildAttestationMapper.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Globalization;
using StellaOps.Spdx3.Model.Build;
namespace StellaOps.Attestor.Spdx3;
/// <summary>
/// Maps between SLSA/in-toto build attestations and SPDX 3.0.1 Build profile elements.
/// Sprint: SPRINT_20260107_004_003 Task BP-004
/// </summary>
/// <remarks>
/// Mapping Table (SLSA -> SPDX 3.0.1):
/// | in-toto/SLSA | SPDX 3.0.1 Build |
/// |--------------|------------------|
/// | buildType | build_buildType |
/// | builder.id | CreationInfo.createdBy (Agent) |
/// | invocation.configSource.uri | build_configSourceUri |
/// | invocation.environment | build_environment |
/// | invocation.parameters | build_parameter |
/// | metadata.buildStartedOn | build_buildStartTime |
/// | metadata.buildFinishedOn | build_buildEndTime |
/// | metadata.buildInvocationId | build_buildId |
/// </remarks>
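/// <example>
/// Illustrative usage (the prefix value is an assumption):
/// <code>
/// var mapper = new BuildAttestationMapper();
/// Spdx3Build build = mapper.MapToSpdx3(payload, "https://example.org/spdx");
/// BuildAttestationPayload roundTripped = mapper.MapFromSpdx3(build);
/// </code>
/// </example>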
public sealed class BuildAttestationMapper : IBuildAttestationMapper
{
/// <inheritdoc />
public Spdx3Build MapToSpdx3(BuildAttestationPayload attestation, string spdxIdPrefix)
{
ArgumentNullException.ThrowIfNull(attestation);
ArgumentException.ThrowIfNullOrWhiteSpace(spdxIdPrefix);
var configSourceUris = ImmutableArray<string>.Empty;
var configSourceDigests = ImmutableArray<Spdx3Hash>.Empty;
var configSourceEntrypoints = ImmutableArray<string>.Empty;
if (attestation.Invocation?.ConfigSource is { } configSource)
{
if (!string.IsNullOrWhiteSpace(configSource.Uri))
{
configSourceUris = ImmutableArray.Create(configSource.Uri);
}
if (configSource.Digest.Count > 0)
{
configSourceDigests = configSource.Digest
.Select(kvp => new Spdx3Hash { Algorithm = kvp.Key, HashValue = kvp.Value })
.ToImmutableArray();
}
if (!string.IsNullOrWhiteSpace(configSource.EntryPoint))
{
configSourceEntrypoints = ImmutableArray.Create(configSource.EntryPoint);
}
}
var environment = attestation.Invocation?.Environment.ToImmutableDictionary()
?? ImmutableDictionary<string, string>.Empty;
var parameters = attestation.Invocation?.Parameters.ToImmutableDictionary()
?? ImmutableDictionary<string, string>.Empty;
var buildId = attestation.Metadata?.BuildInvocationId
?? GenerateBuildId(attestation);
return new Spdx3Build
{
SpdxId = GenerateSpdxId(spdxIdPrefix, buildId),
Type = Spdx3Build.TypeName,
Name = $"Build {buildId}",
BuildType = attestation.BuildType,
BuildId = buildId,
BuildStartTime = attestation.Metadata?.BuildStartedOn,
BuildEndTime = attestation.Metadata?.BuildFinishedOn,
ConfigSourceUri = configSourceUris,
ConfigSourceDigest = configSourceDigests,
ConfigSourceEntrypoint = configSourceEntrypoints,
Environment = environment,
Parameter = parameters
};
}
/// <inheritdoc />
public BuildAttestationPayload MapFromSpdx3(Spdx3Build build)
{
ArgumentNullException.ThrowIfNull(build);
ConfigSource? configSource = null;
if (build.ConfigSourceUri.Length > 0 || build.ConfigSourceDigest.Length > 0)
{
configSource = new ConfigSource
{
Uri = build.ConfigSourceUri.FirstOrDefault(),
Digest = build.ConfigSourceDigest
.ToDictionary(h => h.Algorithm, h => h.HashValue),
EntryPoint = build.ConfigSourceEntrypoint.FirstOrDefault()
};
}
return new BuildAttestationPayload
{
BuildType = build.BuildType,
Invocation = new BuildInvocation
{
ConfigSource = configSource,
Environment = build.Environment,
Parameters = build.Parameter
},
Metadata = new BuildMetadata
{
BuildInvocationId = build.BuildId,
BuildStartedOn = build.BuildStartTime,
BuildFinishedOn = build.BuildEndTime
}
};
}
/// <inheritdoc />
public bool CanMapToSpdx3(BuildAttestationPayload attestation)
{
if (attestation is null)
{
return false;
}
// buildType is required for SPDX 3.0.1 Build profile
return !string.IsNullOrWhiteSpace(attestation.BuildType);
}
private static string GenerateSpdxId(string prefix, string? buildId)
{
var id = buildId ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture);
return $"{prefix.TrimEnd('/')}/build/{id}";
}
private static string GenerateBuildId(BuildAttestationPayload attestation)
{
// Generate a deterministic build ID from available information
var input = $"{attestation.BuildType}:{attestation.Metadata?.BuildStartedOn:O}";
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(input));
return Convert.ToHexStringLower(hash)[..16];
}
}

View File

@@ -0,0 +1,160 @@
// <copyright file="BuildRelationshipBuilder.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
namespace StellaOps.Attestor.Spdx3;
/// <summary>
/// Builds SPDX 3.0.1 relationships for Build profile elements.
/// Sprint: SPRINT_20260107_004_003 Task BP-006
/// </summary>
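/// <example>
/// Illustrative chaining (SPDX IDs are assumptions):
/// <code>
/// var relationships = new BuildRelationshipBuilder("https://example.org/spdx")
///     .AddGenerates(build.SpdxId, "https://example.org/spdx/pkg/app")
///     .AddHasPrerequisite(build.SpdxId, "https://example.org/spdx/material/src")
///     .Build();
/// </code>
/// </example>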
public sealed class BuildRelationshipBuilder
{
private readonly string _spdxIdPrefix;
private readonly List<Spdx3Relationship> _relationships = new();
/// <summary>
/// Initializes a new instance of the <see cref="BuildRelationshipBuilder"/> class.
/// </summary>
/// <param name="spdxIdPrefix">Prefix for generating relationship SPDX IDs.</param>
public BuildRelationshipBuilder(string spdxIdPrefix)
{
ArgumentException.ThrowIfNullOrWhiteSpace(spdxIdPrefix);
_spdxIdPrefix = spdxIdPrefix;
}
/// <summary>
/// Adds a BUILD_TOOL_OF relationship (tool -> artifact).
/// </summary>
/// <param name="toolSpdxId">SPDX ID of the build tool.</param>
/// <param name="artifactSpdxId">SPDX ID of the artifact built by the tool.</param>
public BuildRelationshipBuilder AddBuildToolOf(string toolSpdxId, string artifactSpdxId)
{
_relationships.Add(CreateRelationship(
"BUILD_TOOL_OF",
toolSpdxId,
artifactSpdxId));
return this;
}
/// <summary>
/// Adds a GENERATES relationship (build -> artifact).
/// </summary>
/// <param name="buildSpdxId">SPDX ID of the Build element.</param>
/// <param name="artifactSpdxId">SPDX ID of the generated artifact.</param>
public BuildRelationshipBuilder AddGenerates(string buildSpdxId, string artifactSpdxId)
{
_relationships.Add(CreateRelationship(
"GENERATES",
buildSpdxId,
artifactSpdxId));
return this;
}
/// <summary>
/// Adds a GENERATED_FROM relationship (artifact -> source).
/// </summary>
/// <param name="artifactSpdxId">SPDX ID of the generated artifact.</param>
/// <param name="sourceSpdxId">SPDX ID of the source material.</param>
public BuildRelationshipBuilder AddGeneratedFrom(string artifactSpdxId, string sourceSpdxId)
{
_relationships.Add(CreateRelationship(
"GENERATED_FROM",
artifactSpdxId,
sourceSpdxId));
return this;
}
/// <summary>
/// Adds a HAS_PREREQUISITE relationship (build -> dependency).
/// </summary>
/// <param name="buildSpdxId">SPDX ID of the Build element.</param>
/// <param name="prerequisiteSpdxId">SPDX ID of the prerequisite material.</param>
public BuildRelationshipBuilder AddHasPrerequisite(string buildSpdxId, string prerequisiteSpdxId)
{
_relationships.Add(CreateRelationship(
"HAS_PREREQUISITE",
buildSpdxId,
prerequisiteSpdxId));
return this;
}
/// <summary>
/// Links a Build element to its produced Package elements.
/// </summary>
/// <param name="build">The Build element.</param>
/// <param name="packageSpdxIds">SPDX IDs of produced Package elements.</param>
public BuildRelationshipBuilder LinkBuildToPackages(Spdx3Build build, IEnumerable<string> packageSpdxIds)
{
ArgumentNullException.ThrowIfNull(build);
ArgumentNullException.ThrowIfNull(packageSpdxIds);
foreach (var packageId in packageSpdxIds)
{
AddGenerates(build.SpdxId, packageId);
}
return this;
}
/// <summary>
/// Links a Build element to its source materials.
/// </summary>
/// <param name="build">The Build element.</param>
/// <param name="materials">Build materials (sources).</param>
public BuildRelationshipBuilder LinkBuildToMaterials(
Spdx3Build build,
IEnumerable<BuildMaterial> materials)
{
ArgumentNullException.ThrowIfNull(build);
ArgumentNullException.ThrowIfNull(materials);
foreach (var material in materials)
{
// Create a source element SPDX ID from the material URI
var materialSpdxId = GenerateMaterialSpdxId(material.Uri);
AddHasPrerequisite(build.SpdxId, materialSpdxId);
}
return this;
}
/// <summary>
/// Builds the list of relationships.
/// </summary>
/// <returns>Immutable array of SPDX 3.0.1 relationships.</returns>
public ImmutableArray<Spdx3Relationship> Build()
{
return _relationships.ToImmutableArray();
}
private Spdx3Relationship CreateRelationship(
string relationshipType,
string fromSpdxId,
string toSpdxId)
{
var relId = $"{_spdxIdPrefix}/relationship/{relationshipType.ToLowerInvariant()}/{_relationships.Count + 1}";
return new Spdx3Relationship
{
SpdxId = relId,
Type = "Relationship",
RelationshipType = relationshipType,
From = fromSpdxId,
To = ImmutableArray.Create(toSpdxId)
};
}
private string GenerateMaterialSpdxId(string materialUri)
{
// Generate a deterministic SPDX ID from the material URI
using var sha = System.Security.Cryptography.SHA256.Create();
var hash = sha.ComputeHash(System.Text.Encoding.UTF8.GetBytes(materialUri));
var shortHash = Convert.ToHexStringLower(hash)[..12];
return $"{_spdxIdPrefix}/material/{shortHash}";
}
}

View File

@@ -0,0 +1,282 @@
// <copyright file="CombinedDocumentBuilder.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
namespace StellaOps.Attestor.Spdx3;
/// <summary>
/// Builds combined SPDX 3.0.1 documents containing multiple profiles (e.g., Software + Build).
/// Sprint: SPRINT_20260107_004_003 Task BP-008
/// </summary>
/// <remarks>
/// This builder merges elements from different profiles into a single coherent document,
/// ensuring proper profile conformance declarations and cross-profile relationships.
/// </remarks>
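/// <example>
/// Illustrative usage (identifiers are assumptions):
/// <code>
/// var document = CombinedDocumentBuilder.Create(timeProvider)
///     .WithDocumentId("https://example.org/spdx/doc/1")
///     .WithName("service SBOM with build provenance")
///     .WithSoftwareProfile(sbom)
///     .WithBuildAttestation(attestation, "https://example.org/spdx")
///     .Build();
/// </code>
/// </example>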
public sealed class CombinedDocumentBuilder
{
private readonly List<Spdx3Element> _elements = new();
private readonly HashSet<Spdx3ProfileIdentifier> _profiles = new();
private readonly List<Spdx3CreationInfo> _creationInfos = new();
private readonly List<Spdx3Relationship> _relationships = new();
private readonly TimeProvider _timeProvider;
private string? _documentSpdxId;
private string? _documentName;
private string? _rootElementId;
/// <summary>
/// Initializes a new instance of the <see cref="CombinedDocumentBuilder"/> class.
/// </summary>
/// <param name="timeProvider">Time provider for timestamp generation.</param>
public CombinedDocumentBuilder(TimeProvider timeProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
/// Sets the document SPDX ID.
/// </summary>
/// <param name="spdxId">The document's unique IRI identifier.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithDocumentId(string spdxId)
{
ArgumentException.ThrowIfNullOrWhiteSpace(spdxId);
_documentSpdxId = spdxId;
return this;
}
/// <summary>
/// Sets the document name.
/// </summary>
/// <param name="name">Human-readable document name.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithName(string name)
{
ArgumentException.ThrowIfNullOrWhiteSpace(name);
_documentName = name;
return this;
}
/// <summary>
/// Adds elements from a Software profile SBOM.
/// </summary>
/// <param name="sbom">The source SBOM document.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithSoftwareProfile(Spdx3Document sbom)
{
ArgumentNullException.ThrowIfNull(sbom);
// Add all elements from the SBOM
foreach (var element in sbom.Elements)
{
_elements.Add(element);
}
// Add relationships
foreach (var relationship in sbom.Relationships)
{
_relationships.Add(relationship);
}
// Track root element from SBOM
var root = sbom.GetRootPackage();
if (root is not null && _rootElementId is null)
{
_rootElementId = root.SpdxId;
}
// Add Software and Core profiles
_profiles.Add(Spdx3ProfileIdentifier.Core);
_profiles.Add(Spdx3ProfileIdentifier.Software);
// Preserve existing profile conformance
foreach (var profile in sbom.Profiles)
{
_profiles.Add(profile);
}
return this;
}
/// <summary>
/// Adds a Build profile element with relationships to the SBOM.
/// </summary>
/// <param name="build">The Build element.</param>
/// <param name="producedArtifactId">Optional ID of the artifact produced by this build.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithBuildProfile(Spdx3Build build, string? producedArtifactId = null)
{
ArgumentNullException.ThrowIfNull(build);
_elements.Add(build);
_profiles.Add(Spdx3ProfileIdentifier.Core);
_profiles.Add(Spdx3ProfileIdentifier.Build);
// Link build to root/produced artifact if specified
var targetId = producedArtifactId ?? _rootElementId;
if (targetId is not null)
{
var generatesRelationship = new Spdx3Relationship
{
SpdxId = $"{build.SpdxId}/relationship/generates",
From = build.SpdxId,
To = ImmutableArray.Create(targetId),
RelationshipType = Spdx3RelationshipType.Generates
};
_relationships.Add(generatesRelationship);
}
return this;
}
/// <summary>
/// Adds a Build element mapped from an attestation.
/// </summary>
/// <param name="attestation">The source attestation.</param>
/// <param name="spdxIdPrefix">Prefix for generating SPDX IDs.</param>
/// <param name="producedArtifactId">Optional ID of the artifact produced by this build.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithBuildAttestation(
BuildAttestationPayload attestation,
string spdxIdPrefix,
string? producedArtifactId = null)
{
ArgumentNullException.ThrowIfNull(attestation);
ArgumentException.ThrowIfNullOrWhiteSpace(spdxIdPrefix);
var mapper = new BuildAttestationMapper();
var build = mapper.MapToSpdx3(attestation, spdxIdPrefix);
return WithBuildProfile(build, producedArtifactId);
}
/// <summary>
/// Adds creation information for the combined document.
/// </summary>
/// <param name="creationInfo">The creation information.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithCreationInfo(Spdx3CreationInfo creationInfo)
{
ArgumentNullException.ThrowIfNull(creationInfo);
_creationInfos.Add(creationInfo);
return this;
}
/// <summary>
/// Adds an arbitrary element to the document.
/// </summary>
/// <param name="element">The element to add.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithElement(Spdx3Element element)
{
ArgumentNullException.ThrowIfNull(element);
_elements.Add(element);
return this;
}
/// <summary>
/// Adds a relationship to the document.
/// </summary>
/// <param name="relationship">The relationship to add.</param>
/// <returns>This builder for chaining.</returns>
public CombinedDocumentBuilder WithRelationship(Spdx3Relationship relationship)
{
ArgumentNullException.ThrowIfNull(relationship);
_relationships.Add(relationship);
return this;
}
/// <summary>
/// Builds the combined SPDX 3.0.1 document.
/// </summary>
/// <returns>The combined document.</returns>
/// <exception cref="InvalidOperationException">If required fields are missing.</exception>
public Spdx3Document Build()
{
if (string.IsNullOrWhiteSpace(_documentSpdxId))
{
throw new InvalidOperationException("Document SPDX ID is required. Call WithDocumentId().");
}
// Create combined creation info if none provided
if (_creationInfos.Count == 0)
{
var defaultCreationInfo = new Spdx3CreationInfo
{
Id = $"{_documentSpdxId}/creationInfo",
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = _timeProvider.GetUtcNow(),
CreatedBy = ImmutableArray<string>.Empty,
CreatedUsing = ImmutableArray.Create("StellaOps"),
Profile = _profiles.ToImmutableArray(),
DataLicense = Spdx3CreationInfo.Spdx301DataLicense
};
_creationInfos.Add(defaultCreationInfo);
}
// Combine all elements including relationships
var allElements = new List<Spdx3Element>(_elements);
allElements.AddRange(_relationships);
return new Spdx3Document(
elements: allElements,
creationInfos: _creationInfos,
profiles: _profiles);
}
/// <summary>
/// Creates a new builder with the given time provider.
/// </summary>
/// <param name="timeProvider">Time provider for timestamps.</param>
/// <returns>A new builder instance.</returns>
public static CombinedDocumentBuilder Create(TimeProvider timeProvider)
{
return new CombinedDocumentBuilder(timeProvider);
}
/// <summary>
/// Creates a new builder using the system time provider.
/// </summary>
/// <returns>A new builder instance.</returns>
public static CombinedDocumentBuilder Create()
{
return new CombinedDocumentBuilder(TimeProvider.System);
}
}
/// <summary>
/// Extension methods for combining SPDX 3.0.1 documents.
/// </summary>
public static class CombinedDocumentExtensions
{
/// <summary>
/// Combines an SBOM with a build attestation into a single document.
/// </summary>
/// <param name="sbom">The source SBOM.</param>
/// <param name="attestation">The build attestation.</param>
/// <param name="documentId">The combined document ID.</param>
/// <param name="spdxIdPrefix">Prefix for generated IDs.</param>
/// <param name="timeProvider">Time provider for timestamps.</param>
/// <returns>The combined document.</returns>
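/// <example>
/// Illustrative call, assuming an existing <c>sbom</c> document and SLSA payload; the URIs are placeholders:
/// <code>
/// var combined = sbom.WithBuildProvenance(
///     attestation,
///     documentId: "https://example.org/spdx/combined/1",
///     spdxIdPrefix: "https://example.org/spdx");
/// </code>
/// </example>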
public static Spdx3Document WithBuildProvenance(
this Spdx3Document sbom,
BuildAttestationPayload attestation,
string documentId,
string spdxIdPrefix,
TimeProvider? timeProvider = null)
{
ArgumentNullException.ThrowIfNull(sbom);
ArgumentNullException.ThrowIfNull(attestation);
return CombinedDocumentBuilder.Create(timeProvider ?? TimeProvider.System)
.WithDocumentId(documentId)
.WithName($"Combined SBOM and Build Provenance")
.WithSoftwareProfile(sbom)
.WithBuildAttestation(attestation, spdxIdPrefix)
.Build();
}
}

View File

@@ -0,0 +1,476 @@
// <copyright file="DsseSpdx3Signer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
namespace StellaOps.Attestor.Spdx3;
/// <summary>
/// Signs SPDX 3.0.1 documents with DSSE (Dead Simple Signing Envelope).
/// Sprint: SPRINT_20260107_004_003 Task BP-005
/// </summary>
/// <remarks>
/// The DSSE envelope wraps the entire SPDX 3.0.1 document as the payload.
/// This follows the same pattern as in-toto attestations, making the signed
/// SPDX document verifiable with standard DSSE/in-toto verification tools.
///
/// Payload type: application/spdx+json
/// </remarks>
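/// <example>
/// The resulting envelope follows the standard DSSE JSON shape (field names per the DSSE spec; values are placeholders,
/// and the serialized property names of <see cref="DsseSpdx3Envelope"/> may differ depending on serializer settings):
/// <code>
/// { "payloadType": "application/spdx+json",
///   "payload": "(base64url of the SPDX 3.0.1 JSON-LD document)",
///   "signatures": [ { "keyid": "key-123", "sig": "(base64url signature over the PAE)" } ] }
/// </code>
/// </example>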
public sealed class DsseSpdx3Signer : IDsseSpdx3Signer
{
/// <summary>
/// The DSSE payload type for SPDX 3.0.1 JSON-LD documents.
/// </summary>
public const string Spdx3PayloadType = "application/spdx+json";
/// <summary>
/// The PAE (Pre-Authentication Encoding) prefix for DSSE v1.
/// </summary>
private const string PaePrefix = "DSSEv1";
private readonly ISpdx3Serializer _serializer;
private readonly IDsseSigningProvider _signingProvider;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Initializes a new instance of the <see cref="DsseSpdx3Signer"/> class.
/// </summary>
/// <param name="serializer">The SPDX 3.0.1 JSON-LD serializer.</param>
/// <param name="signingProvider">The DSSE signing provider.</param>
/// <param name="timeProvider">Time provider for timestamp injection.</param>
public DsseSpdx3Signer(
ISpdx3Serializer serializer,
IDsseSigningProvider signingProvider,
TimeProvider timeProvider)
{
_serializer = serializer ?? throw new ArgumentNullException(nameof(serializer));
_signingProvider = signingProvider ?? throw new ArgumentNullException(nameof(signingProvider));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <inheritdoc />
public async Task<DsseSpdx3Envelope> SignAsync(
Spdx3Document document,
DsseSpdx3SigningOptions options,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(document);
ArgumentNullException.ThrowIfNull(options);
// Serialize the SPDX 3.0.1 document to canonical JSON
var payloadBytes = _serializer.SerializeToBytes(document);
// Encode payload as base64url (RFC 4648 Section 5)
var payloadBase64Url = ToBase64Url(payloadBytes);
// Build PAE (Pre-Authentication Encoding) for signing
var paeBytes = BuildPae(Spdx3PayloadType, payloadBytes);
// Sign the PAE
var signatures = new List<DsseSpdx3Signature>();
var primarySignature = await _signingProvider
.SignAsync(paeBytes, options.PrimaryKeyId, options.PrimaryAlgorithm, cancellationToken)
.ConfigureAwait(false);
signatures.Add(new DsseSpdx3Signature
{
KeyId = primarySignature.KeyId,
Sig = ToBase64Url(primarySignature.SignatureBytes)
});
// Optional secondary signature (e.g., post-quantum algorithm)
if (!string.IsNullOrWhiteSpace(options.SecondaryKeyId))
{
var secondarySignature = await _signingProvider
.SignAsync(paeBytes, options.SecondaryKeyId, options.SecondaryAlgorithm, cancellationToken)
.ConfigureAwait(false);
signatures.Add(new DsseSpdx3Signature
{
KeyId = secondarySignature.KeyId,
Sig = ToBase64Url(secondarySignature.SignatureBytes)
});
}
return new DsseSpdx3Envelope
{
PayloadType = Spdx3PayloadType,
Payload = payloadBase64Url,
Signatures = signatures.ToImmutableArray(),
SignedAt = _timeProvider.GetUtcNow()
};
}
/// <inheritdoc />
public async Task<DsseSpdx3Envelope> SignBuildProfileAsync(
Spdx3Build build,
Spdx3Document? associatedSbom,
DsseSpdx3SigningOptions options,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(build);
ArgumentNullException.ThrowIfNull(options);
// Create a document containing the build element
var elements = new List<Spdx3Element> { build };
// Include associated SBOM elements if provided
if (associatedSbom is not null)
{
elements.AddRange(associatedSbom.Elements);
}
var creationInfo = build.CreationInfo ?? new Spdx3CreationInfo
{
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = _timeProvider.GetUtcNow(),
CreatedBy = ImmutableArray<string>.Empty,
Profile = ImmutableArray.Create(
Spdx3ProfileIdentifier.Core,
Spdx3ProfileIdentifier.Build)
};
var profiles = ImmutableHashSet.Create(
Spdx3ProfileIdentifier.Core,
Spdx3ProfileIdentifier.Build);
var document = new Spdx3Document(
elements: elements,
creationInfos: new[] { creationInfo },
profiles: profiles);
return await SignAsync(document, options, cancellationToken).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<bool> VerifyAsync(
DsseSpdx3Envelope envelope,
IReadOnlyList<DsseVerificationKey> trustedKeys,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(trustedKeys);
if (envelope.Signatures.IsEmpty)
{
return false;
}
// Decode payload
var payloadBytes = FromBase64Url(envelope.Payload);
// Build PAE for verification
var paeBytes = BuildPae(envelope.PayloadType, payloadBytes);
// Verify at least one signature from a trusted key
foreach (var signature in envelope.Signatures)
{
var trustedKey = trustedKeys.FirstOrDefault(k => k.KeyId == signature.KeyId);
if (trustedKey is null)
{
continue;
}
var signatureBytes = FromBase64Url(signature.Sig);
var isValid = await _signingProvider
.VerifyAsync(paeBytes, signatureBytes, trustedKey, cancellationToken)
.ConfigureAwait(false);
if (isValid)
{
return true;
}
}
return false;
}
/// <inheritdoc />
public Spdx3Document? ExtractDocument(DsseSpdx3Envelope envelope)
{
ArgumentNullException.ThrowIfNull(envelope);
if (envelope.PayloadType != Spdx3PayloadType)
{
return null;
}
var payloadBytes = FromBase64Url(envelope.Payload);
return _serializer.Deserialize(payloadBytes);
}
/// <summary>
/// Builds the Pre-Authentication Encoding (PAE) as per DSSE spec.
/// PAE format: "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload
/// </summary>
/// <remarks>
/// DSSE v1 PAE uses ASCII decimal for lengths and space as separator.
/// This prevents length-extension attacks and ensures unambiguous parsing.
/// </remarks>
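/// <example>
/// For a two-byte payload <c>{}</c> with type <c>application/spdx+json</c> (21 UTF-8 bytes), the PAE is the ASCII string
/// <c>DSSEv1 21 application/spdx+json 2 {}</c>.
/// </example>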
private static byte[] BuildPae(string payloadType, byte[] payload)
{
// PAE = "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
var paeString = $"{PaePrefix} {typeBytes.Length} {payloadType} {payload.Length} ";
var paePrefix = Encoding.UTF8.GetBytes(paeString);
var result = new byte[paePrefix.Length + payload.Length];
Buffer.BlockCopy(paePrefix, 0, result, 0, paePrefix.Length);
Buffer.BlockCopy(payload, 0, result, paePrefix.Length, payload.Length);
return result;
}
/// <summary>
/// Converts bytes to base64url encoding (RFC 4648 Section 5).
/// </summary>
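/// <example>For input bytes <c>{ 0x01, 0x02, 0x03 }</c> the result is <c>AQID</c>; padding is stripped and <c>+</c>/<c>/</c> become <c>-</c>/<c>_</c>.</example>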
private static string ToBase64Url(byte[] bytes)
{
return Convert.ToBase64String(bytes)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
}
/// <summary>
/// Converts base64url string back to bytes.
/// </summary>
private static byte[] FromBase64Url(string base64Url)
{
var base64 = base64Url
.Replace('-', '+')
.Replace('_', '/');
// Add padding if necessary
var padding = (4 - (base64.Length % 4)) % 4;
if (padding > 0)
{
base64 += new string('=', padding);
}
return Convert.FromBase64String(base64);
}
}
/// <summary>
/// Interface for signing SPDX 3.0.1 documents with DSSE.
/// Sprint: SPRINT_20260107_004_003 Task BP-005
/// </summary>
public interface IDsseSpdx3Signer
{
/// <summary>
/// Signs an SPDX 3.0.1 document with DSSE.
/// </summary>
/// <param name="document">The SPDX 3.0.1 document to sign.</param>
/// <param name="options">Signing options including key selection.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The DSSE envelope containing the signed document.</returns>
Task<DsseSpdx3Envelope> SignAsync(
Spdx3Document document,
DsseSpdx3SigningOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Signs an SPDX 3.0.1 Build profile element with DSSE.
/// </summary>
/// <param name="build">The Build element to sign.</param>
/// <param name="associatedSbom">Optional associated SBOM to include.</param>
/// <param name="options">Signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The DSSE envelope containing the signed Build profile.</returns>
Task<DsseSpdx3Envelope> SignBuildProfileAsync(
Spdx3Build build,
Spdx3Document? associatedSbom,
DsseSpdx3SigningOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE-signed SPDX 3.0.1 envelope.
/// </summary>
/// <param name="envelope">The envelope to verify.</param>
/// <param name="trustedKeys">List of trusted verification keys.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if the envelope is valid and signed by a trusted key.</returns>
Task<bool> VerifyAsync(
DsseSpdx3Envelope envelope,
IReadOnlyList<DsseVerificationKey> trustedKeys,
CancellationToken cancellationToken = default);
/// <summary>
/// Extracts the SPDX 3.0.1 document from a DSSE envelope.
/// </summary>
/// <param name="envelope">The envelope containing the signed document.</param>
/// <returns>The extracted document, or null if extraction fails.</returns>
Spdx3Document? ExtractDocument(DsseSpdx3Envelope envelope);
}
/// <summary>
/// DSSE envelope containing a signed SPDX 3.0.1 document.
/// </summary>
public sealed record DsseSpdx3Envelope
{
/// <summary>
/// Gets the payload type (should be "application/spdx+json").
/// </summary>
public required string PayloadType { get; init; }
/// <summary>
/// Gets the base64url-encoded payload.
/// </summary>
public required string Payload { get; init; }
/// <summary>
/// Gets the signatures over the PAE.
/// </summary>
public ImmutableArray<DsseSpdx3Signature> Signatures { get; init; } =
ImmutableArray<DsseSpdx3Signature>.Empty;
/// <summary>
/// Gets the timestamp when the document was signed.
/// </summary>
public DateTimeOffset SignedAt { get; init; }
}
/// <summary>
/// A signature within a DSSE envelope.
/// </summary>
public sealed record DsseSpdx3Signature
{
/// <summary>
/// Gets the key ID that produced this signature.
/// </summary>
public required string KeyId { get; init; }
/// <summary>
/// Gets the base64url-encoded signature value.
/// </summary>
public required string Sig { get; init; }
}
/// <summary>
/// Options for DSSE signing of SPDX 3.0.1 documents.
/// </summary>
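/// <example>
/// Hybrid (classical + post-quantum) signing options; the key identifiers are placeholders:
/// <code>
/// var options = new DsseSpdx3SigningOptions
/// {
///     PrimaryKeyId = "key-123",
///     PrimaryAlgorithm = "ES256",
///     SecondaryKeyId = "pq-key-456",
///     SecondaryAlgorithm = "ML-DSA-65"
/// };
/// </code>
/// </example>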
public sealed record DsseSpdx3SigningOptions
{
/// <summary>
/// Gets the primary signing key ID.
/// </summary>
public required string PrimaryKeyId { get; init; }
/// <summary>
/// Gets the primary signing algorithm (e.g., "ES256", "RS256").
/// </summary>
public string? PrimaryAlgorithm { get; init; }
/// <summary>
/// Gets the optional secondary signing key ID (e.g., for PQ hybrid).
/// </summary>
public string? SecondaryKeyId { get; init; }
/// <summary>
/// Gets the optional secondary signing algorithm.
/// </summary>
public string? SecondaryAlgorithm { get; init; }
/// <summary>
/// Gets whether to include timestamps in the envelope.
/// </summary>
public bool IncludeTimestamp { get; init; } = true;
}
/// <summary>
/// Provider interface for DSSE signing operations.
/// </summary>
public interface IDsseSigningProvider
{
/// <summary>
/// Signs data with the specified key.
/// </summary>
/// <param name="data">The data to sign (PAE bytes).</param>
/// <param name="keyId">The key ID to use.</param>
/// <param name="algorithm">Optional algorithm override.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The signature result.</returns>
Task<DsseSignatureResult> SignAsync(
byte[] data,
string keyId,
string? algorithm,
CancellationToken cancellationToken);
/// <summary>
/// Verifies a signature against the data.
/// </summary>
/// <param name="data">The original data (PAE bytes).</param>
/// <param name="signature">The signature to verify.</param>
/// <param name="key">The verification key.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if the signature is valid.</returns>
Task<bool> VerifyAsync(
byte[] data,
byte[] signature,
DsseVerificationKey key,
CancellationToken cancellationToken);
}
/// <summary>
/// Result of a DSSE signing operation.
/// </summary>
public sealed record DsseSignatureResult
{
/// <summary>
/// Gets the key ID used for signing.
/// </summary>
public required string KeyId { get; init; }
/// <summary>
/// Gets the raw signature bytes.
/// </summary>
public required byte[] SignatureBytes { get; init; }
/// <summary>
/// Gets the algorithm used.
/// </summary>
public string? Algorithm { get; init; }
}
/// <summary>
/// A verification key for DSSE signature validation.
/// </summary>
public sealed record DsseVerificationKey
{
/// <summary>
/// Gets the key ID.
/// </summary>
public required string KeyId { get; init; }
/// <summary>
/// Gets the public key bytes.
/// </summary>
public required byte[] PublicKey { get; init; }
/// <summary>
/// Gets the algorithm.
/// </summary>
public string? Algorithm { get; init; }
}
/// <summary>
/// Interface for SPDX 3.0.1 document serialization.
/// </summary>
public interface ISpdx3Serializer
{
/// <summary>
/// Serializes an SPDX 3.0.1 document to canonical JSON bytes.
/// </summary>
byte[] SerializeToBytes(Spdx3Document document);
/// <summary>
/// Deserializes bytes to an SPDX 3.0.1 document.
/// </summary>
Spdx3Document? Deserialize(byte[] bytes);
}

View File

@@ -0,0 +1,172 @@
// <copyright file="IBuildAttestationMapper.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using StellaOps.Spdx3.Model.Build;
namespace StellaOps.Attestor.Spdx3;
/// <summary>
/// Maps between SLSA/in-toto build attestations and SPDX 3.0.1 Build profile elements.
/// Sprint: SPRINT_20260107_004_003 Task BP-003
/// </summary>
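/// <example>
/// Round-trip sketch; <c>BuildAttestationMapper</c> is the concrete implementation used elsewhere in this module, and
/// the prefix URI is a placeholder:
/// <code>
/// IBuildAttestationMapper mapper = new BuildAttestationMapper();
/// var build = mapper.MapToSpdx3(payload, "https://example.org/spdx");
/// var roundTripped = mapper.MapFromSpdx3(build);
/// </code>
/// </example>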
public interface IBuildAttestationMapper
{
/// <summary>
/// Maps an in-toto/SLSA build attestation to an SPDX 3.0.1 Build element.
/// </summary>
/// <param name="attestation">The source build attestation.</param>
/// <param name="spdxIdPrefix">Prefix for generating the SPDX ID.</param>
/// <returns>The mapped SPDX 3.0.1 Build element.</returns>
Spdx3Build MapToSpdx3(BuildAttestationPayload attestation, string spdxIdPrefix);
/// <summary>
/// Maps an SPDX 3.0.1 Build element to an in-toto/SLSA build attestation payload.
/// </summary>
/// <param name="build">The source SPDX 3.0.1 Build element.</param>
/// <returns>The mapped build attestation payload.</returns>
BuildAttestationPayload MapFromSpdx3(Spdx3Build build);
/// <summary>
/// Determines if the attestation can be fully mapped to SPDX 3.0.1.
/// </summary>
/// <param name="attestation">The attestation to check.</param>
/// <returns>True if all required fields can be mapped.</returns>
bool CanMapToSpdx3(BuildAttestationPayload attestation);
}
/// <summary>
/// Represents an in-toto/SLSA build attestation payload.
/// Sprint: SPRINT_20260107_004_003 Task BP-003
/// </summary>
public sealed record BuildAttestationPayload
{
/// <summary>
/// Gets or sets the build type (typically the provenance predicate type URI, e.g., "https://slsa.dev/provenance/v1").
/// </summary>
public required string BuildType { get; init; }
/// <summary>
/// Gets or sets the builder information.
/// </summary>
public BuilderInfo? Builder { get; init; }
/// <summary>
/// Gets or sets the build invocation information.
/// </summary>
public BuildInvocation? Invocation { get; init; }
/// <summary>
/// Gets or sets the build metadata.
/// </summary>
public BuildMetadata? Metadata { get; init; }
/// <summary>
/// Gets or sets the build materials (source inputs).
/// </summary>
public IReadOnlyList<BuildMaterial> Materials { get; init; } = Array.Empty<BuildMaterial>();
}
/// <summary>
/// Builder information from SLSA provenance.
/// </summary>
public sealed record BuilderInfo
{
/// <summary>
/// Gets or sets the builder ID (URI).
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Gets or sets the builder version.
/// </summary>
public string? Version { get; init; }
}
/// <summary>
/// Build invocation information from SLSA provenance.
/// </summary>
public sealed record BuildInvocation
{
/// <summary>
/// Gets or sets the config source information.
/// </summary>
public ConfigSource? ConfigSource { get; init; }
/// <summary>
/// Gets or sets the environment variables.
/// </summary>
public IReadOnlyDictionary<string, string> Environment { get; init; } =
new Dictionary<string, string>();
/// <summary>
/// Gets or sets the build parameters.
/// </summary>
public IReadOnlyDictionary<string, string> Parameters { get; init; } =
new Dictionary<string, string>();
}
/// <summary>
/// Configuration source information.
/// </summary>
public sealed record ConfigSource
{
/// <summary>
/// Gets or sets the config source URI.
/// </summary>
public string? Uri { get; init; }
/// <summary>
/// Gets or sets the digest of the config source.
/// </summary>
public IReadOnlyDictionary<string, string> Digest { get; init; } =
new Dictionary<string, string>();
/// <summary>
/// Gets or sets the entry point within the config source.
/// </summary>
public string? EntryPoint { get; init; }
}
/// <summary>
/// Build metadata from SLSA provenance.
/// </summary>
public sealed record BuildMetadata
{
/// <summary>
/// Gets or sets the build invocation ID.
/// </summary>
public string? BuildInvocationId { get; init; }
/// <summary>
/// Gets or sets when the build started.
/// </summary>
public DateTimeOffset? BuildStartedOn { get; init; }
/// <summary>
/// Gets or sets when the build finished.
/// </summary>
public DateTimeOffset? BuildFinishedOn { get; init; }
/// <summary>
/// Gets or sets whether the build is reproducible.
/// </summary>
public bool? Reproducible { get; init; }
}
/// <summary>
/// Build material (input) from SLSA provenance.
/// </summary>
public sealed record BuildMaterial
{
/// <summary>
/// Gets or sets the material URI.
/// </summary>
public required string Uri { get; init; }
/// <summary>
/// Gets or sets the material digest.
/// </summary>
public IReadOnlyDictionary<string, string> Digest { get; init; } =
new Dictionary<string, string>();
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Attestor.Spdx3</RootNamespace>
<Description>SPDX 3.0.1 Build profile integration for StellaOps Attestor</Description>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Spdx3\StellaOps.Spdx3.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
# Attestor SPDX3 Build Profile Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0848-M | DONE | Revalidated 2026-01-08. |
| AUDIT-0848-T | DONE | Revalidated 2026-01-08. |
| AUDIT-0848-A | TODO | Open findings; apply pending approval. |

View File

@@ -0,0 +1,19 @@
# Attestor SPDX3 Build Profile Tests Charter
## Purpose & Scope
- Working directory: `src/Attestor/__Libraries/__Tests/StellaOps.Attestor.Spdx3.Tests/`.
- Roles: QA automation, backend engineer.
- Focus: deterministic unit tests for SPDX3 build mapping and validation.
## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/attestor/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- Use fixed timestamps and IDs in fixtures (see the sketch after this list).
- Avoid `Random`, `Guid.NewGuid`, and `DateTime.UtcNow` in tests.
- Cover error paths and deterministic ID generation.
- Update `TASKS.md` and sprint tracker as statuses change.
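
A minimal fixture sketch for the fixed-timestamp rule, assuming the `Microsoft.Extensions.TimeProvider.Testing` package already referenced by the test project:

```csharp
using Microsoft.Extensions.Time.Testing;

// Fixed, explicit timestamp keeps generated documents byte-for-byte reproducible.
var fixedNow = new DateTimeOffset(2026, 1, 8, 12, 0, 0, TimeSpan.Zero);
var timeProvider = new FakeTimeProvider(fixedNow);
var builder = CombinedDocumentBuilder.Create(timeProvider);
```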

View File

@@ -0,0 +1,176 @@
// <copyright file="BuildAttestationMapperTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Spdx3.Model.Build;
using Xunit;
namespace StellaOps.Attestor.Spdx3.Tests;
/// <summary>
/// Unit tests for <see cref="BuildAttestationMapper"/>.
/// Sprint: SPRINT_20260107_004_003 Task BP-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class BuildAttestationMapperTests
{
private readonly BuildAttestationMapper _mapper = new();
private const string SpdxIdPrefix = "https://stellaops.io/spdx/test";
[Fact]
public void MapToSpdx3_WithFullAttestation_MapsAllFields()
{
// Arrange
var attestation = new BuildAttestationPayload
{
BuildType = "https://slsa.dev/provenance/v1",
Builder = new BuilderInfo { Id = "https://github.com/actions/runner", Version = "2.300.0" },
Invocation = new BuildInvocation
{
ConfigSource = new ConfigSource
{
Uri = "https://github.com/stellaops/app",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" },
EntryPoint = ".github/workflows/build.yml"
},
Environment = new Dictionary<string, string> { ["CI"] = "true" },
Parameters = new Dictionary<string, string> { ["target"] = "release" }
},
Metadata = new BuildMetadata
{
BuildInvocationId = "run-12345",
BuildStartedOn = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero),
BuildFinishedOn = new DateTimeOffset(2026, 1, 7, 12, 5, 0, TimeSpan.Zero)
}
};
// Act
var build = _mapper.MapToSpdx3(attestation, SpdxIdPrefix);
// Assert
build.Should().NotBeNull();
build.BuildType.Should().Be("https://slsa.dev/provenance/v1");
build.BuildId.Should().Be("run-12345");
build.BuildStartTime.Should().Be(attestation.Metadata.BuildStartedOn);
build.BuildEndTime.Should().Be(attestation.Metadata.BuildFinishedOn);
build.ConfigSourceUri.Should().ContainSingle().Which.Should().Be("https://github.com/stellaops/app");
build.ConfigSourceDigest.Should().ContainSingle().Which.Algorithm.Should().Be("sha256");
build.ConfigSourceEntrypoint.Should().ContainSingle().Which.Should().Be(".github/workflows/build.yml");
build.Environment.Should().ContainKey("CI").WhoseValue.Should().Be("true");
build.Parameter.Should().ContainKey("target").WhoseValue.Should().Be("release");
build.SpdxId.Should().StartWith(SpdxIdPrefix);
}
[Fact]
public void MapToSpdx3_WithMinimalAttestation_MapsRequiredFields()
{
// Arrange
var attestation = new BuildAttestationPayload
{
BuildType = "https://stellaops.org/build/scan/v1"
};
// Act
var build = _mapper.MapToSpdx3(attestation, SpdxIdPrefix);
// Assert
build.Should().NotBeNull();
build.BuildType.Should().Be("https://stellaops.org/build/scan/v1");
build.SpdxId.Should().StartWith(SpdxIdPrefix);
build.ConfigSourceUri.Should().BeEmpty();
build.Environment.Should().BeEmpty();
}
[Fact]
public void MapFromSpdx3_WithFullBuild_MapsToAttestation()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123",
BuildStartTime = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero),
BuildEndTime = new DateTimeOffset(2026, 1, 7, 12, 5, 0, TimeSpan.Zero),
ConfigSourceUri = ImmutableArray.Create("https://github.com/stellaops/app"),
ConfigSourceDigest = ImmutableArray.Create(Spdx3Hash.Sha256("abc123")),
ConfigSourceEntrypoint = ImmutableArray.Create("Dockerfile"),
Environment = ImmutableDictionary<string, string>.Empty.Add("CI", "true"),
Parameter = ImmutableDictionary<string, string>.Empty.Add("target", "release")
};
// Act
var attestation = _mapper.MapFromSpdx3(build);
// Assert
attestation.Should().NotBeNull();
attestation.BuildType.Should().Be("https://slsa.dev/provenance/v1");
attestation.Metadata!.BuildInvocationId.Should().Be("build-123");
attestation.Metadata!.BuildStartedOn.Should().Be(build.BuildStartTime);
attestation.Metadata!.BuildFinishedOn.Should().Be(build.BuildEndTime);
attestation.Invocation!.ConfigSource!.Uri.Should().Be("https://github.com/stellaops/app");
attestation.Invocation!.Environment.Should().ContainKey("CI");
}
[Fact]
public void CanMapToSpdx3_WithValidAttestation_ReturnsTrue()
{
// Arrange
var attestation = new BuildAttestationPayload
{
BuildType = "https://slsa.dev/provenance/v1"
};
// Act
var result = _mapper.CanMapToSpdx3(attestation);
// Assert
result.Should().BeTrue();
}
[Fact]
public void CanMapToSpdx3_WithEmptyBuildType_ReturnsFalse()
{
// Arrange
var attestation = new BuildAttestationPayload
{
BuildType = ""
};
// Act
var result = _mapper.CanMapToSpdx3(attestation);
// Assert
result.Should().BeFalse();
}
[Fact]
public void CanMapToSpdx3_WithNull_ReturnsFalse()
{
// Act
var result = _mapper.CanMapToSpdx3(null!);
// Assert
result.Should().BeFalse();
}
[Fact]
public void MapToSpdx3_GeneratesDeterministicSpdxId()
{
// Arrange
var attestation = new BuildAttestationPayload
{
BuildType = "https://slsa.dev/provenance/v1",
Metadata = new BuildMetadata { BuildInvocationId = "fixed-id-123" }
};
// Act
var build1 = _mapper.MapToSpdx3(attestation, SpdxIdPrefix);
var build2 = _mapper.MapToSpdx3(attestation, SpdxIdPrefix);
// Assert
build1.SpdxId.Should().Be(build2.SpdxId);
}
}

View File

@@ -0,0 +1,185 @@
// <copyright file="BuildProfileValidatorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Spdx3.Model.Build;
using Xunit;
namespace StellaOps.Attestor.Spdx3.Tests;
/// <summary>
/// Unit tests for <see cref="BuildProfileValidator"/>.
/// Sprint: SPRINT_20260107_004_003 Task BP-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class BuildProfileValidatorTests
{
[Fact]
public void Validate_WithValidBuild_ReturnsSuccess()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123",
BuildStartTime = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero),
BuildEndTime = new DateTimeOffset(2026, 1, 7, 12, 5, 0, TimeSpan.Zero)
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeTrue();
result.ErrorsOnly.Should().BeEmpty();
}
[Fact]
public void Validate_WithMissingBuildType_ReturnsError()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "",
BuildId = "build-123"
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeFalse();
result.ErrorsOnly.Should().ContainSingle()
.Which.Field.Should().Be("buildType");
}
[Fact]
public void Validate_WithInvalidBuildTypeUri_ReturnsError()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "not-a-uri",
BuildId = "build-123"
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeFalse();
result.ErrorsOnly.Should().ContainSingle()
.Which.Message.Should().Contain("valid URI");
}
[Fact]
public void Validate_WithEndTimeBeforeStartTime_ReturnsError()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123",
BuildStartTime = new DateTimeOffset(2026, 1, 7, 12, 5, 0, TimeSpan.Zero),
BuildEndTime = new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero) // Before start
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeFalse();
result.ErrorsOnly.Should().ContainSingle()
.Which.Field.Should().Be("buildEndTime");
}
[Fact]
public void Validate_WithMissingBuildId_ReturnsWarning()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1"
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeTrue(); // Warnings don't fail validation
result.WarningsOnly.Should().ContainSingle()
.Which.Field.Should().Be("buildId");
}
[Fact]
public void Validate_WithDigestWithoutUri_ReturnsWarning()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123",
ConfigSourceDigest = ImmutableArray.Create(Spdx3Hash.Sha256("abc123"))
// Note: ConfigSourceUri is empty
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeTrue();
result.WarningsOnly.Should().Contain(w => w.Field == "configSourceDigest");
}
[Fact]
public void Validate_WithUnknownHashAlgorithm_ReturnsWarning()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/test/build/123",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123",
ConfigSourceUri = ImmutableArray.Create("https://github.com/test/repo"),
ConfigSourceDigest = ImmutableArray.Create(new Spdx3Hash
{
Algorithm = "unknown-algo",
HashValue = "abc123"
})
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeTrue();
result.WarningsOnly.Should().Contain(w => w.Field == "configSourceDigest.algorithm");
}
[Fact]
public void Validate_WithMissingSpdxId_ReturnsError()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "",
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-123"
};
// Act
var result = BuildProfileValidator.Validate(build);
// Assert
result.IsValid.Should().BeFalse();
result.ErrorsOnly.Should().Contain(e => e.Field == "spdxId");
}
}

View File

@@ -0,0 +1,280 @@
// <copyright file="CombinedDocumentBuilderTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
using StellaOps.Spdx3.Model.Software;
using Xunit;
namespace StellaOps.Attestor.Spdx3.Tests;
/// <summary>
/// Unit tests for <see cref="CombinedDocumentBuilder"/>.
/// Sprint: SPRINT_20260107_004_003 Task BP-008
/// </summary>
[Trait("Category", "Unit")]
public sealed class CombinedDocumentBuilderTests
{
private readonly FakeTimeProvider _timeProvider;
private static readonly DateTimeOffset FixedTimestamp =
new(2026, 1, 8, 12, 0, 0, TimeSpan.Zero);
public CombinedDocumentBuilderTests()
{
_timeProvider = new FakeTimeProvider(FixedTimestamp);
}
[Fact]
public void Build_WithSoftwareAndBuildProfiles_CreatesCombinedDocument()
{
// Arrange
var sbom = CreateTestSbom();
var build = CreateTestBuild();
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/combined/12345")
.WithName("Combined SBOM and Build")
.WithSoftwareProfile(sbom)
.WithBuildProfile(build)
.Build();
// Assert
document.Should().NotBeNull();
document.Profiles.Should().Contain(Spdx3ProfileIdentifier.Core);
document.Profiles.Should().Contain(Spdx3ProfileIdentifier.Software);
document.Profiles.Should().Contain(Spdx3ProfileIdentifier.Build);
}
[Fact]
public void Build_WithBuildProfile_CreatesGeneratesRelationship()
{
// Arrange
var sbom = CreateTestSbom();
var build = CreateTestBuild();
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/combined/12345")
.WithSoftwareProfile(sbom)
.WithBuildProfile(build)
.Build();
// Assert
var relationships = document.Relationships.ToList();
relationships.Should().Contain(r =>
r.RelationshipType == Spdx3RelationshipType.Generates &&
r.From == build.SpdxId);
}
[Fact]
public void Build_WithBuildAttestation_MapsBuildFromAttestation()
{
// Arrange
var sbom = CreateTestSbom();
var attestation = new BuildAttestationPayload
{
BuildType = "https://slsa.dev/provenance/v1",
Metadata = new BuildMetadata
{
BuildInvocationId = "run-12345",
BuildStartedOn = FixedTimestamp
}
};
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/combined/12345")
.WithSoftwareProfile(sbom)
.WithBuildAttestation(attestation, "https://stellaops.io/spdx")
.Build();
// Assert
document.Elements.Should().Contain(e => e is Spdx3Build);
var buildElement = document.Elements.OfType<Spdx3Build>().First();
buildElement.BuildType.Should().Be("https://slsa.dev/provenance/v1");
buildElement.BuildId.Should().Be("run-12345");
}
[Fact]
public void Build_WithoutDocumentId_ThrowsInvalidOperationException()
{
// Arrange
var sbom = CreateTestSbom();
// Act
var act = () => CombinedDocumentBuilder.Create(_timeProvider)
.WithSoftwareProfile(sbom)
.Build();
// Assert
act.Should().Throw<InvalidOperationException>()
.WithMessage("*Document SPDX ID is required*");
}
[Fact]
public void Build_CreatesDefaultCreationInfo()
{
// Arrange
var sbom = CreateTestSbom();
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/doc/12345")
.WithSoftwareProfile(sbom)
.Build();
// Assert
document.CreationInfos.Should().HaveCount(1);
var creationInfo = document.CreationInfos.First();
creationInfo.SpecVersion.Should().Be(Spdx3CreationInfo.Spdx301Version);
creationInfo.Created.Should().Be(FixedTimestamp);
}
[Fact]
public void Build_WithCustomCreationInfo_UsesProvidedInfo()
{
// Arrange
var sbom = CreateTestSbom();
var customCreationInfo = new Spdx3CreationInfo
{
Id = "custom-creation-info",
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = FixedTimestamp.AddHours(-1),
CreatedBy = ImmutableArray.Create("custom-author"),
Profile = ImmutableArray.Create(Spdx3ProfileIdentifier.Core)
};
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/doc/12345")
.WithSoftwareProfile(sbom)
.WithCreationInfo(customCreationInfo)
.Build();
// Assert
document.CreationInfos.Should().Contain(customCreationInfo);
}
[Fact]
public void WithBuildProvenance_ExtensionMethod_CreatesCombinedDocument()
{
// Arrange
var sbom = CreateTestSbom();
var attestation = new BuildAttestationPayload
{
BuildType = "https://stellaops.org/build/scan/v1"
};
// Act
var combined = sbom.WithBuildProvenance(
attestation,
documentId: "https://stellaops.io/spdx/combined/ext-12345",
spdxIdPrefix: "https://stellaops.io/spdx",
timeProvider: _timeProvider);
// Assert
combined.Should().NotBeNull();
combined.Profiles.Should().Contain(Spdx3ProfileIdentifier.Build);
combined.Elements.Should().Contain(e => e is Spdx3Build);
}
[Fact]
public void Build_PreservesAllSbomElements()
{
// Arrange
var sbom = CreateTestSbomWithMultiplePackages();
// Act
var document = CombinedDocumentBuilder.Create(_timeProvider)
.WithDocumentId("https://stellaops.io/spdx/doc/12345")
.WithSoftwareProfile(sbom)
.Build();
// Assert
var packages = document.Packages.ToList();
packages.Should().HaveCount(3);
}
private static Spdx3Document CreateTestSbom()
{
var creationInfo = new Spdx3CreationInfo
{
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = FixedTimestamp.AddDays(-1),
CreatedBy = ImmutableArray<string>.Empty,
Profile = ImmutableArray.Create(Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Software)
};
var rootPackage = new Spdx3Package
{
SpdxId = "https://stellaops.io/spdx/pkg/root",
Type = "software_Package",
Name = "test-root-package",
PackageVersion = "1.0.0"
};
return new Spdx3Document(
elements: new Spdx3Element[] { rootPackage },
creationInfos: new[] { creationInfo },
profiles: new[] { Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Software });
}
private static Spdx3Document CreateTestSbomWithMultiplePackages()
{
var creationInfo = new Spdx3CreationInfo
{
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = FixedTimestamp.AddDays(-1),
CreatedBy = ImmutableArray<string>.Empty,
Profile = ImmutableArray.Create(Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Software)
};
var packages = new Spdx3Package[]
{
new()
{
SpdxId = "https://stellaops.io/spdx/pkg/root",
Type = "software_Package",
Name = "root-package",
PackageVersion = "1.0.0"
},
new()
{
SpdxId = "https://stellaops.io/spdx/pkg/dep1",
Type = "software_Package",
Name = "dependency-1",
PackageVersion = "2.0.0"
},
new()
{
SpdxId = "https://stellaops.io/spdx/pkg/dep2",
Type = "software_Package",
Name = "dependency-2",
PackageVersion = "3.0.0"
}
};
return new Spdx3Document(
elements: packages,
creationInfos: new[] { creationInfo },
profiles: new[] { Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Software });
}
private static Spdx3Build CreateTestBuild()
{
return new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/build/12345",
Type = Spdx3Build.TypeName,
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-12345",
BuildStartTime = FixedTimestamp.AddMinutes(-5),
BuildEndTime = FixedTimestamp
};
}
}

View File

@@ -0,0 +1,307 @@
// <copyright file="DsseSpdx3SignerTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Build;
using Xunit;
namespace StellaOps.Attestor.Spdx3.Tests;
/// <summary>
/// Unit tests for <see cref="DsseSpdx3Signer"/>.
/// Sprint: SPRINT_20260107_004_003 Task BP-005
/// </summary>
[Trait("Category", "Unit")]
public sealed class DsseSpdx3SignerTests
{
private readonly FakeTimeProvider _timeProvider;
private readonly Mock<ISpdx3Serializer> _serializerMock;
private readonly Mock<IDsseSigningProvider> _signingProviderMock;
private readonly DsseSpdx3Signer _signer;
private static readonly DateTimeOffset FixedTimestamp =
new(2026, 1, 7, 12, 0, 0, TimeSpan.Zero);
public DsseSpdx3SignerTests()
{
_timeProvider = new FakeTimeProvider(FixedTimestamp);
_serializerMock = new Mock<ISpdx3Serializer>();
_signingProviderMock = new Mock<IDsseSigningProvider>();
_signer = new DsseSpdx3Signer(
_serializerMock.Object,
_signingProviderMock.Object,
_timeProvider);
}
[Fact]
public async Task SignAsync_WithValidDocument_ReturnsEnvelope()
{
// Arrange
var document = CreateTestDocument();
var options = new DsseSpdx3SigningOptions { PrimaryKeyId = "key-123" };
var payloadBytes = Encoding.UTF8.GetBytes("{\"test\":\"document\"}");
_serializerMock
.Setup(s => s.SerializeToBytes(document))
.Returns(payloadBytes);
_signingProviderMock
.Setup(s => s.SignAsync(
It.IsAny<byte[]>(),
"key-123",
null,
It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseSignatureResult
{
KeyId = "key-123",
SignatureBytes = new byte[] { 0x01, 0x02, 0x03 }
});
// Act
var envelope = await _signer.SignAsync(document, options);
// Assert
envelope.Should().NotBeNull();
envelope.PayloadType.Should().Be(DsseSpdx3Signer.Spdx3PayloadType);
envelope.Payload.Should().NotBeNullOrEmpty();
envelope.Signatures.Should().HaveCount(1);
envelope.Signatures[0].KeyId.Should().Be("key-123");
envelope.SignedAt.Should().Be(FixedTimestamp);
}
[Fact]
public async Task SignAsync_WithSecondaryKey_ReturnsTwoSignatures()
{
// Arrange
var document = CreateTestDocument();
var options = new DsseSpdx3SigningOptions
{
PrimaryKeyId = "key-123",
PrimaryAlgorithm = "ES256",
SecondaryKeyId = "pq-key-456",
SecondaryAlgorithm = "ML-DSA-65"
};
var payloadBytes = Encoding.UTF8.GetBytes("{\"test\":\"document\"}");
_serializerMock
.Setup(s => s.SerializeToBytes(document))
.Returns(payloadBytes);
_signingProviderMock
.Setup(s => s.SignAsync(
It.IsAny<byte[]>(),
"key-123",
"ES256",
It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseSignatureResult
{
KeyId = "key-123",
SignatureBytes = new byte[] { 0x01, 0x02, 0x03 },
Algorithm = "ES256"
});
_signingProviderMock
.Setup(s => s.SignAsync(
It.IsAny<byte[]>(),
"pq-key-456",
"ML-DSA-65",
It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseSignatureResult
{
KeyId = "pq-key-456",
SignatureBytes = new byte[] { 0x04, 0x05, 0x06 },
Algorithm = "ML-DSA-65"
});
// Act
var envelope = await _signer.SignAsync(document, options);
// Assert
envelope.Signatures.Should().HaveCount(2);
envelope.Signatures[0].KeyId.Should().Be("key-123");
envelope.Signatures[1].KeyId.Should().Be("pq-key-456");
}
[Fact]
public async Task SignBuildProfileAsync_CreatesBuildDocument()
{
// Arrange
var build = new Spdx3Build
{
SpdxId = "https://stellaops.io/spdx/build/12345",
Type = Spdx3Build.TypeName,
BuildType = "https://slsa.dev/provenance/v1",
BuildId = "build-12345"
};
var options = new DsseSpdx3SigningOptions { PrimaryKeyId = "key-123" };
_serializerMock
.Setup(s => s.SerializeToBytes(It.IsAny<Spdx3Document>()))
.Returns(Encoding.UTF8.GetBytes("{\"build\":\"test\"}"));
_signingProviderMock
.Setup(s => s.SignAsync(
It.IsAny<byte[]>(),
It.IsAny<string>(),
It.IsAny<string?>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseSignatureResult
{
KeyId = "key-123",
SignatureBytes = new byte[] { 0x01, 0x02, 0x03 }
});
// Act
var envelope = await _signer.SignBuildProfileAsync(build, null, options);
// Assert
envelope.Should().NotBeNull();
envelope.PayloadType.Should().Be(DsseSpdx3Signer.Spdx3PayloadType);
_serializerMock.Verify(
s => s.SerializeToBytes(It.Is<Spdx3Document>(d =>
d.Elements.Any(e => e is Spdx3Build))),
Times.Once);
}
[Fact]
public async Task VerifyAsync_WithValidSignature_ReturnsTrue()
{
// Arrange
var envelope = new DsseSpdx3Envelope
{
PayloadType = DsseSpdx3Signer.Spdx3PayloadType,
Payload = "eyJ0ZXN0IjoiZG9jdW1lbnQifQ", // base64url of {"test":"document"}
Signatures = ImmutableArray.Create(new DsseSpdx3Signature
{
KeyId = "key-123",
Sig = "AQID" // base64url of [0x01, 0x02, 0x03]
})
};
var trustedKeys = new List<DsseVerificationKey>
{
new() { KeyId = "key-123", PublicKey = new byte[] { 0x10, 0x20 } }
};
_signingProviderMock
.Setup(s => s.VerifyAsync(
It.IsAny<byte[]>(),
It.IsAny<byte[]>(),
It.Is<DsseVerificationKey>(k => k.KeyId == "key-123"),
It.IsAny<CancellationToken>()))
.ReturnsAsync(true);
// Act
var result = await _signer.VerifyAsync(envelope, trustedKeys);
// Assert
result.Should().BeTrue();
}
[Fact]
public async Task VerifyAsync_WithUntrustedKey_ReturnsFalse()
{
// Arrange
var envelope = new DsseSpdx3Envelope
{
PayloadType = DsseSpdx3Signer.Spdx3PayloadType,
Payload = "eyJ0ZXN0IjoiZG9jdW1lbnQifQ",
Signatures = ImmutableArray.Create(new DsseSpdx3Signature
{
KeyId = "untrusted-key",
Sig = "AQID"
})
};
var trustedKeys = new List<DsseVerificationKey>
{
new() { KeyId = "key-123", PublicKey = new byte[] { 0x10, 0x20 } }
};
// Act
var result = await _signer.VerifyAsync(envelope, trustedKeys);
// Assert
result.Should().BeFalse();
}
[Fact]
public void ExtractDocument_WithValidEnvelope_ReturnsDocument()
{
// Arrange
var originalDocument = CreateTestDocument();
var payloadBytes = Encoding.UTF8.GetBytes("{\"test\":\"document\"}");
var payload = Convert.ToBase64String(payloadBytes)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
var envelope = new DsseSpdx3Envelope
{
PayloadType = DsseSpdx3Signer.Spdx3PayloadType,
Payload = payload,
Signatures = ImmutableArray<DsseSpdx3Signature>.Empty
};
_serializerMock
.Setup(s => s.Deserialize(It.IsAny<byte[]>()))
.Returns(originalDocument);
// Act
var extracted = _signer.ExtractDocument(envelope);
// Assert
extracted.Should().NotBeNull();
extracted.Should().Be(originalDocument);
}
[Fact]
public void ExtractDocument_WithWrongPayloadType_ReturnsNull()
{
// Arrange
var envelope = new DsseSpdx3Envelope
{
PayloadType = "application/vnd.in-toto+json",
Payload = "eyJ0ZXN0IjoiZG9jdW1lbnQifQ",
Signatures = ImmutableArray<DsseSpdx3Signature>.Empty
};
// Act
var extracted = _signer.ExtractDocument(envelope);
// Assert
extracted.Should().BeNull();
}
[Fact]
public void PayloadType_IsCorrectSpdxMediaType()
{
// Assert
DsseSpdx3Signer.Spdx3PayloadType.Should().Be("application/spdx+json");
}
private static Spdx3Document CreateTestDocument()
{
var creationInfo = new Spdx3CreationInfo
{
SpecVersion = Spdx3CreationInfo.Spdx301Version,
Created = FixedTimestamp,
CreatedBy = ImmutableArray<string>.Empty,
Profile = ImmutableArray.Create(Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Build)
};
return new Spdx3Document(
elements: Array.Empty<Spdx3Element>(),
creationInfos: new[] { creationInfo },
profiles: new[] { Spdx3ProfileIdentifier.Core, Spdx3ProfileIdentifier.Build });
}
}

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.0" />
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Attestor.Spdx3\StellaOps.Attestor.Spdx3.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
# Attestor SPDX3 Build Profile Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0849-M | DONE | Revalidated 2026-01-08. |
| AUDIT-0849-T | DONE | Revalidated 2026-01-08. |
| AUDIT-0849-A | DONE | Waived (test project; revalidated 2026-01-08). |

View File

@@ -27,6 +27,7 @@ public class AttestationBundlerTests
private readonly Mock<ILogger<AttestationBundler>> _loggerMock;
private readonly IOptions<BundlingOptions> _options;
private readonly DateTimeOffset _fixedNow = new(2026, 1, 2, 0, 0, 0, TimeSpan.Zero);
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public AttestationBundlerTests()
{
@@ -53,7 +54,7 @@ public class AttestationBundlerTests
_fixedNow);
// Act
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
bundle.Should().NotBeNull();
@@ -81,12 +82,12 @@ public class AttestationBundlerTests
_fixedNow.AddDays(-30),
_fixedNow);
var bundle1 = await bundler1.CreateBundleAsync(request);
var bundle1 = await bundler1.CreateBundleAsync(request, TestCancellationToken);
// Reset and use different order
SetupAggregator(shuffled2);
var bundler2 = CreateBundler();
var bundle2 = await bundler2.CreateBundleAsync(request);
var bundle2 = await bundler2.CreateBundleAsync(request, TestCancellationToken);
// Assert - same merkle root regardless of input order
bundle1.MerkleTree.Root.Should().Be(bundle2.MerkleTree.Root);
@@ -107,7 +108,7 @@ public class AttestationBundlerTests
// Act & Assert
await Assert.ThrowsAsync<InvalidOperationException>(
() => bundler.CreateBundleAsync(request));
() => bundler.CreateBundleAsync(request, TestCancellationToken));
}
[Trait("Category", TestCategories.Unit)]
@@ -124,7 +125,7 @@ public class AttestationBundlerTests
// Act & Assert
await Assert.ThrowsAsync<ArgumentException>(
() => bundler.CreateBundleAsync(request, TestContext.Current.CancellationToken));
() => bundler.CreateBundleAsync(request, TestCancellationToken));
}
[Trait("Category", TestCategories.Unit)]
@@ -148,7 +149,7 @@ public class AttestationBundlerTests
// Act & Assert
await Assert.ThrowsAsync<InvalidOperationException>(
() => bundler.CreateBundleAsync(request, TestContext.Current.CancellationToken));
() => bundler.CreateBundleAsync(request, TestCancellationToken));
}
[Trait("Category", TestCategories.Unit)]
@@ -172,7 +173,7 @@ public class AttestationBundlerTests
_fixedNow);
// Act
await bundler.CreateBundleAsync(request, TestContext.Current.CancellationToken);
await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
var expectedStart = _fixedNow.AddDays(-7);
@@ -196,7 +197,7 @@ public class AttestationBundlerTests
// Act & Assert
await Assert.ThrowsAsync<InvalidOperationException>(
() => bundler.CreateBundleAsync(request, TestContext.Current.CancellationToken));
() => bundler.CreateBundleAsync(request, TestCancellationToken));
}
[Trait("Category", TestCategories.Unit)]
@@ -213,7 +214,7 @@ public class AttestationBundlerTests
_fixedNow);
// Act
var bundle = await bundler.CreateBundleAsync(request, TestContext.Current.CancellationToken);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
bundle.Metadata.CreatedAt.Should().Be(_fixedNow);
@@ -259,7 +260,7 @@ public class AttestationBundlerTests
SignWithOrgKey: true);
// Act
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
bundle.OrgSignature.Should().NotBeNull();
@@ -281,10 +282,10 @@ public class AttestationBundlerTests
_fixedNow.AddDays(-30),
_fixedNow);
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Act
var result = await bundler.VerifyBundleAsync(bundle);
var result = await bundler.VerifyBundleAsync(bundle, TestCancellationToken);
// Assert
result.Valid.Should().BeTrue();
@@ -306,7 +307,7 @@ public class AttestationBundlerTests
_fixedNow.AddDays(-30),
_fixedNow);
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Tamper with the bundle by modifying an attestation
var tamperedAttestations = bundle.Attestations.ToList();
@@ -316,7 +317,7 @@ public class AttestationBundlerTests
var tamperedBundle = bundle with { Attestations = tamperedAttestations };
// Act
var result = await bundler.VerifyBundleAsync(tamperedBundle);
var result = await bundler.VerifyBundleAsync(tamperedBundle, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -358,12 +359,12 @@ public class AttestationBundlerTests
var bundlerWithSigner = CreateBundler();
var request = new BundleCreationRequest(_fixedNow.AddDays(-7), _fixedNow, SignWithOrgKey: true);
var bundle = await bundlerWithSigner.CreateBundleAsync(request, TestContext.Current.CancellationToken);
var bundle = await bundlerWithSigner.CreateBundleAsync(request, TestCancellationToken);
var bundlerWithoutSigner = CreateBundler(orgSigner: null, useDefaultOrgSigner: false);
// Act
var result = await bundlerWithoutSigner.VerifyBundleAsync(bundle, TestContext.Current.CancellationToken);
var result = await bundlerWithoutSigner.VerifyBundleAsync(bundle, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -387,7 +388,7 @@ public class AttestationBundlerTests
TenantId: "test-tenant");
// Act
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
bundle.Metadata.TenantId.Should().Be("test-tenant");
@@ -426,7 +427,7 @@ public class AttestationBundlerTests
_fixedNow);
// Act
var bundle = await bundler.CreateBundleAsync(request);
var bundle = await bundler.CreateBundleAsync(request, TestCancellationToken);
// Assert
bundle.Attestations.Should().HaveCount(10);

View File

@@ -16,6 +16,7 @@ namespace StellaOps.Attestor.Bundling.Tests;
public class BundleAggregatorTests
{
private readonly InMemoryBundleAggregator _aggregator;
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public BundleAggregatorTests()
{
@@ -39,8 +40,8 @@ public class BundleAggregatorTests
// Act
var results = await _aggregator
.AggregateAsync(new AggregationRequest(start, end))
.ToListAsync();
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(2);
@@ -63,8 +64,8 @@ public class BundleAggregatorTests
// Act
var results = await _aggregator
.AggregateAsync(new AggregationRequest(start, end))
.ToListAsync();
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(2);
@@ -86,8 +87,8 @@ public class BundleAggregatorTests
// Act
var results = await _aggregator
.AggregateAsync(new AggregationRequest(start, end))
.ToListAsync();
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().BeEmpty();
@@ -111,8 +112,8 @@ public class BundleAggregatorTests
// Act
var results = await _aggregator
.AggregateAsync(new AggregationRequest(start, end, TenantId: "tenant-a"))
.ToListAsync();
.AggregateAsync(new AggregationRequest(start, end, TenantId: "tenant-a"), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(2);
@@ -133,8 +134,8 @@ public class BundleAggregatorTests
// Act
var results = await _aggregator
.AggregateAsync(new AggregationRequest(start, end))
.ToListAsync();
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(3);
@@ -160,8 +161,9 @@ public class BundleAggregatorTests
var results = await _aggregator
.AggregateAsync(new AggregationRequest(
start, end,
PredicateTypes: new[] { "verdict.stella/v1" }))
.ToListAsync();
PredicateTypes: new[] { "verdict.stella/v1" }),
TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(2);
@@ -184,8 +186,9 @@ public class BundleAggregatorTests
var results = await _aggregator
.AggregateAsync(new AggregationRequest(
start, end,
PredicateTypes: new[] { "verdict.stella/v1", "sbom.stella/v1" }))
.ToListAsync();
PredicateTypes: new[] { "verdict.stella/v1", "sbom.stella/v1" }),
TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert
results.Should().HaveCount(2);
@@ -210,7 +213,7 @@ public class BundleAggregatorTests
}
// Act
var count = await _aggregator.CountAsync(new AggregationRequest(start, end));
var count = await _aggregator.CountAsync(new AggregationRequest(start, end), TestCancellationToken);
// Assert
count.Should().Be(50);
@@ -229,7 +232,9 @@ public class BundleAggregatorTests
_aggregator.AddAttestation(CreateAttestation("att-3", start.AddDays(15)), tenantId: "tenant-b");
// Act
var count = await _aggregator.CountAsync(new AggregationRequest(start, end, TenantId: "tenant-a"));
var count = await _aggregator.CountAsync(
new AggregationRequest(start, end, TenantId: "tenant-a"),
TestCancellationToken);
// Assert
count.Should().Be(2);
@@ -253,8 +258,12 @@ public class BundleAggregatorTests
_aggregator.AddAttestation(CreateAttestation("att-b", start.AddDays(10)));
// Act
var results1 = await _aggregator.AggregateAsync(new AggregationRequest(start, end)).ToListAsync();
var results2 = await _aggregator.AggregateAsync(new AggregationRequest(start, end)).ToListAsync();
var results1 = await _aggregator
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
var results2 = await _aggregator
.AggregateAsync(new AggregationRequest(start, end), TestCancellationToken)
.ToListAsync(TestCancellationToken);
// Assert: Order should be consistent (sorted by EntryId)
results1.Select(a => a.EntryId).Should().BeEquivalentTo(

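The recurring change across these test files is forwarding xUnit v3's ambient cancellation token into every async call, which is what the xUnit1051 analyzer enforces. Below is a minimal sketch of the pattern, assuming only xUnit v3's `TestContext.Current.CancellationToken` API; the store type is hypothetical and exists only to make the example self-contained.

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

public sealed class WidgetStoreTests
{
    // One accessor so every test forwards the runner's token without repeating the lookup.
    private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;

    [Fact]
    public async Task SaveAsync_RoundTrips()
    {
        var store = new InMemoryWidgetStore();

        // Forwarding the token lets the runner cancel the test body when the run is aborted.
        await store.SaveAsync("w-1", TestCancellationToken);

        Assert.True(await store.ExistsAsync("w-1", TestCancellationToken));
    }
}

// Hypothetical store used only for illustration.
public sealed class InMemoryWidgetStore
{
    private readonly HashSet<string> _ids = new();

    public Task SaveAsync(string id, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _ids.Add(id);
        return Task.CompletedTask;
    }

    public Task<bool> ExistsAsync(string id, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(_ids.Contains(id));
    }
}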
View File

@@ -22,7 +22,7 @@ namespace StellaOps.Attestor.Bundling.Tests;
/// <summary>
/// Integration tests for the full bundle creation workflow:
/// Create → Store → Retrieve → Verify
/// Create -> Store -> Retrieve -> Verify
/// </summary>
public class BundleWorkflowIntegrationTests
{
@@ -30,6 +30,7 @@ public class BundleWorkflowIntegrationTests
private readonly InMemoryBundleAggregator _aggregator;
private readonly TestOrgKeySigner _signer;
private readonly IOptions<BundlingOptions> _options;
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public BundleWorkflowIntegrationTests()
{
@@ -68,13 +69,13 @@ public class BundleWorkflowIntegrationTests
bundle.OrgSignature.Should().NotBeNull();
// Act 2: Store bundle
await _store.StoreBundleAsync(bundle);
await _store.StoreBundleAsync(bundle, cancellationToken: TestCancellationToken);
// Assert: Bundle exists
(await _store.ExistsAsync(bundle.Metadata.BundleId)).Should().BeTrue();
(await _store.ExistsAsync(bundle.Metadata.BundleId, TestCancellationToken)).Should().BeTrue();
// Act 3: Retrieve bundle
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId);
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId, TestCancellationToken);
// Assert: Retrieved bundle matches
retrieved.Should().NotBeNull();
@@ -104,8 +105,8 @@ public class BundleWorkflowIntegrationTests
SignWithOrgKey: false);
var bundle = await CreateBundleAsync(createRequest);
await _store.StoreBundleAsync(bundle);
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId);
await _store.StoreBundleAsync(bundle, cancellationToken: TestCancellationToken);
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId, TestCancellationToken);
// Assert
retrieved.Should().NotBeNull();
@@ -145,8 +146,8 @@ public class BundleWorkflowIntegrationTests
// Act
var bundle = await CreateBundleAsync(new BundleCreationRequest(periodStart, periodEnd));
await _store.StoreBundleAsync(bundle);
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId);
await _store.StoreBundleAsync(bundle, cancellationToken: TestCancellationToken);
var retrieved = await _store.GetBundleAsync(bundle.Metadata.BundleId, TestCancellationToken);
// Assert
retrieved.Should().NotBeNull();
@@ -209,7 +210,7 @@ public class BundleWorkflowIntegrationTests
jobResult.AttestationCount.Should().Be(5);
// Verify bundle was stored
(await _store.ExistsAsync(jobResult.BundleId)).Should().BeTrue();
(await _store.ExistsAsync(jobResult.BundleId, TestCancellationToken)).Should().BeTrue();
}
[Trait("Category", TestCategories.Unit)]
@@ -242,17 +243,17 @@ public class BundleWorkflowIntegrationTests
// Arrange: Create old bundle
var oldPeriodStart = DateTimeOffset.UtcNow.AddMonths(-36);
var oldBundle = CreateExpiredBundle("old-bundle", oldPeriodStart);
await _store.StoreBundleAsync(oldBundle);
await _store.StoreBundleAsync(oldBundle, cancellationToken: TestCancellationToken);
// Verify old bundle exists
(await _store.ExistsAsync("old-bundle")).Should().BeTrue();
(await _store.ExistsAsync("old-bundle", TestCancellationToken)).Should().BeTrue();
// Act: Apply retention
var deleted = await ApplyRetentionAsync(retentionMonths: 24);
// Assert
deleted.Should().BeGreaterThan(0);
(await _store.ExistsAsync("old-bundle")).Should().BeFalse();
(await _store.ExistsAsync("old-bundle", TestCancellationToken)).Should().BeFalse();
}
#endregion
@@ -265,7 +266,8 @@ public class BundleWorkflowIntegrationTests
.AggregateAsync(new AggregationRequest(
request.PeriodStart,
request.PeriodEnd,
request.TenantId))
request.TenantId),
TestCancellationToken)
.ToListAsync();
// Sort for determinism
@@ -298,7 +300,7 @@ public class BundleWorkflowIntegrationTests
{
var digest = System.Security.Cryptography.SHA256.HashData(
System.Text.Encoding.UTF8.GetBytes(merkleRoot));
var signature = await _signer.SignBundleAsync(digest, request.OrgKeyId);
var signature = await _signer.SignBundleAsync(digest, request.OrgKeyId, TestCancellationToken);
bundle = bundle with
{
OrgSignature = signature,
@@ -323,7 +325,7 @@ public class BundleWorkflowIntegrationTests
{
var digest = System.Security.Cryptography.SHA256.HashData(
System.Text.Encoding.UTF8.GetBytes(computedRoot));
return await _signer.VerifyBundleAsync(digest, bundle.OrgSignature);
return await _signer.VerifyBundleAsync(digest, bundle.OrgSignature, TestCancellationToken);
}
return true;
@@ -342,7 +344,7 @@ public class BundleWorkflowIntegrationTests
SignWithOrgKey: true,
OrgKeyId: "scheduler-key"));
await _store.StoreBundleAsync(bundle);
await _store.StoreBundleAsync(bundle, cancellationToken: TestCancellationToken);
return new RotationJobResult
{
@@ -366,12 +368,12 @@ public class BundleWorkflowIntegrationTests
var cutoff = DateTimeOffset.UtcNow.AddMonths(-retentionMonths);
var deleted = 0;
var bundles = await _store.ListBundlesAsync(new BundleListRequest());
var bundles = await _store.ListBundlesAsync(new BundleListRequest(), TestCancellationToken);
foreach (var bundle in bundles.Bundles)
{
if (bundle.CreatedAt < cutoff)
{
if (await _store.DeleteBundleAsync(bundle.BundleId))
if (await _store.DeleteBundleAsync(bundle.BundleId, TestCancellationToken))
{
deleted++;
}

View File

@@ -21,6 +21,7 @@ public class KmsOrgKeySignerTests
{
private readonly Mock<IKmsProvider> _kmsProviderMock;
private readonly Mock<ILogger<KmsOrgKeySigner>> _loggerMock;
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public KmsOrgKeySignerTests()
{
@@ -46,7 +47,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.SignBundleAsync(bundleDigest, keyId);
var result = await signer.SignBundleAsync(bundleDigest, keyId, TestCancellationToken);
// Assert
result.Should().NotBeNull();
@@ -71,7 +72,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act & Assert
var act = () => signer.SignBundleAsync(bundleDigest, keyId);
var act = () => signer.SignBundleAsync(bundleDigest, keyId, TestCancellationToken);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage($"*'{keyId}'*not found*");
}
@@ -92,7 +93,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act & Assert
var act = () => signer.SignBundleAsync(bundleDigest, keyId);
var act = () => signer.SignBundleAsync(bundleDigest, keyId, TestCancellationToken);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage($"*'{keyId}'*not active*");
}
@@ -120,7 +121,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act & Assert
var act = () => signer.SignBundleAsync(bundleDigest, keyId);
var act = () => signer.SignBundleAsync(bundleDigest, keyId, TestCancellationToken);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage($"*'{keyId}'*expired*");
}
@@ -145,7 +146,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.SignBundleAsync(bundleDigest, keyId);
var result = await signer.SignBundleAsync(bundleDigest, keyId, TestCancellationToken);
// Assert
result.CertificateChain.Should().NotBeNull();
@@ -187,7 +188,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.VerifyBundleAsync(bundleDigest, signature);
var result = await signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken);
// Assert
result.Should().BeTrue();
@@ -223,7 +224,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.VerifyBundleAsync(bundleDigest, signature);
var result = await signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken);
// Assert
result.Should().BeFalse();
@@ -259,7 +260,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.VerifyBundleAsync(bundleDigest, signature);
var result = await signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken);
// Assert
result.Should().BeFalse();
@@ -285,7 +286,7 @@ public class KmsOrgKeySignerTests
options);
// Act
var result = await signer.GetActiveKeyIdAsync();
var result = await signer.GetActiveKeyIdAsync(TestCancellationToken);
// Assert
result.Should().Be("configured-active-key");
@@ -310,7 +311,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.GetActiveKeyIdAsync();
var result = await signer.GetActiveKeyIdAsync(TestCancellationToken);
// Assert
result.Should().Be("key-2025"); // Newest active key
@@ -333,7 +334,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act & Assert
var act = () => signer.GetActiveKeyIdAsync();
var act = () => signer.GetActiveKeyIdAsync(TestCancellationToken);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage("*No active signing key*");
}
@@ -356,7 +357,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.GetActiveKeyIdAsync();
var result = await signer.GetActiveKeyIdAsync(TestCancellationToken);
// Assert
result.Should().Be("key-valid");
@@ -384,7 +385,7 @@ public class KmsOrgKeySignerTests
var signer = CreateSigner();
// Act
var result = await signer.ListKeysAsync();
var result = await signer.ListKeysAsync(TestCancellationToken);
// Assert
result.Should().HaveCount(2);
@@ -408,8 +409,8 @@ public class KmsOrgKeySignerTests
var bundleDigest = SHA256.HashData("test bundle content"u8.ToArray());
// Act
var signature = await signer.SignBundleAsync(bundleDigest, "test-key-1");
var isValid = await signer.VerifyBundleAsync(bundleDigest, signature);
var signature = await signer.SignBundleAsync(bundleDigest, "test-key-1", TestCancellationToken);
var isValid = await signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken);
// Assert
isValid.Should().BeTrue();
@@ -430,8 +431,8 @@ public class KmsOrgKeySignerTests
var tamperedDigest = SHA256.HashData("tampered content"u8.ToArray());
// Act
var signature = await signer.SignBundleAsync(originalDigest, "test-key-1");
var isValid = await signer.VerifyBundleAsync(tamperedDigest, signature);
var signature = await signer.SignBundleAsync(originalDigest, "test-key-1", TestCancellationToken);
var isValid = await signer.VerifyBundleAsync(tamperedDigest, signature, TestCancellationToken);
// Assert
isValid.Should().BeFalse();
@@ -447,13 +448,13 @@ public class KmsOrgKeySignerTests
signer.AddKey("test-key-1", isActive: true);
var bundleDigest = SHA256.HashData("test"u8.ToArray());
var signature = await signer.SignBundleAsync(bundleDigest, "test-key-1");
var signature = await signer.SignBundleAsync(bundleDigest, "test-key-1", TestCancellationToken);
// Modify signature to reference unknown key
var fakeSignature = signature with { KeyId = "unknown-key" };
// Act
var isValid = await signer.VerifyBundleAsync(bundleDigest, fakeSignature);
var isValid = await signer.VerifyBundleAsync(bundleDigest, fakeSignature, TestCancellationToken);
// Assert
isValid.Should().BeFalse();
@@ -470,7 +471,7 @@ public class KmsOrgKeySignerTests
signer.AddKey("key-2", isActive: true);
// Act
var activeKeyId = await signer.GetActiveKeyIdAsync();
var activeKeyId = await signer.GetActiveKeyIdAsync(TestCancellationToken);
// Assert
activeKeyId.Should().Be("key-2");
@@ -486,7 +487,7 @@ public class KmsOrgKeySignerTests
// Don't add any keys
// Act & Assert
var act = () => signer.GetActiveKeyIdAsync();
var act = () => signer.GetActiveKeyIdAsync(TestCancellationToken);
await act.Should().ThrowAsync<InvalidOperationException>()
.WithMessage("*No active signing key*");
}
@@ -502,7 +503,7 @@ public class KmsOrgKeySignerTests
signer.AddKey("key-2", isActive: false);
// Act
var keys = await signer.ListKeysAsync();
var keys = await signer.ListKeysAsync(TestCancellationToken);
// Assert
keys.Should().HaveCount(2);

View File

@@ -23,6 +23,7 @@ public class OfflineKitBundleProviderTests
private readonly Mock<IBundleStore> _storeMock = new();
private readonly Mock<ILogger<OfflineKitBundleProvider>> _loggerMock = new();
private readonly DateTimeOffset _fixedNow = new(2026, 1, 2, 0, 0, 0, TimeSpan.Zero);
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
[Trait("Category", TestCategories.Unit)]
[Fact]
@@ -47,7 +48,7 @@ public class OfflineKitBundleProviderTests
.ReturnsAsync(new BundleListResult(new List<BundleListItem>(), null));
// Act
await provider.GetOfflineKitManifestAsync(null, TestContext.Current.CancellationToken);
await provider.GetOfflineKitManifestAsync(null, TestCancellationToken);
// Assert
var expectedCutoff = _fixedNow.AddMonths(-6);
@@ -94,7 +95,7 @@ public class OfflineKitBundleProviderTests
using var temp = new TempDirectory();
// Act
await provider.ExportForOfflineKitAsync(temp.Path, null, TestContext.Current.CancellationToken);
await provider.ExportForOfflineKitAsync(temp.Path, null, TestCancellationToken);
// Assert
_storeMock.Verify(x => x.ExportBundleAsync(

View File

@@ -19,6 +19,7 @@ public class OrgKeySignerTests
{
private readonly TestOrgKeySigner _signer;
private readonly string _testKeyId = "test-org-key-2025";
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public OrgKeySignerTests()
{
@@ -35,7 +36,7 @@ public class OrgKeySignerTests
var bundleDigest = SHA256.HashData("test-bundle-content"u8.ToArray());
// Act
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId);
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId, TestCancellationToken);
// Assert
signature.Should().NotBeNull();
@@ -45,7 +46,7 @@ public class OrgKeySignerTests
signature.SignedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
// Verify roundtrip
var isValid = await _signer.VerifyBundleAsync(bundleDigest, signature);
var isValid = await _signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken);
isValid.Should().BeTrue();
}
@@ -58,8 +59,8 @@ public class OrgKeySignerTests
var tamperedDigest = SHA256.HashData("tampered-content"u8.ToArray());
// Act
var signature = await _signer.SignBundleAsync(originalDigest, _testKeyId);
var isValid = await _signer.VerifyBundleAsync(tamperedDigest, signature);
var signature = await _signer.SignBundleAsync(originalDigest, _testKeyId, TestCancellationToken);
var isValid = await _signer.VerifyBundleAsync(tamperedDigest, signature, TestCancellationToken);
// Assert
isValid.Should().BeFalse();
@@ -75,15 +76,15 @@ public class OrgKeySignerTests
var digest2 = SHA256.HashData(content);
// Act
var signature1 = await _signer.SignBundleAsync(digest1, _testKeyId);
var signature2 = await _signer.SignBundleAsync(digest2, _testKeyId);
var signature1 = await _signer.SignBundleAsync(digest1, _testKeyId, TestCancellationToken);
var signature2 = await _signer.SignBundleAsync(digest2, _testKeyId, TestCancellationToken);
// Assert - Both signatures should be valid for the same content
(await _signer.VerifyBundleAsync(digest1, signature1)).Should().BeTrue();
(await _signer.VerifyBundleAsync(digest2, signature2)).Should().BeTrue();
(await _signer.VerifyBundleAsync(digest1, signature1, TestCancellationToken)).Should().BeTrue();
(await _signer.VerifyBundleAsync(digest2, signature2, TestCancellationToken)).Should().BeTrue();
// Cross-verify: signature1 should verify against digest2 (same content)
(await _signer.VerifyBundleAsync(digest2, signature1)).Should().BeTrue();
(await _signer.VerifyBundleAsync(digest2, signature1, TestCancellationToken)).Should().BeTrue();
}
#endregion
@@ -98,7 +99,7 @@ public class OrgKeySignerTests
var bundleDigest = SHA256.HashData("bundle-with-chain"u8.ToArray());
// Act
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId);
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId, TestCancellationToken);
// Assert
signature.CertificateChain.Should().NotBeNull();
@@ -120,8 +121,8 @@ public class OrgKeySignerTests
var keyId2 = "org-key-2025";
// Act
var signature1 = await _signer.SignBundleAsync(bundleDigest, keyId1);
var signature2 = await _signer.SignBundleAsync(bundleDigest, keyId2);
var signature1 = await _signer.SignBundleAsync(bundleDigest, keyId1, TestCancellationToken);
var signature2 = await _signer.SignBundleAsync(bundleDigest, keyId2, TestCancellationToken);
// Assert
signature1.KeyId.Should().Be(keyId1);
@@ -135,13 +136,13 @@ public class OrgKeySignerTests
{
// Arrange
var bundleDigest = SHA256.HashData("test-content"u8.ToArray());
var signatureWithKey1 = await _signer.SignBundleAsync(bundleDigest, "key-1");
var signatureWithKey1 = await _signer.SignBundleAsync(bundleDigest, "key-1", TestCancellationToken);
// Modify the key ID in the signature (simulating wrong key)
var tamperedSignature = signatureWithKey1 with { KeyId = "wrong-key" };
// Act
var isValid = await _signer.VerifyBundleAsync(bundleDigest, tamperedSignature);
var isValid = await _signer.VerifyBundleAsync(bundleDigest, tamperedSignature, TestCancellationToken);
// Assert
isValid.Should().BeFalse();
@@ -159,14 +160,14 @@ public class OrgKeySignerTests
var emptyDigest = SHA256.HashData(Array.Empty<byte>());
// Act
var signature = await _signer.SignBundleAsync(emptyDigest, _testKeyId);
var signature = await _signer.SignBundleAsync(emptyDigest, _testKeyId, TestCancellationToken);
// Assert
signature.Should().NotBeNull();
signature.Signature.Should().NotBeEmpty();
// Verify works
(await _signer.VerifyBundleAsync(emptyDigest, signature)).Should().BeTrue();
(await _signer.VerifyBundleAsync(emptyDigest, signature, TestCancellationToken)).Should().BeTrue();
}
#endregion
@@ -185,11 +186,11 @@ public class OrgKeySignerTests
var bundleDigest = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes($"test-{algorithm}"));
// Act
var signature = await signer.SignBundleAsync(bundleDigest, _testKeyId);
var signature = await signer.SignBundleAsync(bundleDigest, _testKeyId, TestCancellationToken);
// Assert
signature.Algorithm.Should().Be(algorithm);
(await signer.VerifyBundleAsync(bundleDigest, signature)).Should().BeTrue();
(await signer.VerifyBundleAsync(bundleDigest, signature, TestCancellationToken)).Should().BeTrue();
}
#endregion
@@ -205,7 +206,7 @@ public class OrgKeySignerTests
var bundleDigest = SHA256.HashData("timestamp-test"u8.ToArray());
// Act
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId);
var signature = await _signer.SignBundleAsync(bundleDigest, _testKeyId, TestCancellationToken);
var afterSign = DateTimeOffset.UtcNow;
// Assert

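The signer tests above all follow the same digest-sign-verify roundtrip with a tamper check. A minimal sketch of that flow using plain ECDSA P-256; the curve and the direct-hash signing are assumptions chosen for illustration, not the signer implementation under test.

using System;
using System.Security.Cryptography;
using System.Text;

var digest = SHA256.HashData(Encoding.UTF8.GetBytes("bundle-content"));
var tampered = SHA256.HashData(Encoding.UTF8.GetBytes("tampered-content"));

using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);

// The input is already a digest, so sign the hash directly instead of hashing again.
var signature = key.SignHash(digest);

Console.WriteLine(key.VerifyHash(digest, signature));    // True: untouched digest verifies
Console.WriteLine(key.VerifyHash(tampered, signature));  // False: tampered digest is rejected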
View File

@@ -24,6 +24,7 @@ public class RetentionPolicyEnforcerTests
private readonly Mock<IBundleExpiryNotifier> _notifierMock;
private readonly Mock<ILogger<RetentionPolicyEnforcer>> _loggerMock;
private readonly DateTimeOffset _fixedNow = new(2026, 1, 2, 0, 0, 0, TimeSpan.Zero);
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public RetentionPolicyEnforcerTests()
{
@@ -159,7 +160,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(options);
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -198,7 +199,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -245,7 +246,7 @@ public class RetentionPolicyEnforcerTests
timeProvider: fixedTimeProvider);
// Act
var result = await enforcer.EnforceAsync(TestContext.Current.CancellationToken);
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.BundlesDeleted.Should().Be(0);
@@ -279,7 +280,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions), _archiverMock.Object);
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -308,7 +309,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeTrue();
@@ -343,7 +344,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.BundlesMarkedExpired.Should().Be(1);
@@ -379,7 +380,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.BundlesDeleted.Should().Be(1);
@@ -409,7 +410,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions), notifier: _notifierMock.Object);
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.BundlesApproachingExpiry.Should().Be(1);
@@ -442,7 +443,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions), archiver: null);
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeFalse();
@@ -475,7 +476,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
result.Success.Should().BeFalse();
@@ -527,7 +528,7 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(CreateOptions(retentionOptions));
// Act
var result = await enforcer.EnforceAsync();
var result = await enforcer.EnforceAsync(TestCancellationToken);
// Assert
// Should evaluate first batch (5) and stop before fetching second batch
@@ -554,7 +555,9 @@ public class RetentionPolicyEnforcerTests
var enforcer = CreateEnforcer(options);
// Act
var notifications = await enforcer.GetApproachingExpiryAsync(daysBeforeExpiry: 30);
var notifications = await enforcer.GetApproachingExpiryAsync(
daysBeforeExpiry: 30,
cancellationToken: TestCancellationToken);
// Assert
notifications.Should().HaveCount(1);

View File

@@ -8,3 +8,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0048-M | DONE | Revalidated maintainability for StellaOps.Attestor.Bundling.Tests. |
| AUDIT-0048-T | DONE | Revalidated test coverage for StellaOps.Attestor.Bundling.Tests. |
| AUDIT-0048-A | DONE | Waived (test project; revalidated 2026-01-06). |
| AUDIT-0207-T | DONE | Revalidated 2026-01-08 (stack overflow fix). |
| AUDIT-0207-A | DONE | Revalidated 2026-01-08 (stack overflow fix). |

View File

@@ -11,6 +11,8 @@ namespace StellaOps.Attestor.Infrastructure.Tests;
public sealed class DefaultDsseCanonicalizerTests
{
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task CanonicalizeAsync_OrdersSignaturesDeterministically()
@@ -35,7 +37,7 @@ public sealed class DefaultDsseCanonicalizerTests
var canonicalizer = new DefaultDsseCanonicalizer();
var bytes = await canonicalizer.CanonicalizeAsync(request);
var bytes = await canonicalizer.CanonicalizeAsync(request, TestCancellationToken);
using var document = JsonDocument.Parse(bytes);
var signatures = document.RootElement.GetProperty("signatures");

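The canonicalizer test above asserts that signatures serialize in a deterministic order. A minimal sketch of one way to get that property, sorting signatures by key id before writing the DSSE envelope; the ordering rule is an assumption here and the real canonicalizer's rules are not reproduced.

using System;
using System.Linq;
using System.Text.Json;

var envelope = new
{
    payloadType = "application/vnd.in-toto+json",
    payload = Convert.ToBase64String("{}"u8.ToArray()),
    // Sorting by keyid makes repeated serializations byte-for-byte identical.
    signatures = new[]
        {
            new { keyid = "key-b", sig = "c2lnLWI=" },
            new { keyid = "key-a", sig = "c2lnLWE=" },
        }
        .OrderBy(s => s.keyid, StringComparer.Ordinal)
        .ToArray(),
};

Console.WriteLine(JsonSerializer.Serialize(envelope));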
View File

@@ -1,4 +1,5 @@
using System;
using System.Globalization;
using System.Net;
using System.Net.Http;
using System.Text;
@@ -7,6 +8,7 @@ using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.TestKit;
using Xunit;
@@ -19,18 +21,8 @@ public sealed class HttpRekorClientTests
[Fact]
public async Task VerifyInclusionAsync_MissingLogIndex_ReturnsFailure()
{
var handler = new StubHandler();
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://rekor.example.com")
};
var client = new HttpRekorClient(httpClient, NullLogger<HttpRekorClient>.Instance);
var backend = new RekorBackend
{
Name = "primary",
Url = new Uri("https://rekor.example.com")
};
var client = CreateClient(new MissingLogIndexHandler());
var backend = CreateBackend();
var payloadDigest = Encoding.UTF8.GetBytes("payload-digest");
var result = await client.VerifyInclusionAsync("test-uuid", payloadDigest, backend, CancellationToken.None);
@@ -39,7 +31,98 @@ public sealed class HttpRekorClientTests
result.FailureReason.Should().Contain("log index");
}
private sealed class StubHandler : HttpMessageHandler
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task GetProofAsync_ParsesCheckpointTimestamp_InvariantCulture()
{
var originalCulture = CultureInfo.CurrentCulture;
var originalUiCulture = CultureInfo.CurrentUICulture;
try
{
CultureInfo.CurrentCulture = new CultureInfo("fr-FR");
CultureInfo.CurrentUICulture = new CultureInfo("fr-FR");
const string timestamp = "01/02/2026 03:04:05 +00:00";
var proofJson = BuildProofJson("rekor.example.com", "abcd", "abcd", timestamp);
var client = CreateClient(new ProofOnlyHandler(proofJson));
var backend = CreateBackend();
var proof = await client.GetProofAsync("test-uuid", backend, CancellationToken.None);
proof.Should().NotBeNull();
proof!.Checkpoint.Should().NotBeNull();
proof.Checkpoint!.Timestamp.Should().Be(DateTimeOffset.Parse(
timestamp,
CultureInfo.InvariantCulture,
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal));
}
finally
{
CultureInfo.CurrentCulture = originalCulture;
CultureInfo.CurrentUICulture = originalUiCulture;
}
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task VerifyInclusionAsync_ValidProof_ReturnsSuccessWithUnverifiedCheckpoint()
{
var payloadDigest = Encoding.UTF8.GetBytes("payload");
var leafHash = MerkleProofVerifier.HashLeaf(payloadDigest);
var leafHex = MerkleProofVerifier.BytesToHex(leafHash);
var client = CreateClient(new ValidProofHandler(leafHex));
var backend = CreateBackend();
var result = await client.VerifyInclusionAsync("test-uuid", payloadDigest, backend, CancellationToken.None);
result.Verified.Should().BeTrue();
result.CheckpointSignatureValid.Should().BeFalse();
result.LogIndex.Should().Be(0);
result.ComputedRootHash.Should().Be(leafHex);
result.ExpectedRootHash.Should().Be(leafHex);
result.FailureReason.Should().BeNull();
}
private static HttpRekorClient CreateClient(HttpMessageHandler handler)
{
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://rekor.example.com")
};
return new HttpRekorClient(httpClient, NullLogger<HttpRekorClient>.Instance);
}
private static RekorBackend CreateBackend()
{
return new RekorBackend
{
Name = "primary",
Url = new Uri("https://rekor.example.com")
};
}
private static string BuildProofJson(string origin, string rootHash, string leafHash, string timestamp)
{
return $$"""
{
"checkpoint": {
"origin": "{{origin}}",
"size": 1,
"rootHash": "{{rootHash}}",
"timestamp": "{{timestamp}}"
},
"inclusion": {
"leafHash": "{{leafHash}}",
"path": []
}
}
""";
}
private sealed class MissingLogIndexHandler : HttpMessageHandler
{
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
@@ -47,21 +130,7 @@ public sealed class HttpRekorClientTests
if (path.EndsWith("/proof", StringComparison.Ordinal))
{
var json = """
{
"checkpoint": {
"origin": "rekor.example.com",
"size": 1,
"rootHash": "abcd",
"timestamp": "2026-01-01T00:00:00Z"
},
"inclusion": {
"leafHash": "abcd",
"path": []
}
}
""";
var json = BuildProofJson("rekor.example.com", "abcd", "abcd", "2026-01-01T00:00:00Z");
return Task.FromResult(BuildResponse(json));
}
@@ -73,13 +142,62 @@ public sealed class HttpRekorClientTests
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
}
}
private static HttpResponseMessage BuildResponse(string json)
private sealed class ProofOnlyHandler : HttpMessageHandler
{
private readonly string _proofJson;
public ProofOnlyHandler(string proofJson)
{
return new HttpResponseMessage(HttpStatusCode.OK)
_proofJson = proofJson;
}
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
var path = request.RequestUri?.AbsolutePath ?? string.Empty;
if (path.EndsWith("/proof", StringComparison.Ordinal))
{
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
return Task.FromResult(BuildResponse(_proofJson));
}
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
}
}
private sealed class ValidProofHandler : HttpMessageHandler
{
private readonly string _proofJson;
public ValidProofHandler(string leafHex)
{
_proofJson = BuildProofJson("rekor.example.com", leafHex, leafHex, "2026-01-02T03:04:05Z");
}
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
var path = request.RequestUri?.AbsolutePath ?? string.Empty;
if (path.EndsWith("/proof", StringComparison.Ordinal))
{
return Task.FromResult(BuildResponse(_proofJson));
}
if (path.Contains("/api/v2/log/entries/", StringComparison.Ordinal))
{
var json = "{\"logIndex\":0}";
return Task.FromResult(BuildResponse(json));
}
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
}
}
private static HttpResponseMessage BuildResponse(string json)
{
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
}
}
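The fr-FR test above pins the requirement that checkpoint timestamps parse identically regardless of the process culture. A minimal sketch of the parsing call the test expects; the timestamp string is taken from the test data, everything else is illustrative.

using System;
using System.Globalization;

const string timestamp = "01/02/2026 03:04:05 +00:00";

// Invariant culture keeps the month/day ordering fixed even when CurrentCulture is fr-FR.
var parsed = DateTimeOffset.Parse(
    timestamp,
    CultureInfo.InvariantCulture,
    DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);

Console.WriteLine(parsed.ToString("O")); // 2026-01-02T03:04:05.0000000+00:00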

View File

@@ -10,6 +10,8 @@ namespace StellaOps.Attestor.Infrastructure.Tests;
public sealed class InMemoryAttestorEntryRepositoryTests
{
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task QueryAsync_ContinuationToken_DoesNotRepeatLastEntry()
@@ -20,18 +22,22 @@ public sealed class InMemoryAttestorEntryRepositoryTests
var first = CreateEntry("uuid-a", createdAt);
var second = CreateEntry("uuid-b", createdAt);
await repository.SaveAsync(first);
await repository.SaveAsync(second);
await repository.SaveAsync(first, TestCancellationToken);
await repository.SaveAsync(second, TestCancellationToken);
var firstPage = await repository.QueryAsync(new AttestorEntryQuery { PageSize = 1 });
var firstPage = await repository.QueryAsync(
new AttestorEntryQuery { PageSize = 1 },
TestCancellationToken);
firstPage.Items.Should().HaveCount(1);
firstPage.ContinuationToken.Should().NotBeNullOrWhiteSpace();
var secondPage = await repository.QueryAsync(new AttestorEntryQuery
{
PageSize = 1,
ContinuationToken = firstPage.ContinuationToken
});
var secondPage = await repository.QueryAsync(
new AttestorEntryQuery
{
PageSize = 1,
ContinuationToken = firstPage.ContinuationToken
},
TestCancellationToken);
secondPage.Items.Should().HaveCount(1);
secondPage.Items[0].RekorUuid.Should().NotBe(firstPage.Items[0].RekorUuid);

View File

@@ -0,0 +1,84 @@
using System;
using FluentAssertions;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Attestor.Infrastructure.Tests;
public sealed class RekorBackendResolverTests
{
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ResolveBackend_UnknownBackend_FallsBackToPrimary()
{
var options = new AttestorOptions
{
Rekor = new AttestorOptions.RekorOptions
{
Primary = new AttestorOptions.RekorBackendOptions
{
Url = "https://rekor.primary.example"
},
Mirror = new AttestorOptions.RekorMirrorOptions
{
Url = "https://rekor.mirror.example",
Enabled = true
}
}
};
var backend = RekorBackendResolver.ResolveBackend(options, "unknown", allowFallbackToPrimary: true);
backend.Name.Should().Be("unknown");
backend.Url.Should().Be(new Uri("https://rekor.primary.example"));
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ResolveBackend_UnknownBackend_ThrowsWhenFallbackDisabled()
{
var options = new AttestorOptions
{
Rekor = new AttestorOptions.RekorOptions
{
Primary = new AttestorOptions.RekorBackendOptions
{
Url = "https://rekor.primary.example"
}
}
};
var action = () => RekorBackendResolver.ResolveBackend(options, "unknown", allowFallbackToPrimary: false);
action.Should().Throw<InvalidOperationException>()
.WithMessage("Unknown Rekor backend: unknown");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ResolveBackend_Mirror_ReturnsMirror()
{
var options = new AttestorOptions
{
Rekor = new AttestorOptions.RekorOptions
{
Primary = new AttestorOptions.RekorBackendOptions
{
Url = "https://rekor.primary.example"
},
Mirror = new AttestorOptions.RekorMirrorOptions
{
Url = "https://rekor.mirror.example",
Enabled = true
}
}
};
var backend = RekorBackendResolver.ResolveBackend(options, "mirror", allowFallbackToPrimary: false);
backend.Name.Should().Be("mirror");
backend.Url.Should().Be(new Uri("https://rekor.mirror.example"));
}
}
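The resolver tests above imply a name-based lookup with an optional fallback to the primary backend. A minimal sketch of that behavior under the same assumptions; it mirrors what the tests assert, not the production RekorBackendResolver.

using System;

public sealed record BackendSketch(string Name, Uri Url);

public static class BackendResolverSketch
{
    // Resolve "primary" or "mirror" by name; unknown names either reuse the primary URL or throw.
    public static BackendSketch Resolve(Uri primary, Uri? mirror, string name, bool allowFallbackToPrimary)
    {
        return name switch
        {
            "primary" => new BackendSketch("primary", primary),
            "mirror" when mirror is not null => new BackendSketch("mirror", mirror),
            _ when allowFallbackToPrimary => new BackendSketch(name, primary),
            _ => throw new InvalidOperationException($"Unknown Rekor backend: {name}"),
        };
    }
}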

View File

@@ -5,5 +5,10 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0055-A | TODO | Reopened after revalidation 2026-01-06 (additional coverage needed). |
| AUDIT-0055-A | DONE | Added Rekor client coverage and backend resolver tests 2026-01-08. |
| AUDIT-0729-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0729-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0729-A | DONE | Waived (test project; revalidated 2026-01-07). |
| VAL-SMOKE-001 | DONE | Removed xUnit v2 references and verified unit tests pass. |
| AUDIT-0208-T | DONE | Revalidated 2026-01-08 (raw string + xUnit1051 fixes). |
| AUDIT-0208-A | DONE | Applied fixes 2026-01-08 (raw string + xUnit1051 fixes). |

View File

@@ -23,6 +23,7 @@ public class FileSystemRootStoreTests : IDisposable
{
private readonly Mock<ILogger<FileSystemRootStore>> _loggerMock;
private readonly string _testRootPath;
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public FileSystemRootStoreTests()
{
@@ -48,7 +49,7 @@ public class FileSystemRootStoreTests : IDisposable
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().BeEmpty();
@@ -61,13 +62,13 @@ public class FileSystemRootStoreTests : IDisposable
// Arrange
var cert = CreateTestCertificate("CN=Test Fulcio Root");
var pemPath = Path.Combine(_testRootPath, "fulcio.pem");
await WritePemFileAsync(pemPath, cert);
await WritePemFileAsync(pemPath, cert, TestCancellationToken);
var options = CreateOptions(fulcioPath: pemPath);
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().HaveCount(1);
@@ -85,14 +86,14 @@ public class FileSystemRootStoreTests : IDisposable
var cert1 = CreateTestCertificate("CN=Root 1");
var cert2 = CreateTestCertificate("CN=Root 2");
await WritePemFileAsync(Path.Combine(fulcioDir, "root1.pem"), cert1);
await WritePemFileAsync(Path.Combine(fulcioDir, "root2.pem"), cert2);
await WritePemFileAsync(Path.Combine(fulcioDir, "root1.pem"), cert1, TestCancellationToken);
await WritePemFileAsync(Path.Combine(fulcioDir, "root2.pem"), cert2, TestCancellationToken);
var options = CreateOptions(fulcioPath: fulcioDir);
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().HaveCount(2);
@@ -109,14 +110,14 @@ public class FileSystemRootStoreTests : IDisposable
var certA = CreateTestCertificate("CN=Root A");
var certB = CreateTestCertificate("CN=Root B");
await WritePemFileAsync(Path.Combine(fulcioDir, "b.pem"), certB);
await WritePemFileAsync(Path.Combine(fulcioDir, "a.pem"), certA);
await WritePemFileAsync(Path.Combine(fulcioDir, "b.pem"), certB, TestCancellationToken);
await WritePemFileAsync(Path.Combine(fulcioDir, "a.pem"), certA, TestCancellationToken);
var options = CreateOptions(fulcioPath: fulcioDir);
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().HaveCount(2);
@@ -131,14 +132,14 @@ public class FileSystemRootStoreTests : IDisposable
// Arrange
var cert = CreateTestCertificate("CN=Cached Root");
var pemPath = Path.Combine(_testRootPath, "cached.pem");
await WritePemFileAsync(pemPath, cert);
await WritePemFileAsync(pemPath, cert, TestCancellationToken);
var options = CreateOptions(fulcioPath: pemPath);
var store = CreateStore(options);
// Act
var roots1 = await store.GetFulcioRootsAsync();
var roots2 = await store.GetFulcioRootsAsync();
var roots1 = await store.GetFulcioRootsAsync(TestCancellationToken);
var roots2 = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert - same collection instance (cached)
roots1.Should().HaveCount(1);
@@ -154,14 +155,14 @@ public class FileSystemRootStoreTests : IDisposable
// Arrange
var cert = CreateTestCertificate("CN=Imported Root");
var sourcePath = Path.Combine(_testRootPath, "import-source.pem");
await WritePemFileAsync(sourcePath, cert);
await WritePemFileAsync(sourcePath, cert, TestCancellationToken);
var options = CreateOptions();
options.Value.BaseRootPath = _testRootPath;
var store = CreateStore(options);
// Act
await store.ImportRootsAsync(sourcePath, RootType.Fulcio);
await store.ImportRootsAsync(sourcePath, RootType.Fulcio, TestCancellationToken);
// Assert
var targetDir = Path.Combine(_testRootPath, "fulcio");
@@ -179,7 +180,7 @@ public class FileSystemRootStoreTests : IDisposable
// Act & Assert
await Assert.ThrowsAsync<FileNotFoundException>(
() => store.ImportRootsAsync("/nonexistent/path.pem", RootType.Fulcio));
() => store.ImportRootsAsync("/nonexistent/path.pem", RootType.Fulcio, TestCancellationToken));
}
[Trait("Category", TestCategories.Unit)]
@@ -190,24 +191,24 @@ public class FileSystemRootStoreTests : IDisposable
var cert1 = CreateTestCertificate("CN=Initial Root");
var fulcioDir = Path.Combine(_testRootPath, "fulcio");
Directory.CreateDirectory(fulcioDir);
await WritePemFileAsync(Path.Combine(fulcioDir, "initial.pem"), cert1);
await WritePemFileAsync(Path.Combine(fulcioDir, "initial.pem"), cert1, TestCancellationToken);
var options = CreateOptions(fulcioPath: fulcioDir);
options.Value.BaseRootPath = _testRootPath;
var store = CreateStore(options);
// Load initial cache
var initialRoots = await store.GetFulcioRootsAsync();
var initialRoots = await store.GetFulcioRootsAsync(TestCancellationToken);
initialRoots.Should().HaveCount(1);
// Import a new certificate
var cert2 = CreateTestCertificate("CN=Imported Root");
var importPath = Path.Combine(_testRootPath, "import.pem");
await WritePemFileAsync(importPath, cert2);
await WritePemFileAsync(importPath, cert2, TestCancellationToken);
// Act
await store.ImportRootsAsync(importPath, RootType.Fulcio);
var updatedRoots = await store.GetFulcioRootsAsync();
await store.ImportRootsAsync(importPath, RootType.Fulcio, TestCancellationToken);
var updatedRoots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert - cache invalidated and new cert loaded
updatedRoots.Should().HaveCount(2);
@@ -221,13 +222,13 @@ public class FileSystemRootStoreTests : IDisposable
var cert = CreateTestCertificate("CN=Listed Root");
var fulcioDir = Path.Combine(_testRootPath, "fulcio");
Directory.CreateDirectory(fulcioDir);
await WritePemFileAsync(Path.Combine(fulcioDir, "root.pem"), cert);
await WritePemFileAsync(Path.Combine(fulcioDir, "root.pem"), cert, TestCancellationToken);
var options = CreateOptions(fulcioPath: fulcioDir);
var store = CreateStore(options);
// Act
var roots = await store.ListRootsAsync(RootType.Fulcio);
var roots = await store.ListRootsAsync(RootType.Fulcio, TestCancellationToken);
// Assert
roots.Should().HaveCount(1);
@@ -244,20 +245,20 @@ public class FileSystemRootStoreTests : IDisposable
var cert = CreateTestCertificate("CN=Org Signing Key");
var orgDir = Path.Combine(_testRootPath, "org-signing");
Directory.CreateDirectory(orgDir);
await WritePemFileAsync(Path.Combine(orgDir, "org.pem"), cert);
await WritePemFileAsync(Path.Combine(orgDir, "org.pem"), cert, TestCancellationToken);
var options = CreateOptions(orgSigningPath: orgDir);
var store = CreateStore(options);
// First, verify the cert was loaded and get its thumbprint from listing
var orgKeys = await store.GetOrgSigningKeysAsync();
var orgKeys = await store.GetOrgSigningKeysAsync(TestCancellationToken);
orgKeys.Should().HaveCount(1);
// Get the thumbprint from the loaded certificate
var thumbprint = ComputeThumbprint(orgKeys[0]);
// Act
var found = await store.GetOrgKeyByIdAsync(thumbprint);
var found = await store.GetOrgKeyByIdAsync(thumbprint, TestCancellationToken);
// Assert
found.Should().NotBeNull();
@@ -272,13 +273,13 @@ public class FileSystemRootStoreTests : IDisposable
var cert = CreateTestCertificate("CN=Org Key");
var orgDir = Path.Combine(_testRootPath, "org-signing");
Directory.CreateDirectory(orgDir);
await WritePemFileAsync(Path.Combine(orgDir, "org.pem"), cert);
await WritePemFileAsync(Path.Combine(orgDir, "org.pem"), cert, TestCancellationToken);
var options = CreateOptions(orgSigningPath: orgDir);
var store = CreateStore(options);
// Act
var found = await store.GetOrgKeyByIdAsync("nonexistent-key-id");
var found = await store.GetOrgKeyByIdAsync("nonexistent-key-id", TestCancellationToken);
// Assert
found.Should().BeNull();
@@ -291,13 +292,13 @@ public class FileSystemRootStoreTests : IDisposable
// Arrange
var cert = CreateTestCertificate("CN=Rekor Key");
var rekorPath = Path.Combine(_testRootPath, "rekor.pem");
await WritePemFileAsync(rekorPath, cert);
await WritePemFileAsync(rekorPath, cert, TestCancellationToken);
var options = CreateOptions(rekorPath: rekorPath);
var store = CreateStore(options);
// Act
var keys = await store.GetRekorKeysAsync();
var keys = await store.GetRekorKeysAsync(TestCancellationToken);
// Assert
keys.Should().HaveCount(1);
@@ -314,13 +315,13 @@ public class FileSystemRootStoreTests : IDisposable
var cert3 = CreateTestCertificate("CN=Cert 3");
var pemPath = Path.Combine(_testRootPath, "multi.pem");
await WriteMultiplePemFileAsync(pemPath, [cert1, cert2, cert3]);
await WriteMultiplePemFileAsync(pemPath, [cert1, cert2, cert3], TestCancellationToken);
var options = CreateOptions(fulcioPath: pemPath);
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().HaveCount(3);
@@ -336,7 +337,7 @@ public class FileSystemRootStoreTests : IDisposable
Directory.CreateDirectory(fulcioKitDir);
var cert = CreateTestCertificate("CN=Offline Kit Root");
await WritePemFileAsync(Path.Combine(fulcioKitDir, "root.pem"), cert);
await WritePemFileAsync(Path.Combine(fulcioKitDir, "root.pem"), cert, TestCancellationToken);
var options = Options.Create(new OfflineRootStoreOptions
{
@@ -347,7 +348,7 @@ public class FileSystemRootStoreTests : IDisposable
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().HaveCount(1);
@@ -364,7 +365,7 @@ public class FileSystemRootStoreTests : IDisposable
Directory.CreateDirectory(fulcioKitDir);
var cert = CreateTestCertificate("CN=Offline Kit Root");
await WritePemFileAsync(Path.Combine(fulcioKitDir, "root.pem"), cert);
await WritePemFileAsync(Path.Combine(fulcioKitDir, "root.pem"), cert, TestCancellationToken);
var options = Options.Create(new OfflineRootStoreOptions
{
@@ -375,7 +376,7 @@ public class FileSystemRootStoreTests : IDisposable
var store = CreateStore(options);
// Act
var roots = await store.GetFulcioRootsAsync();
var roots = await store.GetFulcioRootsAsync(TestCancellationToken);
// Assert
roots.Should().BeEmpty();
@@ -423,17 +424,17 @@ public class FileSystemRootStoreTests : IDisposable
return request.CreateSelfSigned(notBefore, notAfter);
}
private static async Task WritePemFileAsync(string path, X509Certificate2 cert)
private static async Task WritePemFileAsync(string path, X509Certificate2 cert, CancellationToken cancellationToken)
{
var pem = new StringBuilder();
pem.AppendLine("-----BEGIN CERTIFICATE-----");
pem.AppendLine(Convert.ToBase64String(cert.RawData, Base64FormattingOptions.InsertLineBreaks));
pem.AppendLine("-----END CERTIFICATE-----");
await File.WriteAllTextAsync(path, pem.ToString());
await File.WriteAllTextAsync(path, pem.ToString(), cancellationToken);
}
private static async Task WriteMultiplePemFileAsync(string path, X509Certificate2[] certs)
private static async Task WriteMultiplePemFileAsync(string path, X509Certificate2[] certs, CancellationToken cancellationToken)
{
var pem = new StringBuilder();
foreach (var cert in certs)
@@ -444,7 +445,7 @@ public class FileSystemRootStoreTests : IDisposable
pem.AppendLine();
}
await File.WriteAllTextAsync(path, pem.ToString());
await File.WriteAllTextAsync(path, pem.ToString(), cancellationToken);
}
private static string ComputeThumbprint(X509Certificate2 cert)

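The PEM helpers above assemble the BEGIN/END CERTIFICATE block by hand from cert.RawData. On newer .NET (7 and later) the same output can come from the built-in PEM export; this is only an alternative sketch, not what the test project uses.

using System;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;

// Create a throwaway self-signed certificate and emit it as PEM.
using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var request = new CertificateRequest("CN=Sketch Root", key, HashAlgorithmName.SHA256);
using var cert = request.CreateSelfSigned(
    DateTimeOffset.UtcNow.AddDays(-1),
    DateTimeOffset.UtcNow.AddYears(1));

// ExportCertificatePem writes the BEGIN/END CERTIFICATE markers and line-wrapped base64.
Console.WriteLine(cert.ExportCertificatePem());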
View File

@@ -26,6 +26,7 @@ public class OfflineCertChainValidatorTests
private readonly Mock<ILogger<OfflineVerifier>> _loggerMock;
private readonly IMerkleTreeBuilder _merkleBuilder;
private readonly IOptions<OfflineVerificationConfig> _config;
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
public OfflineCertChainValidatorTests()
{
@@ -51,7 +52,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeTrue();
@@ -77,7 +78,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();
@@ -100,7 +101,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();
@@ -125,7 +126,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();
@@ -150,7 +151,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();
@@ -178,7 +179,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyBundleAsync(bundle, options);
var result = await verifier.VerifyBundleAsync(bundle, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeTrue();
@@ -200,7 +201,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: false); // Disabled
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert - When cert chain validation is disabled, it should not report cert-related issues
result.Issues.Should().NotContain(i => i.Code.Contains("CERT_CHAIN"));
@@ -224,7 +225,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();
@@ -247,7 +248,7 @@ public class OfflineCertChainValidatorTests
VerifyCertificateChain: true);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.CertificateChainValid.Should().BeFalse();

View File

@@ -30,6 +30,7 @@ namespace StellaOps.Attestor.Offline.Tests;
public class OfflineVerifierTests
{
private static readonly DateTimeOffset FixedNow = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static CancellationToken TestCancellationToken => TestContext.Current.CancellationToken;
private readonly Mock<IOfflineRootStore> _rootStoreMock;
private readonly IMerkleTreeBuilder _merkleBuilder;
private readonly Mock<IOrgKeySigner> _orgSignerMock;
@@ -65,7 +66,7 @@ public class OfflineVerifierTests
VerifyOrgSignature: false);
// Act
var result = await verifier.VerifyBundleAsync(bundle, options);
var result = await verifier.VerifyBundleAsync(bundle, options, TestCancellationToken);
// Assert
result.Valid.Should().BeTrue();
@@ -99,7 +100,7 @@ public class OfflineVerifierTests
VerifyCertificateChain: false);
// Act
var result = await verifier.VerifyBundleAsync(tamperedBundle, options);
var result = await verifier.VerifyBundleAsync(tamperedBundle, options, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -123,7 +124,7 @@ public class OfflineVerifierTests
RequireOrgSignature: true);
// Act
var result = await verifier.VerifyBundleAsync(bundle, options);
var result = await verifier.VerifyBundleAsync(bundle, options, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -161,7 +162,7 @@ public class OfflineVerifierTests
VerifyOrgSignature: true);
// Act
var result = await verifier.VerifyBundleAsync(signedBundle, options);
var result = await verifier.VerifyBundleAsync(signedBundle, options, TestCancellationToken);
// Assert
result.Valid.Should().BeTrue();
@@ -183,7 +184,7 @@ public class OfflineVerifierTests
VerifyCertificateChain: false);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options);
var result = await verifier.VerifyAttestationAsync(attestation, options, TestCancellationToken);
// Assert
result.Valid.Should().BeTrue();
@@ -214,7 +215,7 @@ public class OfflineVerifierTests
VerifyCertificateChain: false);
// Act
var result = await verifier.VerifyAttestationAsync(tamperedAttestation, options);
var result = await verifier.VerifyAttestationAsync(tamperedAttestation, options, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -236,7 +237,7 @@ public class OfflineVerifierTests
VerifyCertificateChain: false);
// Act
var summaries = await verifier.GetVerificationSummariesAsync(bundle, options);
var summaries = await verifier.GetVerificationSummariesAsync(bundle, options, TestCancellationToken);
// Assert
summaries.Should().HaveCount(10);
@@ -276,7 +277,7 @@ public class OfflineVerifierTests
StrictMode: true);
// Act
var result = await verifier.VerifyBundleAsync(bundle, options);
var result = await verifier.VerifyBundleAsync(bundle, options, TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -296,7 +297,7 @@ public class OfflineVerifierTests
var verifier = CreateVerifier(config);
// Act
var result = await verifier.VerifyBundleAsync(bundle, options: null);
var result = await verifier.VerifyBundleAsync(bundle, options: null, cancellationToken: TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -316,7 +317,7 @@ public class OfflineVerifierTests
var verifier = CreateVerifier(config);
// Act
var result = await verifier.VerifyAttestationAsync(attestation, options: null);
var result = await verifier.VerifyAttestationAsync(attestation, options: null, cancellationToken: TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -331,7 +332,7 @@ public class OfflineVerifierTests
var tempPath = Path.Combine(Path.GetTempPath(), $"bundle-{Guid.NewGuid():N}.json");
try
{
await File.WriteAllBytesAsync(tempPath, new byte[2 * 1024 * 1024]);
await File.WriteAllBytesAsync(tempPath, new byte[2 * 1024 * 1024], TestCancellationToken);
var config = Options.Create(new OfflineVerificationConfig
{
@@ -347,7 +348,8 @@ public class OfflineVerifierTests
VerifyMerkleProof: false,
VerifySignatures: false,
VerifyCertificateChain: false,
VerifyOrgSignature: false));
VerifyOrgSignature: false),
TestCancellationToken);
// Assert
result.Valid.Should().BeFalse();
@@ -383,8 +385,8 @@ public class OfflineVerifierTests
VerifyCertificateChain: false);
// Act
var result1 = await verifier.VerifyBundleAsync(bundle1, options);
var result2 = await verifier.VerifyBundleAsync(bundle2, options);
var result1 = await verifier.VerifyBundleAsync(bundle1, options, TestCancellationToken);
var result2 = await verifier.VerifyBundleAsync(bundle2, options, TestCancellationToken);
// Assert - both should have the same merkle validation result
result1.MerkleProofValid.Should().Be(result2.MerkleProofValid);

View File

@@ -8,3 +8,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0059-M | DONE | Revalidated 2026-01-06. |
| AUDIT-0059-T | DONE | Revalidated 2026-01-06. |
| AUDIT-0059-A | DONE | Waived after revalidation 2026-01-06. |
| AUDIT-0210-T | DONE | Revalidated 2026-01-08 (xUnit1051 fixes). |
| AUDIT-0210-A | DONE | Applied fixes 2026-01-08 (xUnit1051 fixes). |

View File

@@ -12,12 +12,23 @@ public sealed class GeneratorOutputTests
var schemaDir = Path.Combine(AppContext.BaseDirectory, "schemas");
Directory.Exists(schemaDir).Should().BeTrue($"schema directory should exist at '{schemaDir}'");
var expectedOverrides = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["attestation-common.v1.schema.json"] = "https://schemas.stella-ops.org/attestations/common/v1",
["uncertainty-budget-statement.v1.schema.json"] = "https://stella-ops.org/schemas/attestation/uncertainty-budget-statement.v1.json",
["uncertainty-statement.v1.schema.json"] = "https://stella-ops.org/schemas/attestation/uncertainty-statement.v1.json",
["verification-policy.v1.schema.json"] = "https://stellaops.io/schemas/verification-policy.v1.json"
};
foreach (var path in Directory.EnumerateFiles(schemaDir, "*.schema.json", SearchOption.TopDirectoryOnly))
{
using var doc = JsonDocument.Parse(File.ReadAllText(path));
doc.RootElement.TryGetProperty("$id", out var idElement).Should().BeTrue();
var expected = $"https://stella-ops.org/schemas/attestor/{Path.GetFileName(path)}";
var fileName = Path.GetFileName(path);
var expected = expectedOverrides.TryGetValue(fileName, out var overrideId)
? overrideId
: $"https://stella-ops.org/schemas/attestor/{fileName}";
idElement.GetString().Should().Be(expected);
}
}
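A compact restatement of the expectation rule this hunk introduces: an explicit override wins, otherwise the attestor base URI plus the schema file name. The dictionary shape mirrors the test above; rekor-receipt.v1.schema.json is a hypothetical file name used only to illustrate the fallback branch.
using System.Collections.Generic;
internal static class SchemaIdResolver
{
    // Explicit override wins; otherwise the attestor base URI plus the schema file name.
    public static string Resolve(string fileName, IReadOnlyDictionary<string, string> overrides)
        => overrides.TryGetValue(fileName, out var overrideId)
            ? overrideId
            : $"https://stella-ops.org/schemas/attestor/{fileName}";
}
// Example: with no override entry, "rekor-receipt.v1.schema.json" (hypothetical) resolves to
// "https://stella-ops.org/schemas/attestor/rekor-receipt.v1.schema.json".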

View File

@@ -65,7 +65,7 @@ public sealed class RekorInclusionProofTests
_output.WriteLine($"Tree size: {tree.Size}");
_output.WriteLine($"Root hash: {Convert.ToHexString(tree.RootHash).ToLower()}");
_output.WriteLine($"Proof path length: {proof.Count}");
_output.WriteLine(" Inclusion proof verified");
_output.WriteLine("[OK] Inclusion proof verified");
}
[Fact]
@@ -97,7 +97,7 @@ public sealed class RekorInclusionProofTests
proof: proof);
verified.Should().BeTrue($"entry {i} should verify");
_output.WriteLine($" Entry {i}: (proof path: {proof.Count} nodes)");
_output.WriteLine($" Entry {i}: [OK] (proof path: {proof.Count} nodes)");
}
}
@@ -128,7 +128,7 @@ public sealed class RekorInclusionProofTests
// Assert
verified.Should().BeFalse("tampered leaf should not verify");
_output.WriteLine(" Tampered leaf data detected");
_output.WriteLine("[OK] Tampered leaf data detected");
}
[Fact]
@@ -159,7 +159,7 @@ public sealed class RekorInclusionProofTests
// Assert
verified.Should().BeFalse("tampered proof path should not verify");
_output.WriteLine(" Tampered proof path detected");
_output.WriteLine("[OK] Tampered proof path detected");
}
[Fact]
@@ -183,7 +183,7 @@ public sealed class RekorInclusionProofTests
// Assert
verified.Should().BeFalse("tampered root hash should not verify");
_output.WriteLine(" Tampered root hash detected");
_output.WriteLine("[OK] Tampered root hash detected");
}
[Fact]
@@ -209,7 +209,7 @@ public sealed class RekorInclusionProofTests
// Assert
verified.Should().BeFalse("wrong index should not verify");
_output.WriteLine(" Wrong index detected");
_output.WriteLine("[OK] Wrong index detected");
}
#endregion
@@ -237,7 +237,7 @@ public sealed class RekorInclusionProofTests
verified.Should().BeTrue("single node tree should verify");
proof.Should().BeEmpty("single node tree needs no proof path");
_output.WriteLine(" Single node tree verified");
_output.WriteLine("[OK] Single node tree verified");
}
[Fact]
@@ -271,7 +271,7 @@ public sealed class RekorInclusionProofTests
verified0.Should().BeTrue("entry 0 should verify");
verified1.Should().BeTrue("entry 1 should verify");
_output.WriteLine(" Two node tree verified");
_output.WriteLine("[OK] Two node tree verified");
}
[Fact]
@@ -304,7 +304,7 @@ public sealed class RekorInclusionProofTests
proof: proof);
verified.Should().BeTrue($"entry {index} should verify");
_output.WriteLine($" Entry {index}: (proof path: {proof.Count} nodes)");
_output.WriteLine($" Entry {index}: [OK] (proof path: {proof.Count} nodes)");
}
}
@@ -334,7 +334,7 @@ public sealed class RekorInclusionProofTests
verified.Should().BeTrue($"entry {i} should verify in non-power-of-two tree");
}
_output.WriteLine(" Non-power-of-two tree verified");
_output.WriteLine("[OK] Non-power-of-two tree verified");
}
#endregion
@@ -412,7 +412,7 @@ public sealed class RekorInclusionProofTests
// Assert - all results should be identical
results.Should().AllBeEquivalentTo(true);
_output.WriteLine(" Verification is deterministic across 10 runs");
_output.WriteLine("[OK] Verification is deterministic across 10 runs");
}
[Fact]
@@ -482,6 +482,11 @@ public sealed class RekorInclusionProofTests
byte[] rootHash,
IReadOnlyList<byte[]> proof)
{
if (treeSize <= 0 || leafIndex < 0 || leafIndex >= treeSize)
{
return false;
}
var leafHash = HashLeaf(leafData);
var computedRoot = RecomputeRoot(leafHash, leafIndex, treeSize, proof);
return computedRoot.SequenceEqual(rootHash);
@@ -555,19 +560,21 @@ public sealed class RekorInclusionProofTests
{
var current = leafHash;
var currentIndex = index;
var lastIndex = treeSize - 1;
foreach (var sibling in proof)
{
if (currentIndex % 2 == 0)
{
current = HashInner(current, sibling);
}
else
if (currentIndex % 2 == 1 || currentIndex == lastIndex)
{
current = HashInner(sibling, current);
}
else
{
current = HashInner(current, sibling);
}
currentIndex /= 2;
lastIndex /= 2;
}
return current;
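The reworked RecomputeRoot treats the current node as a right child when its index is odd or when it is the last node at its level, which is what lets non-power-of-two trees verify. Below is a self-contained sketch of the same simplified rule (not a full RFC 6962/9162 verifier), assuming SHA-256 with 0x00/0x01 leaf and interior prefixes; the three-leaf example exercises the last-node branch.
using System;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
internal static class InclusionProofSketch
{
    private static byte[] HashLeaf(byte[] data) =>
        SHA256.HashData(new byte[] { 0x00 }.Concat(data).ToArray());
    private static byte[] HashInner(byte[] left, byte[] right) =>
        SHA256.HashData(new byte[] { 0x01 }.Concat(left).Concat(right).ToArray());
    private static byte[] RecomputeRoot(byte[] leafHash, long index, long treeSize, byte[][] proof)
    {
        var current = leafHash;
        var lastIndex = treeSize - 1;
        foreach (var sibling in proof)
        {
            current = (index % 2 == 1 || index == lastIndex)
                ? HashInner(sibling, current)  // current node sits on the right (or is the promoted last node)
                : HashInner(current, sibling); // current node sits on the left
            index /= 2;
            lastIndex /= 2;
        }
        return current;
    }
    public static void Main()
    {
        // Three leaves: root = Inner(Inner(h0, h1), h2); the proof for leaf 2 is the single node Inner(h0, h1).
        var h = Enumerable.Range(0, 3)
            .Select(i => HashLeaf(Encoding.UTF8.GetBytes($"leaf-{i}")))
            .ToArray();
        var root = HashInner(HashInner(h[0], h[1]), h[2]);
        var recomputed = RecomputeRoot(h[2], index: 2, treeSize: 3, new[] { HashInner(h[0], h[1]) });
        Console.WriteLine(root.SequenceEqual(recomputed)); // True
    }
}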

View File

@@ -1,7 +1,7 @@
// -----------------------------------------------------------------------------
// RekorReceiptGenerationTests.cs
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
// Task: ATTESTOR-5100-006 - Add Rekor receipt generation tests: attestation → Rekor entry → receipt returned
// Task: ATTESTOR-5100-006 - Add Rekor receipt generation tests: attestation -> Rekor entry -> receipt returned
// Description: Tests for Rekor transparency log receipt generation
// -----------------------------------------------------------------------------
@@ -51,7 +51,7 @@ public sealed class RekorReceiptGenerationTests
response.Status.Should().Be("included", "entry should be included in log");
response.Index.Should().BeGreaterThanOrEqualTo(0, "index should be assigned");
_output.WriteLine($"✓ Receipt generated:");
_output.WriteLine("[OK] Receipt generated:");
_output.WriteLine($" UUID: {response.Uuid}");
_output.WriteLine($" Index: {response.Index}");
_output.WriteLine($" Status: {response.Status}");
@@ -283,7 +283,7 @@ public sealed class RekorReceiptGenerationTests
// Assert
response.LogUrl.Should().StartWith(expectedBaseUrl);
_output.WriteLine($"Backend {backend} {response.LogUrl}");
_output.WriteLine($"Backend {backend} -> {response.LogUrl}");
}
#endregion
@@ -329,7 +329,7 @@ public sealed class RekorReceiptGenerationTests
deserialized.Status.Should().Be(original.Status);
deserialized.IntegratedTime.Should().Be(original.IntegratedTime);
_output.WriteLine(" Receipt serialization roundtrips correctly");
_output.WriteLine("[OK] Receipt serialization roundtrips correctly");
}
#endregion
@@ -481,7 +481,7 @@ public sealed class RekorReceiptGenerationTests
{
return Task.FromResult(new SubmissionResult(false,
ErrorCode: "REKOR_INVALID_ENTRY",
ErrorMessage: "Invalid DSSE envelope: payload type and payload are required"));
ErrorMessage: "invalid DSSE envelope: payload type and payload are required"));
}
var response = CreateResponse(envelope);

View File

@@ -1,13 +1,12 @@
// -----------------------------------------------------------------------------
// RekorReceiptVerificationTests.cs
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
// Task: ATTESTOR-5100-007 - Add Rekor receipt verification tests: valid receipt → verification succeeds; invalid receipt → fails
// Task: ATTESTOR-5100-007 - Add Rekor receipt verification tests: valid receipt -> verification succeeds; invalid receipt -> fails
// Description: Tests for Rekor transparency log receipt verification
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using Xunit;
@@ -49,7 +48,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeTrue("valid receipt should verify");
result.ErrorCode.Should().BeNullOrEmpty();
_output.WriteLine(" Valid receipt verified successfully");
_output.WriteLine("[OK] Valid receipt verified successfully");
}
[Fact]
@@ -93,7 +92,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_MISSING_UUID");
_output.WriteLine($" Missing UUID detected: {result.ErrorCode}");
_output.WriteLine($"[OK] Missing UUID detected: {result.ErrorCode}");
}
[Fact]
@@ -111,7 +110,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_MISSING_INDEX");
_output.WriteLine($" Missing index detected: {result.ErrorCode}");
_output.WriteLine($"[OK] Missing index detected: {result.ErrorCode}");
}
[Fact]
@@ -129,7 +128,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_MISSING_PROOF");
_output.WriteLine($" Missing proof detected: {result.ErrorCode}");
_output.WriteLine($"[OK] Missing proof detected: {result.ErrorCode}");
}
[Fact]
@@ -147,7 +146,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_MISSING_CHECKPOINT");
_output.WriteLine($" Missing checkpoint detected: {result.ErrorCode}");
_output.WriteLine($"[OK] Missing checkpoint detected: {result.ErrorCode}");
}
[Fact]
@@ -165,7 +164,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_MISSING_INCLUSION");
_output.WriteLine($" Missing inclusion proof detected: {result.ErrorCode}");
_output.WriteLine($"[OK] Missing inclusion proof detected: {result.ErrorCode}");
}
#endregion
@@ -190,7 +189,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_INVALID_ROOT_HASH");
_output.WriteLine($"✓ Tampered root hash detected");
_output.WriteLine("[OK] Tampered root hash detected");
_output.WriteLine($" Original: {originalHash}");
_output.WriteLine($" Tampered: {receipt.Proof.Checkpoint.RootHash}");
}
@@ -212,7 +211,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_INVALID_LEAF_HASH");
_output.WriteLine($"✓ Tampered leaf hash detected");
_output.WriteLine("[OK] Tampered leaf hash detected");
}
[Fact]
@@ -232,7 +231,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_INVALID_INCLUSION_PATH");
_output.WriteLine($"✓ Tampered inclusion path detected");
_output.WriteLine("[OK] Tampered inclusion path detected");
}
[Fact]
@@ -253,7 +252,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_INDEX_MISMATCH");
_output.WriteLine($" Tampered index detected: {originalIndex} {receipt.Index}");
_output.WriteLine($"[OK] Tampered index detected: {originalIndex} -> {receipt.Index}");
}
#endregion
@@ -277,7 +276,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be("RECEIPT_TIME_SKEW");
_output.WriteLine($"✓ Future integrated time detected");
_output.WriteLine("[OK] Future integrated time detected");
}
[Fact]
@@ -296,7 +295,7 @@ public sealed class RekorReceiptVerificationTests
// Assert - should still be valid within tolerance
result.Success.Should().BeTrue("slight time skew should be allowed");
_output.WriteLine(" Slight time skew allowed within tolerance");
_output.WriteLine("[OK] Slight time skew allowed within tolerance");
}
#endregion
@@ -320,7 +319,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse();
result.ErrorCode.Should().Be(expectedError);
_output.WriteLine($"UUID '{uuid}' {expectedError}");
_output.WriteLine($"UUID '{uuid}' -> {expectedError}");
}
[Fact]
@@ -364,7 +363,7 @@ public sealed class RekorReceiptVerificationTests
// Assert
result.Success.Should().BeTrue("payload hash should match");
_output.WriteLine(" Payload hash verified");
_output.WriteLine("[OK] Payload hash verified");
}
[Fact]
@@ -383,7 +382,7 @@ public sealed class RekorReceiptVerificationTests
result.Success.Should().BeFalse("tampered payload should not match");
result.ErrorCode.Should().Be("RECEIPT_PAYLOAD_MISMATCH");
_output.WriteLine(" Tampered payload detected");
_output.WriteLine("[OK] Tampered payload detected");
}
#endregion
@@ -426,12 +425,13 @@ public sealed class RekorReceiptVerificationTests
private static RekorReceipt CreateValidReceipt()
{
var now = DateTimeOffset.UtcNow;
var index = 12345L;
const long index = 1;
const long treeSize = 2;
// Create deterministic hashes
var leafData = Encoding.UTF8.GetBytes($"leaf-{index}");
var leafHash = SHA256.HashData(leafData);
var rootHash = SHA256.HashData(leafHash);
var pathBytes = MockMerkleHelpers.BuildInclusionPathBytes(index);
var rootHash = MockMerkleHelpers.ComputeRootFromProof(leafHash, index, treeSize, pathBytes);
return new RekorReceipt
{
@@ -445,18 +445,14 @@ public sealed class RekorReceiptVerificationTests
Checkpoint = new RekorCheckpoint
{
Origin = "rekor.sigstore.dev - 2605736670972794746",
Size = index + 1,
RootHash = Convert.ToHexString(rootHash).ToLower(),
Size = treeSize,
RootHash = MockMerkleHelpers.ToHexLower(rootHash),
Timestamp = now
},
Inclusion = new RekorInclusionProof
{
LeafHash = Convert.ToHexString(leafHash).ToLower(),
Path = new[]
{
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"sibling-{index}-1"))).ToLower(),
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"sibling-{index}-2"))).ToLower()
}
LeafHash = MockMerkleHelpers.ToHexLower(leafHash),
Path = MockMerkleHelpers.BuildInclusionPath(index)
}
}
};
@@ -466,13 +462,102 @@ public sealed class RekorReceiptVerificationTests
{
var receipt = CreateValidReceipt();
var payloadHash = SHA256.HashData(payload);
receipt.Proof!.Inclusion!.LeafHash = Convert.ToHexString(payloadHash).ToLower();
receipt.Proof.Checkpoint!.RootHash = Convert.ToHexString(SHA256.HashData(payloadHash)).ToLower();
receipt.Proof!.Inclusion!.LeafHash = MockMerkleHelpers.ToHexLower(payloadHash);
var pathBytes = MockMerkleHelpers.DecodePath(receipt.Proof.Inclusion.Path);
var rootHash = MockMerkleHelpers.ComputeRootFromProof(
payloadHash,
receipt.Index!.Value,
receipt.Proof.Checkpoint!.Size,
pathBytes);
receipt.Proof.Checkpoint.RootHash = MockMerkleHelpers.ToHexLower(rootHash);
return receipt;
}
#endregion
private static class MockMerkleHelpers
{
public static string ToHexLower(byte[] bytes)
{
return Convert.ToHexString(bytes).ToLowerInvariant();
}
public static IReadOnlyList<string> BuildInclusionPath(long index)
{
var pathBytes = BuildInclusionPathBytes(index);
var path = new string[pathBytes.Count];
for (int i = 0; i < pathBytes.Count; i++)
{
path[i] = ToHexLower(pathBytes[i]);
}
return path;
}
public static IReadOnlyList<byte[]> BuildInclusionPathBytes(long index)
{
return new[]
{
SHA256.HashData(Encoding.UTF8.GetBytes($"sibling-{index}-1"))
};
}
public static IReadOnlyList<byte[]> DecodePath(IReadOnlyList<string> path)
{
var decoded = new byte[path.Count][];
for (int i = 0; i < path.Count; i++)
{
decoded[i] = Convert.FromHexString(path[i]);
}
return decoded;
}
public static byte[] DecodeHash(string hex)
{
return Convert.FromHexString(hex);
}
public static byte[] ComputeRootFromProof(
byte[] leafHash,
long leafIndex,
long treeSize,
IReadOnlyList<byte[]> proof)
{
var current = leafHash;
var currentIndex = leafIndex;
var lastIndex = treeSize - 1;
foreach (var sibling in proof)
{
if ((currentIndex & 1) == 1 || currentIndex == lastIndex)
{
current = HashInner(sibling, current);
}
else
{
current = HashInner(current, sibling);
}
currentIndex >>= 1;
lastIndex >>= 1;
}
return current;
}
private static byte[] HashInner(byte[] left, byte[] right)
{
var combined = new byte[left.Length + right.Length + 1];
combined[0] = 0x01;
Buffer.BlockCopy(left, 0, combined, 1, left.Length);
Buffer.BlockCopy(right, 0, combined, 1 + left.Length, right.Length);
return SHA256.HashData(combined);
}
}
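// Why the fixture uses index = 1 and tree size = 2: in a two-leaf tree the inclusion proof for
// leaf 1 is a single sibling (the hash standing in for leaf 0), and the checkpoint root is the
// interior hash of (sibling, leaf). Concretely, ComputeRootFromProof(leafHash, 1, 2, [sibling])
// reduces to HashInner(sibling, leafHash), so the receipt's path, leaf hash, and root hash stay
// mutually consistent and the happy-path verification genuinely recomputes the root.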
#region Mock Types
private sealed class RekorReceipt
@@ -518,6 +603,30 @@ public sealed class RekorReceiptVerificationTests
public TimeSpan AllowedTimeSkew { get; set; } = TimeSpan.FromMinutes(5);
public VerificationResult Verify(RekorReceipt receipt)
{
return VerifyCore(receipt, verifyLeafHash: true);
}
public VerificationResult VerifyWithPayload(RekorReceipt receipt, byte[] payload)
{
var basicResult = VerifyCore(receipt, verifyLeafHash: false);
if (!basicResult.Success)
{
return basicResult;
}
// Verify payload hash matches leaf hash
var payloadHash = MockMerkleHelpers.ToHexLower(SHA256.HashData(payload));
if (!string.Equals(receipt.Proof!.Inclusion!.LeafHash, payloadHash, StringComparison.Ordinal))
{
return new VerificationResult(false, "RECEIPT_PAYLOAD_MISMATCH",
"Payload hash does not match receipt leaf hash");
}
return basicResult;
}
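// Design note: Verify and VerifyWithPayload now share VerifyCore(verifyLeafHash:). The structural
// checks (UUID, index, status, checkpoint, inclusion path, root recomputation, time skew) run in
// both paths; only the leaf-hash comparison differs, because payload-based verification compares
// the caller-supplied payload hash against the receipt's leaf hash rather than the fixture's
// synthetic $"leaf-{index}" value.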
private VerificationResult VerifyCore(RekorReceipt receipt, bool verifyLeafHash)
{
// Check UUID
if (string.IsNullOrEmpty(receipt.Uuid))
@@ -536,6 +645,8 @@ public sealed class RekorReceiptVerificationTests
return new VerificationResult(false, "RECEIPT_MISSING_INDEX", "Receipt index is required");
}
var index = receipt.Index.Value;
// Check status
if (receipt.Status != "included")
{
@@ -582,6 +693,39 @@ public sealed class RekorReceiptVerificationTests
return new VerificationResult(false, "RECEIPT_INDEX_MISMATCH", "Index is inconsistent with checkpoint size");
}
if (verifyLeafHash)
{
var expectedLeafHash = MockMerkleHelpers.ToHexLower(
SHA256.HashData(Encoding.UTF8.GetBytes($"leaf-{index}")));
if (!string.Equals(receipt.Proof.Inclusion.LeafHash, expectedLeafHash, StringComparison.Ordinal))
{
return new VerificationResult(false, "RECEIPT_INVALID_LEAF_HASH",
"Leaf hash does not match expected value");
}
}
var expectedPath = MockMerkleHelpers.BuildInclusionPath(index);
if (!expectedPath.SequenceEqual(receipt.Proof.Inclusion.Path, StringComparer.Ordinal))
{
return new VerificationResult(false, "RECEIPT_INVALID_INCLUSION_PATH",
"Inclusion path does not match expected value");
}
var leafHashBytes = MockMerkleHelpers.DecodeHash(receipt.Proof.Inclusion.LeafHash!);
var pathBytes = MockMerkleHelpers.DecodePath(receipt.Proof.Inclusion.Path);
var rootHashBytes = MockMerkleHelpers.DecodeHash(receipt.Proof.Checkpoint.RootHash!);
var computedRoot = MockMerkleHelpers.ComputeRootFromProof(
leafHashBytes,
index,
receipt.Proof.Checkpoint.Size,
pathBytes);
if (!computedRoot.SequenceEqual(rootHashBytes))
{
return new VerificationResult(false, "RECEIPT_INVALID_ROOT_HASH",
"Root hash does not match inclusion proof");
}
// Verify time is not too far in the future
if (receipt.IntegratedTime.HasValue)
{
@@ -601,25 +745,6 @@ public sealed class RekorReceiptVerificationTests
: null);
}
public VerificationResult VerifyWithPayload(RekorReceipt receipt, byte[] payload)
{
var basicResult = Verify(receipt);
if (!basicResult.Success)
{
return basicResult;
}
// Verify payload hash matches leaf hash
var payloadHash = Convert.ToHexString(SHA256.HashData(payload)).ToLower();
if (receipt.Proof!.Inclusion!.LeafHash != payloadHash)
{
return new VerificationResult(false, "RECEIPT_PAYLOAD_MISMATCH",
"Payload hash does not match receipt leaf hash");
}
return basicResult;
}
private static bool IsValidUuidFormat(string uuid)
{
// Rekor UUIDs are 64 hex characters

View File

@@ -8,3 +8,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0070-M | DONE | Revalidated 2026-01-06 (maintainability audit). |
| AUDIT-0070-T | DONE | Revalidated 2026-01-06 (test coverage audit). |
| AUDIT-0070-A | DONE | Waived (test project; revalidated 2026-01-06). |
| AUDIT-0214-T | DONE | Revalidated 2026-01-08 (Rekor proofs + schema IDs). |
| AUDIT-0214-A | DONE | Applied fixes 2026-01-08 (Rekor proofs + schema IDs). |

View File

@@ -6,3 +6,6 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0071-A | DONE | Added test coverage for Attestor.Verify (apply fixes). |
| AUDIT-0730-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0730-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0730-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,29 @@
# BinaryIndex Cache Tests Charter
## Mission
Validate BinaryIndex cache behaviors (invalidation, pattern matching, TTL) with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Cache.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `CachedBinaryVulnerabilityServiceTests.cs`
- `ResolutionCacheServiceTests.cs`
- `CacheOptionsValidationTests.cs`
## Coordination
- BinaryIndex cache owners (StellaOps.BinaryIndex.Cache).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Cache Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0737-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0737-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0737-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,27 @@
# BinaryIndex Contracts Tests Charter
## Mission
Validate BinaryIndex resolution contract models for validation rules and serialization stability.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Contracts.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `VulnResolutionContractsTests.cs`
## Coordination
- BinaryIndex contracts owners (StellaOps.BinaryIndex.Contracts).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Contracts Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0738-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0738-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0738-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,27 @@
# BinaryIndex Alpine Corpus Tests Charter
## Mission
Validate Alpine corpus extraction and APK parsing with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Corpus.Alpine.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `AlpinePackageExtractorTests.cs`
## Coordination
- BinaryIndex corpus owners (StellaOps.BinaryIndex.Corpus.Alpine).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Alpine Corpus Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0739-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0739-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0739-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,28 @@
# BinaryIndex Debian Corpus Tests Charter
## Mission
Validate Debian corpus extraction and mirror package index parsing with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Corpus.Debian.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `DebianPackageExtractorTests.cs`
- `DebianMirrorPackageSourceTests.cs`
## Coordination
- BinaryIndex corpus owners (StellaOps.BinaryIndex.Corpus.Debian).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Debian Corpus Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0740-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0740-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0740-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,27 @@
# BinaryIndex RPM Corpus Tests Charter
## Mission
Validate RPM corpus extraction and compression handling with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Corpus.Rpm.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `RpmPackageExtractorTests.cs`
## Coordination
- BinaryIndex corpus owners (StellaOps.BinaryIndex.Corpus.Rpm).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex RPM Corpus Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0741-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0741-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0741-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,27 @@
# BinaryIndex Corpus Tests Charter
## Mission
Validate corpus contracts and normalization behavior with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Corpus.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `CorpusContractsTests.cs`
## Coordination
- BinaryIndex corpus owners (StellaOps.BinaryIndex.Corpus).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Corpus Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0742-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0742-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0742-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,30 @@
# BinaryIndex DeltaSig Tests Charter
## Mission
Validate delta signature models, matcher/generator behavior, and deterministic matching.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.DeltaSig.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `CfgExtractorTests.cs`
- `DeltaSignatureGeneratorTests.cs`
- `DeltaSignatureMatcherTests.cs`
- `ModelTests.cs`
## Coordination
- BinaryIndex delta signature owners (StellaOps.BinaryIndex.DeltaSig).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex DeltaSig Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0743-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0743-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0743-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,32 @@
# BinaryIndex Disassembly Tests Charter
## Mission
Validate disassembly plugins and service behavior with deterministic tests.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Disassembly.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `B2R2PluginTests.cs`
- `DisassemblyServiceTests.cs`
- `HybridDisassemblyServiceTests.cs`
- `IcedPluginTests.cs`
- `PluginCapabilitiesTests.cs`
- `PluginRegistryTests.cs`
## Coordination
- BinaryIndex disassembly owners (StellaOps.BinaryIndex.Disassembly).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Disassembly Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0744-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0744-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0744-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -1,10 +1,13 @@
# BinaryIndex FixIndex Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0124-M | DONE | Maintainability audit for StellaOps.BinaryIndex.FixIndex. |
| AUDIT-0124-T | DONE | Test coverage audit for StellaOps.BinaryIndex.FixIndex. |
| AUDIT-0124-A | DONE | Pending approval for changes. |
| AUDIT-0745-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0745-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0745-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,29 @@
# BinaryIndex Normalization Tests Charter
## Mission
Validate normalization pipelines and deterministic outputs.
## Responsibilities
- Maintain `StellaOps.BinaryIndex.Normalization.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `Arm64NormalizationPipelineTests.cs`
- `NormalizationServiceTests.cs`
- `X64NormalizationPipelineTests.cs`
## Coordination
- BinaryIndex normalization owners (StellaOps.BinaryIndex.Normalization).
## Required Reading
- `docs/modules/binaryindex/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both corresponding sprint file and the local `TASKS.md` when you start or finish work.
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
- 3. Keep tests deterministic (stable ordering, timestamps, IDs) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.

View File

@@ -0,0 +1,10 @@
# BinaryIndex Normalization Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0746-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0746-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0746-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -1,8 +1,11 @@
# BinaryIndex WebService Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0129-A | DONE | Added deterministic controller/cache/middleware tests. |
| AUDIT-0747-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0747-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0747-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -25,8 +25,8 @@ internal static class AirGapEndpointExtensions
// GET /api/v1/concelier/airgap/catalog - Aggregated bundle catalog
group.MapGet("/catalog", async (
HttpContext context,
IBundleCatalogService catalogService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleCatalogService catalogService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromQuery] string? cursor,
[FromQuery] int? limit,
CancellationToken cancellationToken) =>
@@ -46,8 +46,8 @@ internal static class AirGapEndpointExtensions
// GET /api/v1/concelier/airgap/sources - List registered sources
group.MapGet("/sources", (
HttpContext context,
IBundleSourceRegistry sourceRegistry,
IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
[FromServices] IBundleSourceRegistry sourceRegistry,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
{
var airGapOptions = optionsMonitor.CurrentValue.AirGap;
if (!airGapOptions.Enabled)
@@ -62,8 +62,8 @@ internal static class AirGapEndpointExtensions
// POST /api/v1/concelier/airgap/sources - Register new source
group.MapPost("/sources", async (
HttpContext context,
IBundleSourceRegistry sourceRegistry,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleSourceRegistry sourceRegistry,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromBody] BundleSourceRegistration registration,
CancellationToken cancellationToken) =>
{
@@ -87,8 +87,8 @@ internal static class AirGapEndpointExtensions
// GET /api/v1/concelier/airgap/sources/{sourceId} - Get specific source
group.MapGet("/sources/{sourceId}", (
HttpContext context,
IBundleSourceRegistry sourceRegistry,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleSourceRegistry sourceRegistry,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string sourceId) =>
{
var airGapOptions = optionsMonitor.CurrentValue.AirGap;
@@ -109,8 +109,8 @@ internal static class AirGapEndpointExtensions
// DELETE /api/v1/concelier/airgap/sources/{sourceId} - Unregister source
group.MapDelete("/sources/{sourceId}", async (
HttpContext context,
IBundleSourceRegistry sourceRegistry,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleSourceRegistry sourceRegistry,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string sourceId,
CancellationToken cancellationToken) =>
{
@@ -131,8 +131,8 @@ internal static class AirGapEndpointExtensions
// POST /api/v1/concelier/airgap/sources/{sourceId}/validate - Validate source
group.MapPost("/sources/{sourceId}/validate", async (
HttpContext context,
IBundleSourceRegistry sourceRegistry,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleSourceRegistry sourceRegistry,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string sourceId,
CancellationToken cancellationToken) =>
{
@@ -151,8 +151,8 @@ internal static class AirGapEndpointExtensions
// GET /api/v1/concelier/airgap/status - Sealed-mode status
group.MapGet("/status", (
HttpContext context,
ISealedModeEnforcer sealedModeEnforcer,
IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
[FromServices] ISealedModeEnforcer sealedModeEnforcer,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
{
var airGapOptions = optionsMonitor.CurrentValue.AirGap;
if (!airGapOptions.Enabled)
@@ -168,9 +168,9 @@ internal static class AirGapEndpointExtensions
// Per CONCELIER-WEB-AIRGAP-58-001
group.MapPost("/bundles/{bundleId}/import", async (
HttpContext context,
IBundleCatalogService catalogService,
IBundleTimelineEmitter timelineEmitter,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleCatalogService catalogService,
[FromServices] IBundleTimelineEmitter timelineEmitter,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string bundleId,
[FromBody] BundleImportRequestDto requestDto,
CancellationToken cancellationToken) =>

View File

@@ -30,8 +30,8 @@ internal static class CanonicalAdvisoryEndpointExtensions
// GET /api/v1/canonical/{id} - Get canonical advisory by ID
group.MapGet("/{id:guid}", async (
Guid id,
ICanonicalAdvisoryService service,
IInterestScoringService? scoringService,
[FromServices] ICanonicalAdvisoryService service,
[FromServices] IInterestScoringService? scoringService,
HttpContext context,
CancellationToken ct) =>
{
@@ -63,7 +63,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
[FromQuery] string? mergeHash,
[FromQuery] int? offset,
[FromQuery] int? limit,
ICanonicalAdvisoryService service,
[FromServices] ICanonicalAdvisoryService service,
HttpContext context,
CancellationToken ct) =>
{
@@ -126,7 +126,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
group.MapPost("/ingest/{source}", async (
string source,
[FromBody] RawAdvisoryRequest request,
ICanonicalAdvisoryService service,
[FromServices] ICanonicalAdvisoryService service,
HttpContext context,
CancellationToken ct) =>
{
@@ -187,7 +187,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
group.MapPost("/ingest/{source}/batch", async (
string source,
[FromBody] IEnumerable<RawAdvisoryRequest> requests,
ICanonicalAdvisoryService service,
[FromServices] ICanonicalAdvisoryService service,
HttpContext context,
CancellationToken ct) =>
{
@@ -246,7 +246,7 @@ internal static class CanonicalAdvisoryEndpointExtensions
group.MapPatch("/{id:guid}/status", async (
Guid id,
[FromBody] UpdateStatusRequest request,
ICanonicalAdvisoryService service,
[FromServices] ICanonicalAdvisoryService service,
HttpContext context,
CancellationToken ct) =>
{
@@ -267,8 +267,8 @@ internal static class CanonicalAdvisoryEndpointExtensions
// GET /api/v1/canonical/{id}/provenance - Get provenance scopes for canonical
group.MapGet("/{id:guid}/provenance", async (
Guid id,
IProvenanceScopeService? provenanceService,
ICanonicalAdvisoryService canonicalService,
[FromServices] IProvenanceScopeService? provenanceService,
[FromServices] ICanonicalAdvisoryService canonicalService,
HttpContext context,
CancellationToken ct) =>
{

View File

@@ -23,8 +23,8 @@ internal static class FederationEndpointExtensions
// GET /api/v1/federation/export - Export delta bundle
group.MapGet("/export", async (
HttpContext context,
IBundleExportService exportService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleExportService exportService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "since_cursor")] string? sinceCursor = null,
[FromQuery] bool sign = true,
@@ -83,8 +83,8 @@ internal static class FederationEndpointExtensions
// GET /api/v1/federation/export/preview - Preview export statistics
group.MapGet("/export/preview", async (
HttpContext context,
IBundleExportService exportService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleExportService exportService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "since_cursor")] string? sinceCursor = null) =>
{
@@ -114,7 +114,7 @@ internal static class FederationEndpointExtensions
// GET /api/v1/federation/status - Federation status
group.MapGet("/status", (
HttpContext context,
IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor) =>
{
var options = optionsMonitor.CurrentValue;
@@ -134,8 +134,8 @@ internal static class FederationEndpointExtensions
// Per SPRINT_8200_0014_0003_CONCEL_bundle_import_merge Task 25-26.
group.MapPost("/import", async (
HttpContext context,
IBundleImportService importService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleImportService importService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "dry_run")] bool dryRun = false,
[FromQuery(Name = "skip_signature")] bool skipSignature = false,
@@ -230,8 +230,8 @@ internal static class FederationEndpointExtensions
// POST /api/v1/federation/import/validate - Validate bundle without importing
group.MapPost("/import/validate", async (
HttpContext context,
IBundleImportService importService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleImportService importService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken) =>
{
var options = optionsMonitor.CurrentValue;
@@ -264,8 +264,8 @@ internal static class FederationEndpointExtensions
// POST /api/v1/federation/import/preview - Preview import
group.MapPost("/import/preview", async (
HttpContext context,
IBundleImportService importService,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IBundleImportService importService,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken) =>
{
var options = optionsMonitor.CurrentValue;
@@ -313,8 +313,8 @@ internal static class FederationEndpointExtensions
// Per SPRINT_8200_0014_0003_CONCEL_bundle_import_merge Task 30.
group.MapGet("/sites", async (
HttpContext context,
ISyncLedgerRepository ledgerRepository,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] ISyncLedgerRepository ledgerRepository,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
CancellationToken cancellationToken,
[FromQuery(Name = "enabled_only")] bool enabledOnly = false) =>
{
@@ -350,8 +350,8 @@ internal static class FederationEndpointExtensions
// GET /api/v1/federation/sites/{siteId} - Get site details
group.MapGet("/sites/{siteId}", async (
HttpContext context,
ISyncLedgerRepository ledgerRepository,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] ISyncLedgerRepository ledgerRepository,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string siteId,
CancellationToken cancellationToken) =>
{
@@ -404,8 +404,8 @@ internal static class FederationEndpointExtensions
// Per SPRINT_8200_0014_0003_CONCEL_bundle_import_merge Task 31.
group.MapPut("/sites/{siteId}/policy", async (
HttpContext context,
ISyncLedgerRepository ledgerRepository,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] ISyncLedgerRepository ledgerRepository,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string siteId,
[FromBody] SitePolicyUpdateRequest request,
CancellationToken cancellationToken) =>

View File

@@ -79,8 +79,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> CreateSnapshotAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromBody] CreateSnapshotRequest? request,
CancellationToken cancellationToken)
{
@@ -129,8 +129,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> ListSnapshotsAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromQuery] int? limit,
[FromQuery] string? cursor,
CancellationToken cancellationToken)
@@ -165,8 +165,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> GetSnapshotAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string snapshotId,
CancellationToken cancellationToken)
{
@@ -201,8 +201,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> ExportSnapshotAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string snapshotId,
[FromQuery] string? format,
CancellationToken cancellationToken)
@@ -242,8 +242,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> ImportSnapshotAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
IFormFile file,
[FromQuery] bool? validate,
CancellationToken cancellationToken)
@@ -293,8 +293,8 @@ internal static class FeedSnapshotEndpointExtensions
private static async Task<IResult> ValidateSnapshotAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
string snapshotId,
CancellationToken cancellationToken)
{
@@ -330,8 +330,8 @@ internal static class FeedSnapshotEndpointExtensions
private static IResult ListSourcesAsync(
HttpContext context,
IFeedSnapshotCoordinator coordinator,
IOptionsMonitor<ConcelierOptions> optionsMonitor)
[FromServices] IFeedSnapshotCoordinator coordinator,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor)
{
var options = optionsMonitor.CurrentValue;

View File

@@ -28,7 +28,7 @@ internal static class InterestScoreEndpointExtensions
// GET /api/v1/canonical/{id}/score - Get interest score for a canonical advisory
group.MapGet("/canonical/{id:guid}/score", async (
Guid id,
IInterestScoringService scoringService,
[FromServices] IInterestScoringService scoringService,
CancellationToken ct) =>
{
var score = await scoringService.GetScoreAsync(id, ct).ConfigureAwait(false);
@@ -48,7 +48,7 @@ internal static class InterestScoreEndpointExtensions
[FromQuery] double? maxScore,
[FromQuery] int? offset,
[FromQuery] int? limit,
IInterestScoreRepository repository,
[FromServices] IInterestScoreRepository repository,
CancellationToken ct) =>
{
var scores = await repository.GetAllAsync(offset ?? 0, limit ?? 50, ct).ConfigureAwait(false);
@@ -80,7 +80,7 @@ internal static class InterestScoreEndpointExtensions
// GET /api/v1/scores/distribution - Get score distribution statistics
group.MapGet("/scores/distribution", async (
IInterestScoreRepository repository,
[FromServices] IInterestScoreRepository repository,
CancellationToken ct) =>
{
var distribution = await repository.GetScoreDistributionAsync(ct).ConfigureAwait(false);
@@ -103,7 +103,7 @@ internal static class InterestScoreEndpointExtensions
// POST /api/v1/canonical/{id}/score/compute - Compute score for a canonical
group.MapPost("/canonical/{id:guid}/score/compute", async (
Guid id,
IInterestScoringService scoringService,
[FromServices] IInterestScoringService scoringService,
CancellationToken ct) =>
{
var score = await scoringService.ComputeScoreAsync(id, ct).ConfigureAwait(false);
@@ -118,7 +118,7 @@ internal static class InterestScoreEndpointExtensions
// POST /api/v1/scores/recalculate - Admin endpoint to trigger full recalculation
group.MapPost("/scores/recalculate", async (
[FromBody] RecalculateRequest? request,
IInterestScoringService scoringService,
[FromServices] IInterestScoringService scoringService,
CancellationToken ct) =>
{
int updated;
@@ -147,8 +147,8 @@ internal static class InterestScoreEndpointExtensions
// POST /api/v1/scores/degrade - Admin endpoint to run stub degradation
group.MapPost("/scores/degrade", async (
[FromBody] DegradeRequest? request,
IInterestScoringService scoringService,
Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
[FromServices] IInterestScoringService scoringService,
[FromServices] Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
CancellationToken ct) =>
{
var threshold = request?.Threshold ?? options.Value.DegradationPolicy.DegradationThreshold;
@@ -169,8 +169,8 @@ internal static class InterestScoreEndpointExtensions
// POST /api/v1/scores/restore - Admin endpoint to restore stubs
group.MapPost("/scores/restore", async (
[FromBody] RestoreRequest? request,
IInterestScoringService scoringService,
Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
[FromServices] IInterestScoringService scoringService,
[FromServices] Microsoft.Extensions.Options.IOptions<InterestScoreOptions> options,
CancellationToken ct) =>
{
var threshold = request?.Threshold ?? options.Value.DegradationPolicy.RestorationThreshold;

View File

@@ -1,6 +1,7 @@
using System.Globalization;
using System.IO;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.WebService.Diagnostics;
using StellaOps.Concelier.WebService.Options;
@@ -18,9 +19,9 @@ internal static class MirrorEndpointExtensions
public static void MapConcelierMirrorEndpoints(this WebApplication app, bool authorityConfigured, bool enforceAuthority)
{
app.MapGet("/concelier/exports/index.json", async (
MirrorFileLocator locator,
MirrorRateLimiter limiter,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] MirrorFileLocator locator,
[FromServices] MirrorRateLimiter limiter,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
HttpContext context,
CancellationToken cancellationToken) =>
{
@@ -51,9 +52,9 @@ internal static class MirrorEndpointExtensions
app.MapGet("/concelier/exports/{**relativePath}", async (
string? relativePath,
MirrorFileLocator locator,
MirrorRateLimiter limiter,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
[FromServices] MirrorFileLocator locator,
[FromServices] MirrorRateLimiter limiter,
[FromServices] IOptionsMonitor<ConcelierOptions> optionsMonitor,
HttpContext context,
CancellationToken cancellationToken) =>
{

View File

@@ -25,7 +25,7 @@ internal static class SbomEndpointExtensions
// POST /api/v1/learn/sbom - Register and learn from an SBOM
group.MapPost("/learn/sbom", async (
[FromBody] LearnSbomRequest request,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
var input = new SbomRegistrationInput
@@ -62,7 +62,7 @@ internal static class SbomEndpointExtensions
// GET /api/v1/sboms/{digest}/affected - Get advisories affecting an SBOM
group.MapGet("/sboms/{digest}/affected", async (
string digest,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registration = await registryService.GetByDigestAsync(digest, ct).ConfigureAwait(false);
@@ -103,7 +103,7 @@ internal static class SbomEndpointExtensions
[FromQuery] int? offset,
[FromQuery] int? limit,
[FromQuery] string? tenantId,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registrations = await registryService.ListAsync(
@@ -140,7 +140,7 @@ internal static class SbomEndpointExtensions
// GET /api/v1/sboms/{digest} - Get SBOM registration details
group.MapGet("/sboms/{digest}", async (
string digest,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
var registration = await registryService.GetByDigestAsync(digest, ct).ConfigureAwait(false);
@@ -174,7 +174,7 @@ internal static class SbomEndpointExtensions
// DELETE /api/v1/sboms/{digest} - Unregister an SBOM
group.MapDelete("/sboms/{digest}", async (
string digest,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
await registryService.UnregisterAsync(digest, ct).ConfigureAwait(false);
@@ -187,7 +187,7 @@ internal static class SbomEndpointExtensions
// POST /api/v1/sboms/{digest}/rematch - Rematch SBOM against current advisories
group.MapPost("/sboms/{digest}/rematch", async (
string digest,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
try
@@ -216,7 +216,7 @@ internal static class SbomEndpointExtensions
group.MapPatch("/sboms/{digest}", async (
string digest,
[FromBody] SbomDeltaRequest request,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
try
@@ -258,7 +258,7 @@ internal static class SbomEndpointExtensions
// GET /api/v1/sboms/stats - Get SBOM registry statistics
group.MapGet("/sboms/stats", async (
[FromQuery] string? tenantId,
ISbomRegistryService registryService,
[FromServices] ISbomRegistryService registryService,
CancellationToken ct) =>
{
var stats = await registryService.GetStatsAsync(tenantId, ct).ConfigureAwait(false);
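The `[FromServices]` attributes added across these endpoints make the dependency-injection binding explicit, so minimal-API parameter inference never confuses a service parameter with the request body or a query value. A minimal sketch of the pattern, assuming hypothetical `IGreetingService` and `GreetRequest` types that are not part of this codebase:

```csharp
using System.Threading;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddSingleton<IGreetingService, GreetingService>();
var app = builder.Build();

// [FromBody] binds the JSON payload; [FromServices] pins the second parameter
// to the DI container instead of leaving it to binding inference.
app.MapPost("/greet", (
    [FromBody] GreetRequest request,
    [FromServices] IGreetingService greetings,
    CancellationToken ct) =>
        Results.Ok(greetings.Greet(request.Name)));

app.Run();

public sealed record GreetRequest(string Name);

public interface IGreetingService
{
    string Greet(string name);
}

public sealed class GreetingService : IGreetingService
{
    public string Greet(string name) => $"Hello, {name}!";
}
```

Without the attribute these endpoints would usually still bind correctly, since registered interface types are inferred as services, but the explicit form is what the hunks above standardize on.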

View File

@@ -104,6 +104,10 @@ builder.Host.ConfigureAppConfiguration((context, cfg) =>
#pragma warning restore ASP0013
var JsonOptions = CreateJsonOptions();
builder.Services.ConfigureHttpJsonOptions(options =>
{
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase));
});
builder.Configuration.AddStellaOpsDefaults(options =>
{
@@ -155,6 +159,26 @@ if (builder.Environment.IsEnvironment("Testing"))
}
ConcelierOptionsPostConfigure.Apply(concelierOptions, contentRootPath);
concelierOptions.Authority ??= new ConcelierOptions.AuthorityOptions();
concelierOptions.Authority.RequiredScopes ??= new List<string>();
concelierOptions.Authority.ClientScopes ??= new List<string>();
if (concelierOptions.Authority.RequiredScopes.Count == 0)
{
concelierOptions.Authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger);
}
if (concelierOptions.Authority.ClientScopes.Count == 0)
{
foreach (var scope in concelierOptions.Authority.RequiredScopes)
{
concelierOptions.Authority.ClientScopes.Add(scope);
}
}
if (concelierOptions.Authority.ClientScopes.Count == 0)
{
concelierOptions.Authority.ClientScopes.Add(StellaOpsScopes.ConcelierJobsTrigger);
}
// Skip validation in Testing to allow factory-provided wiring.
}
else
@@ -473,6 +497,7 @@ builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions
builder.Services.AddEndpointsApiExplorer();
var app = builder.Build();
var swaggerEnabled = app.Configuration.GetValue<bool>("Swagger:Enabled");
app.Logger.LogWarning("Authority enabled: {AuthorityEnabled}, test signing secret configured: {HasTestSecret}", authorityConfigured, !string.IsNullOrWhiteSpace(concelierOptions.Authority?.TestSigningSecret));
@@ -514,6 +539,7 @@ app.MapConcelierMirrorEndpoints(authorityConfigured, enforceAuthority);
// Canonical advisory endpoints (Sprint 8200.0012.0003)
app.MapCanonicalAdvisoryEndpoints();
app.MapInterestScoreEndpoints();
app.MapGet("/.well-known/openapi", ([FromServices] OpenApiDiscoveryDocumentProvider provider, HttpContext context) =>
{
@@ -559,6 +585,53 @@ app.MapGet("/.well-known/openapi", ([FromServices] OpenApiDiscoveryDocumentProvi
}
}).WithName("GetConcelierOpenApiDocument");
if (swaggerEnabled)
{
app.MapGet("/swagger/v1/swagger.json", ([FromServices] OpenApiDiscoveryDocumentProvider provider, HttpContext context) =>
{
var (payload, etag) = provider.GetDocument();
if (context.Request.Headers.IfNoneMatch.Count > 0)
{
foreach (var candidate in context.Request.Headers.IfNoneMatch)
{
if (Matches(candidate, etag))
{
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
return HttpResults.StatusCode(StatusCodes.Status304NotModified);
}
}
}
context.Response.Headers.ETag = etag;
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
return HttpResults.Text(payload, "application/json");
static bool Matches(string? candidate, string expected)
{
if (string.IsNullOrWhiteSpace(candidate))
{
return false;
}
var trimmed = candidate.Trim();
if (string.Equals(trimmed, expected, StringComparison.Ordinal))
{
return true;
}
if (trimmed.StartsWith("W/", StringComparison.OrdinalIgnoreCase))
{
var weakValue = trimmed[2..].TrimStart();
return string.Equals(weakValue, expected, StringComparison.Ordinal);
}
return false;
}
}).WithName("GetConcelierSwaggerDocument");
}
var orchestratorGroup = app.MapGroup("/internal/orch");
if (authorityConfigured)
{
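The swagger.json endpoint above returns an ETag and honours `If-None-Match`, including the weak `W/"<etag>"` form, so repeat fetches can be answered with 304 Not Modified. A client-side sketch of that flow, where the host name is a placeholder and the path matches the route registered above when `Swagger:Enabled` is true:

```csharp
using System;
using System.Net.Http;

// Placeholder host; the relative path is the route mapped above.
using var client = new HttpClient { BaseAddress = new Uri("https://concelier.example/") };

var first = await client.GetAsync("swagger/v1/swagger.json");
var etag = first.Headers.ETag; // cacheable for 300 seconds per the Cache-Control header above

using var conditional = new HttpRequestMessage(HttpMethod.Get, "swagger/v1/swagger.json");
if (etag is not null)
{
    conditional.Headers.IfNoneMatch.Add(etag); // the endpoint also accepts the weak W/"<etag>" form
}

var second = await client.SendAsync(conditional);
Console.WriteLine(second.StatusCode); // NotModified while the cached document is still current
```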

View File

@@ -272,7 +272,10 @@ internal sealed class AdvisoryChunkBuilder
AdvisoryStructuredFieldContent content,
AdvisoryProvenance provenance)
{
var fingerprint = string.Concat(documentId, '|', fieldPath);
var normalizedMask = NormalizeFieldMask(provenance.FieldMask);
var observationPath = normalizedMask.Count > 0 ? normalizedMask[0] : fieldPath;
var resolvedMask = normalizedMask.Count > 0 ? normalizedMask : new[] { fieldPath };
var fingerprint = string.Concat(documentId, '|', observationPath);
var chunkId = CreateChunkId(fingerprint);
return new AdvisoryStructuredFieldEntry(
@@ -281,16 +284,27 @@ internal sealed class AdvisoryChunkBuilder
content,
new AdvisoryStructuredFieldProvenance(
documentId,
fieldPath,
observationPath,
provenance.Source,
provenance.Kind,
provenance.Value,
provenance.RecordedAt,
NormalizeFieldMask(provenance.FieldMask)));
resolvedMask));
}
private static IReadOnlyList<string> NormalizeFieldMask(ImmutableArray<string> mask)
=> mask.IsDefaultOrEmpty ? Array.Empty<string>() : mask;
{
if (mask.IsDefaultOrEmpty)
{
return Array.Empty<string>();
}
return mask
.Select(static entry => entry?.Trim())
.Where(static entry => !string.IsNullOrWhiteSpace(entry))
.Select(static entry => entry!)
.ToArray();
}
private string CreateChunkId(string input)
{
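The hunk above changes two things: the field mask is now trimmed and stripped of blank entries, and the chunk fingerprint keys off the first normalized mask entry, falling back to the raw field path when the mask is empty. A standalone sketch of that normalization, with made-up mask values purely for illustration:

```csharp
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;

static IReadOnlyList<string> NormalizeFieldMask(ImmutableArray<string> mask)
{
    if (mask.IsDefaultOrEmpty)
    {
        return Array.Empty<string>();
    }

    // Trim each entry and drop anything blank, mirroring the builder change above.
    return mask
        .Select(static entry => entry?.Trim())
        .Where(static entry => !string.IsNullOrWhiteSpace(entry))
        .Select(static entry => entry!)
        .ToArray();
}

var mask = ImmutableArray.Create("  affected[0].versions  ", " ", "references[1].url"); // illustrative paths
var normalized = NormalizeFieldMask(mask);
var observationPath = normalized.Count > 0 ? normalized[0] : "fallback/fieldPath";

Console.WriteLine(string.Join(", ", normalized)); // affected[0].versions, references[1].url
Console.WriteLine(observationPath);               // affected[0].versions -> used in the chunk fingerprint
```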

View File

@@ -104,13 +104,26 @@ internal sealed class OpenApiDiscoveryDocumentProvider
pathsObject[path] = pathItem;
}
var components = new JsonObject
{
["securitySchemes"] = new JsonObject
{
["Bearer"] = new JsonObject
{
["type"] = "http",
["scheme"] = "bearer",
["bearerFormat"] = "JWT"
}
}
};
return new JsonObject
{
["openapi"] = "3.1.0",
["info"] = info,
["servers"] = servers,
["paths"] = pathsObject,
["components"] = new JsonObject() // ready for future schemas
["components"] = components
};
}

View File

@@ -0,0 +1,30 @@
# Concelier Astra Connector Charter
## Mission
Implement and maintain the Astra Linux advisory connector (OVAL fetch/parse/map).
## Responsibilities
- Maintain `StellaOps.Concelier.Connector.Astra`.
- Keep ingestion deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `AstraConnector.cs`
- `AstraConnectorPlugin.cs`
- `AstraTrustDefaults.cs`
- `Configuration/AstraOptions.cs`
## Coordination
- Concelier connector owners.
## Required Reading
- `docs/modules/concelier/architecture.md`
- `docs/modules/concelier/link-not-merge-schema.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
1. Update task status to `DOING`/`DONE` in both corresponding sprint file and local `TASKS.md`.
2. Keep outputs deterministic (ordering, timestamps, IDs).
3. Avoid network in tests; use fixtures and cached payloads.
4. Log any cross-module edits in the sprint Execution Log.

View File

@@ -0,0 +1,10 @@
# Concelier Astra Connector Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0748-M | DONE | Revalidated 2026-01-07. |
| AUDIT-0748-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0748-A | DONE | Dependencies resolved; builds 0 warnings 2026-01-07. |

View File

@@ -0,0 +1,26 @@
# Concelier BackportProof Charter
## Mission
Define and maintain backport proof logic for Concelier evidence pipelines.
## Responsibilities
- Maintain `StellaOps.Concelier.BackportProof`.
- Keep outputs deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `StellaOps.Concelier.BackportProof.csproj`
## Coordination
- Concelier proof service owners.
## Required Reading
- `docs/modules/concelier/architecture.md`
- `docs/modules/concelier/link-not-merge-schema.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
1. Update task status to `DOING`/`DONE` in both corresponding sprint file and local `TASKS.md`.
2. Keep outputs deterministic (ordering, timestamps, IDs).
3. Avoid cross-module edits without sprint notes.

View File

@@ -0,0 +1,10 @@
# Concelier BackportProof Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0749-M | DONE | Revalidated 2026-01-07. |
| AUDIT-0749-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0749-A | DONE | Already compliant with TreatWarningsAsErrors. |

View File

@@ -1,8 +1,11 @@
# Concelier Analyzer Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0144-A | DONE | Tests for StellaOps.Concelier.Analyzers. |
| AUDIT-0750-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0750-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0750-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -0,0 +1,27 @@
# Concelier Astra Connector Tests Charter
## Mission
Validate Astra connector configuration, plugin registration, and mapping scaffolding with deterministic tests.
## Responsibilities
- Maintain `StellaOps.Concelier.Connector.Astra.Tests`.
- Keep tests deterministic and offline-friendly.
- Surface open work on `TASKS.md`; update statuses (TODO/DOING/DONE/BLOCKED/REVIEW).
## Key Paths
- `AstraConnectorTests.cs`
## Coordination
- Concelier connector owners (StellaOps.Concelier.Connector.Astra).
## Required Reading
- `docs/modules/concelier/architecture.md`
- `docs/modules/concelier/link-not-merge-schema.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
## Working Agreement
1. Update task status to `DOING`/`DONE` in both corresponding sprint file and local `TASKS.md`.
2. Keep tests deterministic (stable ordering, timestamps, IDs).
3. Avoid network in tests; use fixtures and cached payloads.
4. Log any cross-module edits in the sprint Execution Log.

View File

@@ -0,0 +1,10 @@
# Concelier Astra Connector Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0751-M | DONE | Revalidated 2026-01-07 (test project). |
| AUDIT-0751-T | DONE | Revalidated 2026-01-07. |
| AUDIT-0751-A | DONE | Waived (test project; revalidated 2026-01-07). |

View File

@@ -1,5 +1,5 @@
{
"advisoryKey": "CIAD-2024-0005",
"advisoryKey": "certin/CIAD-2024-0005",
"affectedPackages": [
{
"type": "ics-vendor",
@@ -33,16 +33,7 @@
],
"normalizedVersions": [],
"statuses": [],
"provenance": [
{
"source": "cert-in",
"kind": "affected",
"value": "Example Gateway Technologies Pvt Ltd Organisation: Partner Systems Inc. CVE-2024-9990 and CVE-2024-9991 allow remote attackers to execute arbitrary commands. Further information is available from the",
"decisionReason": null,
"recordedAt": "2024-04-20T00:01:00+00:00",
"fieldMask": []
}
]
"provenance": []
}
],
"aliases": [
@@ -81,11 +72,11 @@
{
"kind": "advisory",
"provenance": {
"source": "cert-in",
"kind": "reference",
"value": "https://cert-in.example/advisory/CIAD-2024-0005",
"source": "unknown",
"kind": "unspecified",
"value": null,
"decisionReason": null,
"recordedAt": "2024-04-20T00:01:00+00:00",
"recordedAt": "1970-01-01T00:00:00+00:00",
"fieldMask": []
},
"sourceTag": "cert-in",
@@ -95,11 +86,11 @@
{
"kind": "reference",
"provenance": {
"source": "cert-in",
"kind": "reference",
"value": "https://vendor.example.com/advisories/example-gateway-bulletin",
"source": "unknown",
"kind": "unspecified",
"value": null,
"decisionReason": null,
"recordedAt": "2024-04-20T00:01:00+00:00",
"recordedAt": "1970-01-01T00:00:00+00:00",
"fieldMask": []
},
"sourceTag": null,
@@ -109,11 +100,11 @@
{
"kind": "advisory",
"provenance": {
"source": "cert-in",
"kind": "reference",
"value": "https://www.cve.org/CVERecord?id=CVE-2024-9990",
"source": "unknown",
"kind": "unspecified",
"value": null,
"decisionReason": null,
"recordedAt": "2024-04-20T00:01:00+00:00",
"recordedAt": "1970-01-01T00:00:00+00:00",
"fieldMask": []
},
"sourceTag": "CVE-2024-9990",
@@ -123,11 +114,11 @@
{
"kind": "advisory",
"provenance": {
"source": "cert-in",
"kind": "reference",
"value": "https://www.cve.org/CVERecord?id=CVE-2024-9991",
"source": "unknown",
"kind": "unspecified",
"value": null,
"decisionReason": null,
"recordedAt": "2024-04-20T00:01:00+00:00",
"recordedAt": "1970-01-01T00:00:00+00:00",
"fieldMask": []
},
"sourceTag": "CVE-2024-9991",

View File

@@ -15,8 +15,8 @@ public sealed class CannedHttpMessageHandlerTests
handler.SetFallback(_ => new HttpResponseMessage(HttpStatusCode.NotFound));
using var client = handler.CreateClient();
var firstResponse = await client.GetAsync(requestUri);
var secondResponse = await client.GetAsync(new Uri("https://example.test/other"));
var firstResponse = await client.GetAsync(requestUri, TestContext.Current.CancellationToken);
var secondResponse = await client.GetAsync(new Uri("https://example.test/other"), TestContext.Current.CancellationToken);
Assert.Equal(HttpStatusCode.OK, firstResponse.StatusCode);
Assert.Equal(HttpStatusCode.NotFound, secondResponse.StatusCode);
@@ -32,6 +32,6 @@ public sealed class CannedHttpMessageHandlerTests
handler.AddException(HttpMethod.Get, requestUri, new InvalidOperationException("boom"));
using var client = handler.CreateClient();
await Assert.ThrowsAsync<InvalidOperationException>(() => client.GetAsync(requestUri));
await Assert.ThrowsAsync<InvalidOperationException>(() => client.GetAsync(requestUri, TestContext.Current.CancellationToken));
}
}
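These test changes forward `TestContext.Current.CancellationToken`, the ambient per-test token introduced in xUnit v3, so HTTP calls inside a test abort cleanly when the run is cancelled or times out. A minimal sketch of the same pattern, assuming xUnit v3 and using a stub handler so no network is touched:

```csharp
using System;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

public sealed class AmbientCancellationExampleTests
{
    // Stub handler keeps the example offline, in line with the fixture-only test policy above.
    private sealed class StubHandler : HttpMessageHandler
    {
        protected override Task<HttpResponseMessage> SendAsync(
            HttpRequestMessage request, CancellationToken cancellationToken) =>
            Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK));
    }

    [Fact]
    public async Task GetAsync_flows_the_ambient_test_token()
    {
        using var client = new HttpClient(new StubHandler());

        // In xUnit v3 the framework cancels this token when the test run is aborted.
        var response = await client.GetAsync(
            new Uri("https://example.test/"), TestContext.Current.CancellationToken);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
    }
}
```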
