Sprints completed; new product advisories prepared.

This commit is contained in:
master
2026-01-16 16:30:03 +02:00
parent a927d924e3
commit 4ca3ce8fb4
255 changed files with 42434 additions and 1020 deletions

View File

@@ -58,6 +58,16 @@ internal static class ExportEndpoints
.Produces(StatusCodes.Status200OK, contentType: "application/json")
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
// GET /scans/{scanId}/exports/signed-sbom-archive
// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec Task SBOM-SPEC-010
scansGroup.MapGet("/{scanId}/exports/signed-sbom-archive", HandleExportSignedSbomArchiveAsync)
.WithName("scanner.scans.exports.signedSbomArchive")
.WithTags("Exports", "SBOM", "Signed")
.Produces(StatusCodes.Status200OK, contentType: "application/gzip")
.Produces(StatusCodes.Status200OK, contentType: "application/zstd")
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
}
private static async Task<IResult> HandleExportSarifAsync(
@@ -319,6 +329,144 @@ internal static class ExportEndpoints
"software" or _ => Spdx3ProfileType.Software
};
}
/// <summary>
/// Handles signed SBOM archive export.
/// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec Task SBOM-SPEC-010
/// </summary>
/// <param name="scanId">The scan identifier.</param>
/// <param name="format">SBOM format: spdx-2.3 (default), spdx-3.0.1, cyclonedx-1.7.</param>
/// <param name="compression">Compression: gzip (default), zstd.</param>
/// <param name="includeRekor">Include Rekor proof (default: true).</param>
/// <param name="includeSchemas">Include bundled JSON schemas (default: true).</param>
/// <param name="coordinator">The scan coordinator service.</param>
/// <param name="sbomExportService">The SBOM export service.</param>
/// <param name="archiveBuilder">The signed SBOM archive builder.</param>
/// <param name="context">The HTTP context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The archive stream (200) or a problem result (400 invalid id, 404 missing scan/SBOM).</returns>
private static async Task<IResult> HandleExportSignedSbomArchiveAsync(
    string scanId,
    string? format,
    string? compression,
    bool? includeRekor,
    bool? includeSchemas,
    IScanCoordinator coordinator,
    ISbomExportService sbomExportService,
    ISignedSbomArchiveBuilder archiveBuilder,
    HttpContext context,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(coordinator);
    ArgumentNullException.ThrowIfNull(sbomExportService);
    ArgumentNullException.ThrowIfNull(archiveBuilder);

    // Guard: reject malformed scan identifiers before touching any services.
    if (!ScanId.TryParse(scanId, out var parsed))
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.Validation,
            "Invalid scan identifier",
            StatusCodes.Status400BadRequest,
            detail: "Scan identifier is required.");
    }

    var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false);
    if (snapshot is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "Scan not found",
            StatusCodes.Status404NotFound,
            detail: "Requested scan could not be located.");
    }

    // Export SBOM (spdx-2.3 is the documented default format).
    var selectedFormat = SelectSbomFormat(format ?? "spdx-2.3");
    var selectedProfile = Spdx3ProfileType.Software;
    var sbomExport = await sbomExportService.ExportAsync(
        parsed,
        selectedFormat,
        selectedProfile,
        cancellationToken).ConfigureAwait(false);
    if (sbomExport is null || sbomExport.Bytes is null || sbomExport.Bytes.Length == 0)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "No SBOM data available",
            StatusCodes.Status404NotFound,
            detail: "No SBOM data available for archive export.");
    }

    // Build signed archive request
    // Note: In production, DSSE envelope would come from actual signing service
    var sbomFormatString = selectedFormat switch
    {
        SbomExportFormat.Spdx3 => "spdx-3.0.1",
        SbomExportFormat.Spdx2 => "spdx-2.3",
        SbomExportFormat.CycloneDx => "cyclonedx-1.7",
        _ => "spdx-2.3"
    };
    var request = new SignedSbomArchiveRequest
    {
        ScanId = parsed,
        SbomBytes = sbomExport.Bytes,
        SbomFormat = sbomFormatString,
        DsseEnvelopeBytes = CreatePlaceholderDsseEnvelope(sbomExport.Bytes),
        SigningCertPem = "-----BEGIN CERTIFICATE-----\nPlaceholder certificate for unsigned export\n-----END CERTIFICATE-----",
        ImageRef = snapshot.ImageRef ?? "unknown",
        ImageDigest = snapshot.ImageDigest ?? "sha256:unknown",
        Platform = snapshot.Platform,
        ComponentCount = sbomExport.ComponentCount,
        PackageCount = sbomExport.ComponentCount, // Approximation
        FileCount = 0,
        Operator = context.User?.Identity?.Name,
        IncludeRekorProof = includeRekor ?? true,
        IncludeSchemas = includeSchemas ?? true,
        Compression = compression ?? "gzip"
    };

    var result = await archiveBuilder.BuildAsync(request, cancellationToken).ConfigureAwait(false);

    // Set response headers per spec
    context.Response.Headers["Content-Disposition"] = $"attachment; filename=\"{result.FileName}\"";
    context.Response.Headers["X-SBOM-Digest"] = result.SbomDigest;
    context.Response.Headers["X-Archive-Merkle-Root"] = result.MerkleRoot;
    if (result.RekorLogIndex.HasValue)
    {
        // Invariant culture keeps the header value stable regardless of server locale.
        context.Response.Headers["X-Rekor-Log-Index"] =
            result.RekorLogIndex.Value.ToString(System.Globalization.CultureInfo.InvariantCulture);
    }

    // Fix: stream the archive to the client instead of copying it into a byte
    // array. The previous copy doubled peak memory for large archives and never
    // disposed the builder's stream; Results.Stream disposes it after writing.
    return Results.Stream(result.Stream, result.ContentType);
}
/// <summary>
/// Creates a placeholder DSSE envelope for unsigned exports.
/// In production, this would come from the actual signing service.
/// </summary>
/// <param name="sbomBytes">Raw SBOM bytes embedded as the base64 DSSE payload.</param>
/// <returns>UTF-8 JSON bytes of an unsigned DSSE envelope (empty signatures array).</returns>
private static byte[] CreatePlaceholderDsseEnvelope(byte[] sbomBytes)
{
    var envelope = new
    {
        // DSSE payload is the base64-encoded SBOM document.
        payloadType = "application/vnd.stellaops.sbom+json",
        payload = Convert.ToBase64String(sbomBytes),
        // Placeholder: unsigned exports carry no signatures.
        signatures = Array.Empty<object>()
    };
    // Fix (CA1869): the previous version allocated a new JsonSerializerOptions on
    // every call solely for the camelCase naming policy. The anonymous type's
    // property names are already camelCase, so default serialization produces
    // byte-identical output with no per-call options allocation.
    return System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(envelope);
}
}
/// <summary>

View File

@@ -59,6 +59,16 @@ internal static class ReachabilityEndpoints
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
// GET /scans/{scanId}/reachability/traces/export - Trace export with runtime evidence
scansGroup.MapGet("/{scanId}/reachability/traces/export", HandleTraceExportAsync)
.WithName("scanner.scans.reachability.traces.export")
.WithTags("Reachability")
.Produces<ReachabilityTraceExportDto>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
}
private static async Task<IResult> HandleComputeReachabilityAsync(
@@ -315,9 +325,145 @@ internal static class ReachabilityEndpoints
return Json(response, StatusCodes.Status200OK);
}
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence (SCAN-RT-003)
/// <summary>
/// Exports reachability traces for a scan, optionally filtered and enriched
/// with runtime evidence, as a deterministic JSON document.
/// </summary>
private static async Task<IResult> HandleTraceExportAsync(
    string scanId,
    string? format,
    bool? includeRuntimeEvidence,
    double? minReachabilityScore,
    bool? runtimeConfirmedOnly,
    IScanCoordinator coordinator,
    IReachabilityQueryService queryService,
    HttpContext context,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(coordinator);
    ArgumentNullException.ThrowIfNull(queryService);

    // Guard: reject malformed scan identifiers before touching any services.
    if (!ScanId.TryParse(scanId, out var scanIdentifier))
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.Validation,
            "Invalid scan identifier",
            StatusCodes.Status400BadRequest,
            detail: "Scan identifier is required.");
    }

    var scanSnapshot = await coordinator.GetAsync(scanIdentifier, cancellationToken).ConfigureAwait(false);
    if (scanSnapshot is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "Scan not found",
            StatusCodes.Status404NotFound,
            detail: "Requested scan could not be located.");
    }

    // Normalize the requested format; "ndjson" is an alias for "json-lines",
    // and anything unrecognized falls back to the deterministic default.
    var loweredFormat = format?.ToLowerInvariant();
    var selectedFormat = loweredFormat == "graphson" ? "graphson" : "json-lines";

    var exportOptions = new TraceExportOptions
    {
        Format = selectedFormat,
        IncludeRuntimeEvidence = includeRuntimeEvidence ?? true,
        MinReachabilityScore = minReachabilityScore,
        RuntimeConfirmedOnly = runtimeConfirmedOnly ?? false
    };

    var traceExport = await queryService.ExportTracesAsync(scanIdentifier, exportOptions, cancellationToken).ConfigureAwait(false);
    if (traceExport is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "No reachability data",
            StatusCodes.Status404NotFound,
            detail: "No reachability data found for this scan.");
    }

    // Project graph nodes and edges into transport DTOs.
    var nodeDtos = new List<TraceNodeDto>(traceExport.Nodes.Count);
    foreach (var node in traceExport.Nodes)
    {
        nodeDtos.Add(new TraceNodeDto(
            Id: node.Id,
            SymbolId: node.SymbolId,
            ReachabilityScore: node.ReachabilityScore,
            RuntimeConfirmed: node.RuntimeConfirmed,
            RuntimeObservationCount: node.RuntimeObservationCount,
            Evidence: node.Evidence));
    }

    var edgeDtos = new List<TraceEdgeDto>(traceExport.Edges.Count);
    foreach (var edge in traceExport.Edges)
    {
        edgeDtos.Add(new TraceEdgeDto(
            From: edge.From,
            To: edge.To,
            Kind: edge.Kind,
            Confidence: edge.Confidence,
            RuntimeConfirmed: edge.RuntimeConfirmed,
            RuntimeObservationCount: edge.RuntimeObservationCount,
            Evidence: edge.Evidence));
    }

    var responseDto = new ReachabilityTraceExportDto(
        Format: traceExport.Format,
        CanonicalizationMethod: "StellaOps.Canonical.Json",
        ContentDigest: traceExport.ContentDigest,
        Timestamp: traceExport.Timestamp,
        NodeCount: nodeDtos.Count,
        EdgeCount: edgeDtos.Count,
        RuntimeCoverage: traceExport.RuntimeCoverage,
        AverageReachabilityScore: traceExport.AverageReachabilityScore,
        Nodes: nodeDtos,
        Edges: edgeDtos);

    return Json(responseDto, StatusCodes.Status200OK);
}
/// <summary>
/// Serializes <paramref name="value"/> with the shared serializer options and
/// wraps it in a UTF-8 "application/json" content result with the given status code.
/// </summary>
private static IResult Json<T>(T value, int statusCode) =>
    Results.Content(
        JsonSerializer.Serialize(value, SerializerOptions),
        "application/json",
        System.Text.Encoding.UTF8,
        statusCode);
}
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
// Trace export DTOs
/// <summary>Options for trace export.</summary>
public sealed record TraceExportOptions
{
    /// <summary>Export format: "json-lines" (default) or "graphson".</summary>
    public string Format { get; init; } = "json-lines";
    /// <summary>Whether runtime evidence is included with nodes/edges (default: true).</summary>
    public bool IncludeRuntimeEvidence { get; init; } = true;
    /// <summary>Minimum reachability score filter; null means no filtering.
    /// NOTE(review): presumably applied inclusively by the query service — confirm.</summary>
    public double? MinReachabilityScore { get; init; }
    /// <summary>When true, restrict the export to runtime-confirmed traces.</summary>
    public bool RuntimeConfirmedOnly { get; init; }
}
/// <summary>Trace export response.</summary>
/// <param name="Format">Export format of the underlying trace export.</param>
/// <param name="CanonicalizationMethod">Canonicalization scheme for the content digest (set to "StellaOps.Canonical.Json" by the endpoint).</param>
/// <param name="ContentDigest">Digest of the canonicalized export content.</param>
/// <param name="Timestamp">Timestamp of the export.</param>
/// <param name="NodeCount">Number of entries in <paramref name="Nodes"/>.</param>
/// <param name="EdgeCount">Number of entries in <paramref name="Edges"/>.</param>
/// <param name="RuntimeCoverage">Runtime coverage reported by the export.</param>
/// <param name="AverageReachabilityScore">Average reachability score, if computed.</param>
/// <param name="Nodes">Exported trace nodes.</param>
/// <param name="Edges">Exported trace edges.</param>
public sealed record ReachabilityTraceExportDto(
    string Format,
    string CanonicalizationMethod,
    string ContentDigest,
    DateTimeOffset Timestamp,
    int NodeCount,
    int EdgeCount,
    double RuntimeCoverage,
    double? AverageReachabilityScore,
    IReadOnlyList<TraceNodeDto> Nodes,
    IReadOnlyList<TraceEdgeDto> Edges);
/// <summary>Node in trace export.</summary>
/// <param name="Id">Node identifier within the exported graph.</param>
/// <param name="SymbolId">Identifier of the symbol this node represents.</param>
/// <param name="ReachabilityScore">Reachability score, if computed.</param>
/// <param name="RuntimeConfirmed">Whether the node was observed at runtime, if known.</param>
/// <param name="RuntimeObservationCount">Count of runtime observations, if recorded.</param>
/// <param name="Evidence">Evidence strings attached to the node, if any.</param>
public sealed record TraceNodeDto(
    string Id,
    string SymbolId,
    double? ReachabilityScore,
    bool? RuntimeConfirmed,
    ulong? RuntimeObservationCount,
    IReadOnlyList<string>? Evidence);
/// <summary>Edge in trace export.</summary>
/// <param name="From">Source node identifier.</param>
/// <param name="To">Target node identifier.</param>
/// <param name="Kind">Edge kind as reported by the export.</param>
/// <param name="Confidence">Confidence value for the edge.</param>
/// <param name="RuntimeConfirmed">Whether the edge was observed at runtime, if known.</param>
/// <param name="RuntimeObservationCount">Count of runtime observations, if recorded.</param>
/// <param name="Evidence">Evidence strings attached to the edge, if any.</param>
public sealed record TraceEdgeDto(
    string From,
    string To,
    string Kind,
    double Confidence,
    bool? RuntimeConfirmed,
    ulong? RuntimeObservationCount,
    IReadOnlyList<string>? Evidence);

View File

@@ -12,6 +12,7 @@ using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.Sources.Triggers;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Endpoints;
@@ -301,6 +302,7 @@ internal static class WebhookEndpoints
IEnumerable<ISourceTypeHandler> handlers,
ISourceTriggerDispatcher dispatcher,
ICredentialResolver credentialResolver,
IPrAnnotationWebhookHandler? prAnnotationHandler,
ILogger<WebhookEndpointLogger> logger,
HttpContext context,
CancellationToken ct)
@@ -335,7 +337,9 @@ internal static class WebhookEndpoints
logger,
context,
signatureHeader: "X-Hub-Signature-256",
ct);
ct,
prAnnotationHandler: prAnnotationHandler,
provider: "GitHub");
}
/// <summary>
@@ -348,6 +352,7 @@ internal static class WebhookEndpoints
IEnumerable<ISourceTypeHandler> handlers,
ISourceTriggerDispatcher dispatcher,
ICredentialResolver credentialResolver,
IPrAnnotationWebhookHandler? prAnnotationHandler,
ILogger<WebhookEndpointLogger> logger,
HttpContext context,
CancellationToken ct)
@@ -376,7 +381,9 @@ internal static class WebhookEndpoints
logger,
context,
signatureHeader: "X-Gitlab-Token",
ct);
ct,
prAnnotationHandler: prAnnotationHandler,
provider: "GitLab");
}
/// <summary>
@@ -434,7 +441,9 @@ internal static class WebhookEndpoints
ILogger<WebhookEndpointLogger> logger,
HttpContext context,
string signatureHeader,
CancellationToken ct)
CancellationToken ct,
IPrAnnotationWebhookHandler? prAnnotationHandler = null,
string? provider = null)
{
// Read the raw payload
using var reader = new StreamReader(context.Request.Body);
@@ -525,6 +534,23 @@ internal static class WebhookEndpoints
StatusCodes.Status400BadRequest);
}
// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations (SCANNER-PR-001)
// Extract PR context if this is a PR/MR event
PrWebhookContext? prContext = null;
if (prAnnotationHandler != null && !string.IsNullOrEmpty(provider))
{
prContext = prAnnotationHandler.ExtractPrContext(payload, provider);
if (prContext != null)
{
logger.LogInformation(
"Extracted PR context for {Provider} {Owner}/{Repo}#{PrNumber}",
prContext.Provider,
prContext.Owner,
prContext.Repository,
prContext.PrNumber);
}
}
// Create trigger context
var triggerContext = new TriggerContext
{
@@ -534,6 +560,23 @@ internal static class WebhookEndpoints
WebhookPayload = payload
};
// Add PR context to trigger metadata if available
if (prContext != null)
{
triggerContext.Metadata["pr_provider"] = prContext.Provider;
triggerContext.Metadata["pr_owner"] = prContext.Owner;
triggerContext.Metadata["pr_repository"] = prContext.Repository;
triggerContext.Metadata["pr_number"] = prContext.PrNumber.ToString(System.Globalization.CultureInfo.InvariantCulture);
if (!string.IsNullOrEmpty(prContext.BaseBranch))
triggerContext.Metadata["pr_base_branch"] = prContext.BaseBranch;
if (!string.IsNullOrEmpty(prContext.HeadBranch))
triggerContext.Metadata["pr_head_branch"] = prContext.HeadBranch;
if (!string.IsNullOrEmpty(prContext.BaseCommitSha))
triggerContext.Metadata["pr_base_commit"] = prContext.BaseCommitSha;
if (!string.IsNullOrEmpty(prContext.HeadCommitSha))
triggerContext.Metadata["pr_head_commit"] = prContext.HeadCommitSha;
}
// Dispatch the trigger
try
{
@@ -562,7 +605,14 @@ internal static class WebhookEndpoints
Accepted = true,
Message = $"Queued {result.JobsQueued} scan jobs",
RunId = result.Run?.RunId,
JobsQueued = result.JobsQueued
JobsQueued = result.JobsQueued,
PrContext = prContext != null ? new WebhookPrContextResponse
{
Provider = prContext.Provider,
Owner = prContext.Owner,
Repository = prContext.Repository,
PrNumber = prContext.PrNumber
} : null
});
}
catch (Exception ex)
@@ -598,4 +648,21 @@ public record WebhookResponse
public string? Message { get; init; }
public Guid? RunId { get; init; }
public int JobsQueued { get; init; }
/// <summary>
/// PR context if this webhook was triggered by a PR/MR event.
/// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations (SCANNER-PR-001)
/// </summary>
public WebhookPrContextResponse? PrContext { get; init; }
}
/// <summary>
/// PR context extracted from webhook payload.
/// </summary>
public record WebhookPrContextResponse
{
    /// <summary>SCM provider name (e.g. "GitHub", "GitLab").</summary>
    public string Provider { get; init; } = string.Empty;

    /// <summary>Repository owner or group.</summary>
    public string Owner { get; init; } = string.Empty;

    /// <summary>Repository name.</summary>
    public string Repository { get; init; } = string.Empty;

    /// <summary>PR/MR number.</summary>
    public int PrNumber { get; init; }
}

View File

@@ -0,0 +1,592 @@
// -----------------------------------------------------------------------------
// PrAnnotationWebhookHandler.cs
// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations
// Tasks: SCANNER-PR-001, SCANNER-PR-003
// Description: Integrates PrAnnotationService into webhook handling for PR/MR events.
// SCANNER-PR-003: Posts PR/MR comments and status checks via Integrations SCM clients.
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Integrations.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Triggers;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Handles PR/MR webhook events and coordinates annotation generation.
/// </summary>
public interface IPrAnnotationWebhookHandler
{
    /// <summary>
    /// Extracts PR context from a webhook payload.
    /// </summary>
    /// <param name="payload">Webhook JSON payload.</param>
    /// <param name="provider">Provider type (GitHub, GitLab, etc.); the default
    /// implementation recognizes only GitHub and GitLab.</param>
    /// <returns>PR context if this is a PR event, null otherwise (including when
    /// the payload cannot be parsed as a PR/MR event).</returns>
    PrWebhookContext? ExtractPrContext(JsonDocument payload, string provider);

    /// <summary>
    /// Generates and posts a PR annotation after scan completion.
    /// </summary>
    /// <param name="context">PR context from webhook.</param>
    /// <param name="baseGraphId">Base graph ID (before changes).</param>
    /// <param name="headGraphId">Head graph ID (after changes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Result of annotation posting, including comment URL and status
    /// check state when an SCM client is configured.</returns>
    Task<PrAnnotationPostResult> GenerateAndPostAnnotationAsync(
        PrWebhookContext context,
        string baseGraphId,
        string headGraphId,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Context extracted from a PR/MR webhook event.
/// </summary>
public sealed record PrWebhookContext
{
    /// <summary>
    /// Provider type (GitHub, GitLab, Bitbucket).
    /// </summary>
    public required string Provider { get; init; }

    /// <summary>
    /// Repository owner/organization. For GitLab this is the full namespace
    /// before the last path segment.
    /// </summary>
    public required string Owner { get; init; }

    /// <summary>
    /// Repository name.
    /// </summary>
    public required string Repository { get; init; }

    /// <summary>
    /// PR/MR number. For GitLab this is the merge request's internal id (iid).
    /// </summary>
    public required int PrNumber { get; init; }

    /// <summary>
    /// Base branch name (GitLab: target branch).
    /// </summary>
    public required string BaseBranch { get; init; }

    /// <summary>
    /// Head branch name (GitLab: source branch).
    /// </summary>
    public required string HeadBranch { get; init; }

    /// <summary>
    /// Base commit SHA. Not populated for GitLab payloads.
    /// </summary>
    public string? BaseCommitSha { get; init; }

    /// <summary>
    /// Head commit SHA (GitLab: last_commit id).
    /// </summary>
    public string? HeadCommitSha { get; init; }

    /// <summary>
    /// PR action (opened, synchronize, etc.).
    /// </summary>
    public string? Action { get; init; }

    /// <summary>
    /// PR author username.
    /// </summary>
    public string? Author { get; init; }

    /// <summary>
    /// PR title.
    /// </summary>
    public string? Title { get; init; }
}
/// <summary>
/// Result of posting a PR annotation.
/// </summary>
public sealed record PrAnnotationPostResult
{
    /// <summary>
    /// Whether the annotation was posted successfully.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Error message if posting failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// URL of the posted comment (if available; null when no SCM client is
    /// configured or the comment post failed).
    /// </summary>
    public string? CommentUrl { get; init; }

    /// <summary>
    /// Status check result (if posted), e.g. "success" or "failure".
    /// </summary>
    public string? StatusCheckResult { get; init; }
}
/// <summary>
/// Implementation of PR annotation webhook handling.
/// Sprint: SCANNER-PR-003 - Posts PR/MR comments via Integrations SCM clients.
/// </summary>
public sealed class PrAnnotationWebhookHandler : IPrAnnotationWebhookHandler
{
    private readonly IPrAnnotationService _annotationService;
    private readonly IScmAnnotationClient? _scmAnnotationClient;
    private readonly ILogger<PrAnnotationWebhookHandler> _logger;

    /// <summary>
    /// Maximum retry attempts for transient failures.
    /// </summary>
    private const int MaxRetryAttempts = 3;

    /// <summary>
    /// Initial backoff delay in milliseconds.
    /// </summary>
    private const int InitialBackoffMs = 500;

    /// <summary>
    /// Creates the handler. The SCM client is optional; when absent, annotations
    /// are generated and logged but not posted.
    /// </summary>
    public PrAnnotationWebhookHandler(
        IPrAnnotationService annotationService,
        ILogger<PrAnnotationWebhookHandler> logger,
        IScmAnnotationClient? scmAnnotationClient = null)
    {
        _annotationService = annotationService ?? throw new ArgumentNullException(nameof(annotationService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _scmAnnotationClient = scmAnnotationClient;
    }

    /// <inheritdoc />
    public PrWebhookContext? ExtractPrContext(JsonDocument payload, string provider)
    {
        ArgumentNullException.ThrowIfNull(payload);
        try
        {
            var root = payload.RootElement;
            return provider.ToUpperInvariant() switch
            {
                "GITHUB" => ExtractGitHubPrContext(root),
                "GITLAB" => ExtractGitLabMrContext(root),
                _ => null
            };
        }
        catch (Exception ex)
        {
            // Arbitrary webhook payloads may be malformed; treat that as
            // "not a PR event" rather than failing the webhook pipeline.
            _logger.LogWarning(ex, "Failed to extract PR context from {Provider} webhook payload", provider);
            return null;
        }
    }

    /// <summary>
    /// Extracts PR context from a GitHub pull_request webhook payload.
    /// Returns null when the payload is not a PR event or lacks required fields.
    /// </summary>
    private static PrWebhookContext? ExtractGitHubPrContext(JsonElement root)
    {
        // Check if this is a PR event
        if (!root.TryGetProperty("pull_request", out var pr))
        {
            return null;
        }
        if (!root.TryGetProperty("repository", out var repo))
        {
            return null;
        }

        // Extract owner and repo from "owner/name"
        var fullName = repo.TryGetProperty("full_name", out var fn) ? fn.GetString() : null;
        if (string.IsNullOrEmpty(fullName) || !fullName.Contains('/'))
        {
            return null;
        }
        var parts = fullName.Split('/', 2);

        // Extract PR number
        if (!pr.TryGetProperty("number", out var numProp) || numProp.ValueKind != JsonValueKind.Number)
        {
            return null;
        }

        // Extract branches
        var baseBranch = pr.TryGetProperty("base", out var baseProp) &&
                         baseProp.TryGetProperty("ref", out var baseRef)
            ? baseRef.GetString()
            : null;
        var headBranch = pr.TryGetProperty("head", out var headProp) &&
                         headProp.TryGetProperty("ref", out var headRef)
            ? headRef.GetString()
            : null;
        if (string.IsNullOrEmpty(baseBranch) || string.IsNullOrEmpty(headBranch))
        {
            return null;
        }

        return new PrWebhookContext
        {
            Provider = "GitHub",
            Owner = parts[0],
            Repository = parts[1],
            PrNumber = numProp.GetInt32(),
            BaseBranch = baseBranch,
            HeadBranch = headBranch,
            BaseCommitSha = baseProp.TryGetProperty("sha", out var baseSha) ? baseSha.GetString() : null,
            HeadCommitSha = headProp.TryGetProperty("sha", out var headSha) ? headSha.GetString() : null,
            Action = root.TryGetProperty("action", out var action) ? action.GetString() : null,
            Author = pr.TryGetProperty("user", out var user) &&
                     user.TryGetProperty("login", out var login)
                ? login.GetString()
                : null,
            Title = pr.TryGetProperty("title", out var title) ? title.GetString() : null
        };
    }

    /// <summary>
    /// Extracts MR context from a GitLab merge_request webhook payload.
    /// Returns null when the payload is not an MR event or lacks required fields.
    /// </summary>
    private static PrWebhookContext? ExtractGitLabMrContext(JsonElement root)
    {
        // Check if this is a merge request event
        if (!root.TryGetProperty("object_kind", out var kind) || kind.GetString() != "merge_request")
        {
            return null;
        }
        if (!root.TryGetProperty("object_attributes", out var mr))
        {
            return null;
        }
        if (!root.TryGetProperty("project", out var project))
        {
            return null;
        }

        // Extract project path; namespace may itself contain '/', so split on the last one.
        var pathWithNamespace = project.TryGetProperty("path_with_namespace", out var path)
            ? path.GetString()
            : null;
        if (string.IsNullOrEmpty(pathWithNamespace) || !pathWithNamespace.Contains('/'))
        {
            return null;
        }
        var lastSlash = pathWithNamespace.LastIndexOf('/');
        var owner = pathWithNamespace[..lastSlash];
        var repoName = pathWithNamespace[(lastSlash + 1)..];

        // Extract MR IID (internal ID)
        if (!mr.TryGetProperty("iid", out var iidProp) || iidProp.ValueKind != JsonValueKind.Number)
        {
            return null;
        }

        // Extract branches
        var sourceBranch = mr.TryGetProperty("source_branch", out var srcBranch)
            ? srcBranch.GetString()
            : null;
        var targetBranch = mr.TryGetProperty("target_branch", out var tgtBranch)
            ? tgtBranch.GetString()
            : null;
        if (string.IsNullOrEmpty(sourceBranch) || string.IsNullOrEmpty(targetBranch))
        {
            return null;
        }

        return new PrWebhookContext
        {
            Provider = "GitLab",
            Owner = owner,
            Repository = repoName,
            PrNumber = iidProp.GetInt32(),
            BaseBranch = targetBranch,
            HeadBranch = sourceBranch,
            HeadCommitSha = mr.TryGetProperty("last_commit", out var lastCommit) &&
                            lastCommit.TryGetProperty("id", out var commitId)
                ? commitId.GetString()
                : null,
            Action = mr.TryGetProperty("action", out var action) ? action.GetString() : null,
            Author = root.TryGetProperty("user", out var user) &&
                     user.TryGetProperty("username", out var username)
                ? username.GetString()
                : null,
            Title = mr.TryGetProperty("title", out var title) ? title.GetString() : null
        };
    }

    /// <inheritdoc />
    public async Task<PrAnnotationPostResult> GenerateAndPostAnnotationAsync(
        PrWebhookContext context,
        string baseGraphId,
        string headGraphId,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        _logger.LogInformation(
            "Generating PR annotation for {Provider} {Owner}/{Repo}#{PrNumber}",
            context.Provider,
            context.Owner,
            context.Repository,
            context.PrNumber.ToString(CultureInfo.InvariantCulture));
        try
        {
            // Generate annotation using PrAnnotationService
            var annotationResult = await _annotationService.GenerateAnnotationAsync(
                baseGraphId,
                headGraphId,
                cancellationToken).ConfigureAwait(false);
            if (!annotationResult.Success)
            {
                _logger.LogWarning(
                    "Failed to generate PR annotation for {Owner}/{Repo}#{PrNumber}: {Error}",
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    annotationResult.Error);
                return new PrAnnotationPostResult
                {
                    Success = false,
                    Error = annotationResult.Error
                };
            }

            // SCANNER-PR-003: Post annotation via Integrations SCM annotation clients
            string? commentUrl = null;
            string? statusCheckResult = annotationResult.Summary?.ShouldBlockPr == true ? "failure" : "success";
            if (_scmAnnotationClient != null && !string.IsNullOrEmpty(annotationResult.CommentBody))
            {
                // Post main comment with retry/backoff
                var commentResult = await PostCommentWithRetryAsync(
                    context,
                    annotationResult.CommentBody!,
                    cancellationToken).ConfigureAwait(false);
                if (commentResult.Success && commentResult.Value != null)
                {
                    commentUrl = commentResult.Value.Url;
                    _logger.LogInformation(
                        "Posted PR comment for {Owner}/{Repo}#{PrNumber}: {Url}",
                        context.Owner,
                        context.Repository,
                        context.PrNumber.ToString(CultureInfo.InvariantCulture),
                        commentUrl);
                }
                else if (!commentResult.Success)
                {
                    _logger.LogWarning(
                        "Failed to post PR comment for {Owner}/{Repo}#{PrNumber}: {Error} (Code: {Code})",
                        context.Owner,
                        context.Repository,
                        context.PrNumber.ToString(CultureInfo.InvariantCulture),
                        commentResult.ErrorMessage ?? "unknown",
                        commentResult.ErrorCode ?? "N/A");
                }

                // Post status check (requires a head commit SHA to attach to)
                if (!string.IsNullOrEmpty(context.HeadCommitSha))
                {
                    var statusResult = await PostStatusWithRetryAsync(
                        context,
                        annotationResult.Summary?.ShouldBlockPr == true ? ScmStatusState.Failure : ScmStatusState.Success,
                        annotationResult.Summary?.Summary ?? "Reachability analysis complete",
                        cancellationToken).ConfigureAwait(false);
                    if (statusResult.Success)
                    {
                        statusCheckResult = statusResult.Value?.State.ToString().ToLowerInvariant();
                        _logger.LogInformation(
                            "Posted status check for {Owner}/{Repo}@{Sha}: {State}",
                            context.Owner,
                            context.Repository,
                            context.HeadCommitSha,
                            statusCheckResult);
                    }
                }
            }
            else
            {
                // No SCM client configured - log annotation only
                _logger.LogInformation(
                    "Generated PR annotation for {Provider} {Owner}/{Repo}#{PrNumber} (no SCM client configured): " +
                    "{NewRisks} new risks, {Mitigated} mitigated, block={ShouldBlock}",
                    context.Provider,
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    annotationResult.Summary?.NewRiskCount.ToString(CultureInfo.InvariantCulture) ?? "0",
                    annotationResult.Summary?.MitigatedCount.ToString(CultureInfo.InvariantCulture) ?? "0",
                    annotationResult.Summary?.ShouldBlockPr.ToString(CultureInfo.InvariantCulture) ?? "false");
            }

            return new PrAnnotationPostResult
            {
                Success = true,
                CommentUrl = commentUrl,
                StatusCheckResult = statusCheckResult
            };
        }
        catch (OperationCanceledException)
        {
            // Fix: propagate cancellation instead of reporting it as an
            // annotation failure (the generic catch below used to swallow it).
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "Exception generating PR annotation for {Owner}/{Repo}#{PrNumber}",
                context.Owner,
                context.Repository,
                context.PrNumber.ToString(CultureInfo.InvariantCulture));
            return new PrAnnotationPostResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Posts a PR comment with exponential backoff retry for transient failures.
    /// </summary>
    private async Task<ScmOperationResult<ScmCommentResponse>> PostCommentWithRetryAsync(
        PrWebhookContext context,
        string body,
        CancellationToken cancellationToken)
    {
        var request = new ScmCommentRequest
        {
            Owner = context.Owner,
            Repo = context.Repository,
            PrNumber = context.PrNumber,
            Body = body,
            CommitSha = context.HeadCommitSha,
            // NOTE(review): comment context uses "stellaops-reachability" while
            // status checks use "stellaops/reachability" — confirm the divergence
            // is intentional.
            Context = "stellaops-reachability"
        };
        return await ExecuteWithRetryAsync(
            () => _scmAnnotationClient!.PostCommentAsync(request, cancellationToken),
            "PostComment",
            context,
            cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Posts a status check with exponential backoff retry for transient failures.
    /// </summary>
    private async Task<ScmOperationResult<ScmStatusResponse>> PostStatusWithRetryAsync(
        PrWebhookContext context,
        ScmStatusState state,
        string description,
        CancellationToken cancellationToken)
    {
        var request = new ScmStatusRequest
        {
            Owner = context.Owner,
            Repo = context.Repository,
            CommitSha = context.HeadCommitSha!,
            State = state,
            Context = "stellaops/reachability",
            Description = TruncateDescription(description, 140),
            TargetUrl = null // Could link to evidence pack
        };
        return await ExecuteWithRetryAsync(
            () => _scmAnnotationClient!.PostStatusAsync(request, cancellationToken),
            "PostStatus",
            context,
            cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Executes an SCM operation with exponential backoff retry for transient failures.
    /// Non-transient failures and the final failed attempt are returned as-is.
    /// </summary>
    private async Task<ScmOperationResult<T>> ExecuteWithRetryAsync<T>(
        Func<Task<ScmOperationResult<T>>> operation,
        string operationName,
        PrWebhookContext context,
        CancellationToken cancellationToken)
    {
        ScmOperationResult<T>? lastResult = null;
        var backoffMs = InitialBackoffMs;
        for (var attempt = 1; attempt <= MaxRetryAttempts; attempt++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            lastResult = await operation().ConfigureAwait(false);
            if (lastResult.Success)
            {
                return lastResult;
            }

            // Only retry on transient errors
            if (!lastResult.IsTransient)
            {
                _logger.LogWarning(
                    "{Operation} failed for {Owner}/{Repo}#{PrNumber} with non-transient error: {Error} (Code: {Code})",
                    operationName,
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    lastResult.ErrorMessage ?? "unknown",
                    lastResult.ErrorCode ?? "N/A");
                return lastResult;
            }

            if (attempt < MaxRetryAttempts)
            {
                _logger.LogInformation(
                    "{Operation} failed for {Owner}/{Repo}#{PrNumber} with transient error, " +
                    "retrying in {BackoffMs}ms (attempt {Attempt}/{MaxAttempts}): {Error}",
                    operationName,
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    backoffMs.ToString(CultureInfo.InvariantCulture),
                    attempt.ToString(CultureInfo.InvariantCulture),
                    MaxRetryAttempts.ToString(CultureInfo.InvariantCulture),
                    lastResult.ErrorMessage ?? "unknown");
                await Task.Delay(backoffMs, cancellationToken).ConfigureAwait(false);
                backoffMs *= 2; // Exponential backoff
            }
        }

        _logger.LogWarning(
            "{Operation} failed for {Owner}/{Repo}#{PrNumber} after {MaxAttempts} attempts: {Error}",
            operationName,
            context.Owner,
            context.Repository,
            context.PrNumber.ToString(CultureInfo.InvariantCulture),
            MaxRetryAttempts.ToString(CultureInfo.InvariantCulture),
            lastResult?.ErrorMessage ?? "unknown");
        return lastResult!;
    }

    /// <summary>
    /// Truncates description to fit SCM limits (GitHub status descriptions are max 140 chars).
    /// </summary>
    private static string TruncateDescription(string description, int maxLength)
    {
        if (string.IsNullOrEmpty(description))
        {
            return string.Empty;
        }
        if (description.Length <= maxLength)
        {
            return description;
        }
        return description[..(maxLength - 3)] + "...";
    }
}

View File

@@ -0,0 +1,727 @@
// -----------------------------------------------------------------------------
// SignedSbomArchiveBuilder.cs
// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec
// Tasks: SBOM-SPEC-003 through SBOM-SPEC-009
// Description: Builds signed SBOM archives with verification materials
// -----------------------------------------------------------------------------
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.WebService.Domain;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Service for building signed SBOM archives per signed-sbom-archive-spec.md.
/// </summary>
public interface ISignedSbomArchiveBuilder
{
    /// <summary>
    /// Builds a signed SBOM archive containing the SBOM, signature, metadata, and verification materials.
    /// </summary>
    /// <param name="request">Archive inputs: SBOM bytes and format, DSSE envelope,
    /// signing certificate material, and image/scan metadata.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The built archive (stream, content type, file name, digests,
    /// Merkle root, and optional Rekor log index).</returns>
    Task<SignedSbomArchiveResult> BuildAsync(
        SignedSbomArchiveRequest request,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// Request parameters for building a signed SBOM archive.
/// </summary>
public sealed record SignedSbomArchiveRequest
{
    /// <summary>The scan identifier (used for logging/traceability).</summary>
    public required ScanId ScanId { get; init; }
    /// <summary>SBOM bytes (SPDX or CycloneDX JSON); also hashed to derive the archive id.</summary>
    public required byte[] SbomBytes { get; init; }
    /// <summary>SBOM format (spdx-2.3, spdx-3.0.1, cyclonedx-1.7, etc.); an "spdx" prefix selects SPDX naming, anything else CycloneDX.</summary>
    public required string SbomFormat { get; init; }
    /// <summary>DSSE envelope JSON bytes containing the signature.</summary>
    public required byte[] DsseEnvelopeBytes { get; init; }
    /// <summary>Signing certificate PEM (stored as certs/signing-cert.pem).</summary>
    public required string SigningCertPem { get; init; }
    /// <summary>Certificate chain PEM (optional; stored as certs/signing-chain.pem).</summary>
    public string? SigningChainPem { get; init; }
    /// <summary>Image reference being scanned.</summary>
    public required string ImageRef { get; init; }
    /// <summary>Image digest.</summary>
    public required string ImageDigest { get; init; }
    /// <summary>Platform (e.g., linux/amd64).</summary>
    public string? Platform { get; init; }
    /// <summary>Component count in SBOM (informational, copied into metadata.json).</summary>
    public int ComponentCount { get; init; }
    /// <summary>Package count in SBOM (informational, copied into metadata.json).</summary>
    public int PackageCount { get; init; }
    /// <summary>File count in SBOM (informational, copied into metadata.json).</summary>
    public int FileCount { get; init; }
    /// <summary>Operator identity (e.g., email).</summary>
    public string? Operator { get; init; }
    /// <summary>Signature issuer (e.g., OIDC issuer URL).</summary>
    public string? SignatureIssuer { get; init; }
    /// <summary>Signature subject (e.g., identity email).</summary>
    public string? SignatureSubject { get; init; }
    /// <summary>Signature type (keyless, key-based).</summary>
    public string SignatureType { get; init; } = "keyless";
    /// <summary>Include Rekor transparency proof files in the archive.</summary>
    public bool IncludeRekorProof { get; init; } = true;
    /// <summary>Rekor inclusion proof JSON (optional).</summary>
    public byte[]? RekorInclusionProofBytes { get; init; }
    /// <summary>Rekor checkpoint signature (optional).</summary>
    public byte[]? RekorCheckpointBytes { get; init; }
    /// <summary>Rekor public key PEM (optional).</summary>
    public string? RekorPublicKeyPem { get; init; }
    /// <summary>Rekor log index (optional); passed through to the result unchanged.</summary>
    public long? RekorLogIndex { get; init; }
    /// <summary>Include bundled JSON schemas for offline validation.</summary>
    public bool IncludeSchemas { get; init; } = true;
    /// <summary>Fulcio root CA PEM for keyless verification.</summary>
    public string? FulcioRootPem { get; init; }
    /// <summary>Compression format (gzip or zstd). NOTE(review): the current builder only produces gzip output — confirm before advertising zstd.</summary>
    public string Compression { get; init; } = "gzip";
}
/// <summary>
/// Result of building a signed SBOM archive. The caller owns <see cref="Stream"/> and must dispose it.
/// </summary>
public sealed record SignedSbomArchiveResult
{
    /// <summary>Archive stream, positioned at the start, ready for the HTTP response body.</summary>
    public required Stream Stream { get; init; }
    /// <summary>Archive filename (derived from the SBOM digest and build timestamp).</summary>
    public required string FileName { get; init; }
    /// <summary>Content type for the HTTP response.</summary>
    public required string ContentType { get; init; }
    /// <summary>Archive size in bytes.</summary>
    public required long Size { get; init; }
    /// <summary>SHA-256 digest of the archive (lowercase hex).</summary>
    public required string ArchiveDigest { get; init; }
    /// <summary>SHA-256 digest of the SBOM content (lowercase hex).</summary>
    public required string SbomDigest { get; init; }
    /// <summary>Merkle root over the archive's manifest-listed files ("sha256:..." form).</summary>
    public required string MerkleRoot { get; init; }
    /// <summary>Rekor log index (if applicable); echoed from the request.</summary>
    public long? RekorLogIndex { get; init; }
}
/// <summary>
/// Builds signed SBOM archives per signed-sbom-archive-spec.md.
/// Layout: manifest.json, sbom.*, sbom.dsse.json, certs/, rekor-proof/, schemas/, metadata.json, VERIFY.md,
/// packed into a gzip-compressed ustar tarball rooted at the archive id.
/// </summary>
public sealed class SignedSbomArchiveBuilder : ISignedSbomArchiveBuilder
{
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<SignedSbomArchiveBuilder> _logger;

    // Deterministic, human-readable JSON for metadata.json and manifest.json.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="SignedSbomArchiveBuilder"/> class.
    /// </summary>
    /// <param name="timeProvider">Clock source. All archive timestamps — including tar entry mtimes — derive from it so output is reproducible.</param>
    /// <param name="logger">Diagnostic logger.</param>
    public SignedSbomArchiveBuilder(
        TimeProvider timeProvider,
        ILogger<SignedSbomArchiveBuilder> logger)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<SignedSbomArchiveResult> BuildAsync(
        SignedSbomArchiveRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var timestamp = _timeProvider.GetUtcNow();
        var sbomDigest = ComputeSha256Hex(request.SbomBytes);
        var digestShort = sbomDigest[..12];
        var timestampStr = timestamp.ToString("yyyyMMdd'T'HHmmss'Z'", CultureInfo.InvariantCulture);
        var archiveId = $"signed-sbom-{digestShort}-{timestampStr}";

        _logger.LogInformation(
            "Building signed SBOM archive {ArchiveId} for scan {ScanId}",
            archiveId,
            request.ScanId);

        var files = new List<ArchiveFile>();

        // 1. Add SBOM file
        var sbomFileName = GetSbomFileName(request.SbomFormat);
        files.Add(new ArchiveFile(sbomFileName, request.SbomBytes, GetSbomMediaType(request.SbomFormat)));

        // 2. Add DSSE envelope
        files.Add(new ArchiveFile("sbom.dsse.json", request.DsseEnvelopeBytes, "application/vnd.dsse+json"));

        // 3. Add certificates
        files.Add(new ArchiveFile("certs/signing-cert.pem", Encoding.UTF8.GetBytes(request.SigningCertPem), "application/x-pem-file"));
        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            files.Add(new ArchiveFile("certs/signing-chain.pem", Encoding.UTF8.GetBytes(request.SigningChainPem), "application/x-pem-file"));
        }

        if (!string.IsNullOrEmpty(request.FulcioRootPem))
        {
            files.Add(new ArchiveFile("certs/fulcio-root.pem", Encoding.UTF8.GetBytes(request.FulcioRootPem), "application/x-pem-file"));
        }

        // 4. Add Rekor proof (optional)
        if (request.IncludeRekorProof)
        {
            if (request.RekorInclusionProofBytes is not null)
            {
                files.Add(new ArchiveFile("rekor-proof/inclusion-proof.json", request.RekorInclusionProofBytes, "application/json"));
            }

            if (request.RekorCheckpointBytes is not null)
            {
                files.Add(new ArchiveFile("rekor-proof/checkpoint.sig", request.RekorCheckpointBytes, "application/octet-stream"));
            }

            if (!string.IsNullOrEmpty(request.RekorPublicKeyPem))
            {
                files.Add(new ArchiveFile("rekor-proof/rekor-public.pem", Encoding.UTF8.GetBytes(request.RekorPublicKeyPem), "application/x-pem-file"));
            }
        }

        // 5. Add bundled schemas (optional)
        if (request.IncludeSchemas)
        {
            // Schema stubs - in production, these would be loaded from embedded resources
            files.Add(new ArchiveFile("schemas/README.md", Encoding.UTF8.GetBytes(GenerateSchemasReadme()), "text/markdown"));
        }

        // 6. Create metadata.json (SBOM-SPEC-004, SBOM-SPEC-005)
        var metadata = CreateMetadata(request, timestamp, sbomDigest);
        var metadataBytes = JsonSerializer.SerializeToUtf8Bytes(metadata, JsonOptions);
        files.Add(new ArchiveFile("metadata.json", metadataBytes, "application/json"));

        // 7. Create manifest.json (SBOM-SPEC-006). The manifest covers the files
        //    added so far and is inserted first; it therefore cannot list itself
        //    or VERIFY.md (which embeds the Merkle root and is added afterwards).
        var manifest = CreateManifest(archiveId, timestamp, files);
        var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
        files.Insert(0, new ArchiveFile("manifest.json", manifestBytes, "application/json"));

        // 8. Generate VERIFY.md (SBOM-SPEC-009)
        var verifyMd = GenerateVerifyMd(request, manifest, sbomFileName);
        files.Add(new ArchiveFile("VERIFY.md", Encoding.UTF8.GetBytes(verifyMd), "text/markdown"));

        // 9. Create archive. Only gzip is implemented; a zstd request falls back
        //    to gzip, so the content type must report gzip in either case
        //    (previously the response claimed application/zstd for gzip bytes).
        if (!string.Equals(request.Compression, "gzip", StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogWarning(
                "Compression '{Compression}' is not supported; producing gzip output instead.",
                request.Compression);
        }

        var archiveStream = new MemoryStream();
        await CreateTarGzArchiveAsync(archiveId, files, timestamp, archiveStream, cancellationToken)
            .ConfigureAwait(false);
        archiveStream.Position = 0;

        var archiveDigest = ComputeSha256Hex(archiveStream);
        archiveStream.Position = 0;

        var fileName = $"{archiveId}.tar.gz";
        const string contentType = "application/gzip";

        _logger.LogInformation(
            "Built signed SBOM archive {FileName} ({Size} bytes, digest: {Digest})",
            fileName,
            archiveStream.Length,
            archiveDigest);

        return new SignedSbomArchiveResult
        {
            Stream = archiveStream,
            FileName = fileName,
            ContentType = contentType,
            Size = archiveStream.Length,
            ArchiveDigest = archiveDigest,
            SbomDigest = sbomDigest,
            MerkleRoot = manifest.MerkleRoot,
            RekorLogIndex = request.RekorLogIndex
        };
    }

    /// <summary>Maps an SBOM format id to the canonical in-archive file name (spdx* → sbom.spdx.json, otherwise sbom.cdx.json).</summary>
    private static string GetSbomFileName(string format) => format.StartsWith("spdx", StringComparison.OrdinalIgnoreCase)
        ? "sbom.spdx.json"
        : "sbom.cdx.json";

    /// <summary>Maps an SBOM format id to its media type.</summary>
    private static string GetSbomMediaType(string format) => format.StartsWith("spdx", StringComparison.OrdinalIgnoreCase)
        ? "application/spdx+json"
        : "application/vnd.cyclonedx+json";

    /// <summary>Builds the metadata.json document (SBOM-SPEC-004/005).</summary>
    private static SignedSbomMetadata CreateMetadata(
        SignedSbomArchiveRequest request,
        DateTimeOffset timestamp,
        string sbomDigest)
    {
        return new SignedSbomMetadata
        {
            SchemaVersion = "1.0.0",
            StellaOps = new StellaOpsVersionInfo
            {
                SuiteVersion = GetSuiteVersion(),
                ScannerVersion = GetScannerVersion(),
                ScannerDigest = GetScannerDigest(),
                SignerVersion = "1.0.0",
                SbomServiceVersion = "1.0.0"
            },
            Generation = new GenerationInfo
            {
                Timestamp = timestamp,
                // HLC-style value: unix milliseconds padded with six zero nanosecond digits.
                HlcTimestamp = timestamp.ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture) + "000000",
                Operator = request.Operator
            },
            Input = new InputInfo
            {
                ImageRef = request.ImageRef,
                ImageDigest = request.ImageDigest,
                Platform = request.Platform
            },
            Sbom = new SbomInfo
            {
                Format = request.SbomFormat,
                Digest = sbomDigest,
                ComponentCount = request.ComponentCount,
                PackageCount = request.PackageCount,
                FileCount = request.FileCount
            },
            Signature = new SignatureInfo
            {
                Type = request.SignatureType,
                Issuer = request.SignatureIssuer,
                Subject = request.SignatureSubject,
                SignedAt = timestamp
            },
            Reproducibility = new ReproducibilityInfo
            {
                Deterministic = true,
                ExpectedDigest = sbomDigest
            }
        };
    }

    /// <summary>Builds the manifest.json document (SBOM-SPEC-006) listing per-file digests and the Merkle root.</summary>
    private static SignedSbomManifest CreateManifest(
        string archiveId,
        DateTimeOffset timestamp,
        IReadOnlyList<ArchiveFile> files)
    {
        var fileEntries = files.Select(f => new ManifestFileEntry
        {
            Path = f.Path,
            Sha256 = ComputeSha256Hex(f.Bytes),
            Size = f.Bytes.Length,
            MediaType = f.MediaType
        }).ToList();

        // Compute Merkle root from file hashes (order follows the file list).
        var merkleRoot = ComputeMerkleRoot(fileEntries.Select(f => f.Sha256).ToList());

        return new SignedSbomManifest
        {
            SchemaVersion = "1.0.0",
            ArchiveId = archiveId,
            GeneratedAt = timestamp,
            Files = fileEntries,
            MerkleRoot = $"sha256:{merkleRoot}",
            TotalFiles = fileEntries.Count,
            TotalSize = fileEntries.Sum(f => f.Size)
        };
    }

    /// <summary>Generates the human-readable VERIFY.md with integrity, signature, and transparency-log instructions (SBOM-SPEC-009).</summary>
    private static string GenerateVerifyMd(
        SignedSbomArchiveRequest request,
        SignedSbomManifest manifest,
        string sbomFileName)
    {
        var sb = new StringBuilder();
        sb.AppendLine("# SBOM Archive Verification");
        sb.AppendLine();
        sb.AppendLine("This archive contains a cryptographically signed SBOM with verification materials.");
        sb.AppendLine();
        sb.AppendLine("## Quick Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Verify archive integrity");
        sb.AppendLine("sha256sum -c <<EOF");
        foreach (var file in manifest.Files.Where(f => !f.Path.StartsWith("schemas/", StringComparison.Ordinal)))
        {
            // sha256sum's check-file format requires two spaces between digest and path.
            sb.AppendLine($"{file.Sha256}  {file.Path}");
        }
        sb.AppendLine("EOF");
        sb.AppendLine("```");
        sb.AppendLine();
        sb.AppendLine("## Signature Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Verify signature using cosign");
        sb.AppendLine("cosign verify-blob \\");
        sb.AppendLine("  --signature sbom.dsse.json \\");
        sb.AppendLine("  --certificate certs/signing-cert.pem \\");
        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            sb.AppendLine("  --certificate-chain certs/signing-chain.pem \\");
        }
        sb.AppendLine($"  {sbomFileName}");
        sb.AppendLine("```");
        sb.AppendLine();
        sb.AppendLine("## Offline Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Using bundled Fulcio root");
        sb.AppendLine("cosign verify-blob \\");
        sb.AppendLine("  --signature sbom.dsse.json \\");
        sb.AppendLine("  --certificate certs/signing-cert.pem \\");
        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            sb.AppendLine("  --certificate-chain certs/signing-chain.pem \\");
        }
        if (!string.IsNullOrEmpty(request.SignatureIssuer))
        {
            sb.AppendLine($"  --certificate-oidc-issuer {request.SignatureIssuer} \\");
        }
        sb.AppendLine("  --offline \\");
        sb.AppendLine($"  {sbomFileName}");
        sb.AppendLine("```");
        sb.AppendLine();
        if (request.IncludeRekorProof && request.RekorLogIndex.HasValue)
        {
            sb.AppendLine("## Rekor Transparency Log");
            sb.AppendLine();
            sb.AppendLine($"Log Index: {request.RekorLogIndex}");
            sb.AppendLine();
            sb.AppendLine("```bash");
            sb.AppendLine("# Verify transparency log inclusion");
            sb.AppendLine("rekor-cli verify \\");
            sb.AppendLine($"  --artifact {sbomFileName} \\");
            sb.AppendLine("  --signature sbom.dsse.json \\");
            sb.AppendLine("  --public-key certs/signing-cert.pem \\");
            sb.AppendLine("  --rekor-server https://rekor.sigstore.dev");
            sb.AppendLine("```");
            sb.AppendLine();
        }
        sb.AppendLine("## Archive Contents");
        sb.AppendLine();
        sb.AppendLine("| File | Size | SHA-256 |");
        sb.AppendLine("|------|------|---------|");
        foreach (var file in manifest.Files)
        {
            sb.AppendLine($"| {file.Path} | {file.Size} | {file.Sha256[..12]}... |");
        }
        sb.AppendLine();
        sb.AppendLine($"**Merkle Root**: {manifest.MerkleRoot}");
        sb.AppendLine();
        sb.AppendLine("---");
        sb.AppendLine("Generated by StellaOps Scanner");
        return sb.ToString();
    }

    /// <summary>Generates the placeholder schemas/README.md pointing at upstream schema sources.</summary>
    private static string GenerateSchemasReadme()
    {
        return """
            # Bundled JSON Schemas
            This directory contains JSON schemas for offline validation.
            ## Available Schemas
            For offline SBOM validation, download schemas from:
            - SPDX: https://github.com/spdx/spdx-spec/tree/development/v2.3/schemas
            - CycloneDX: https://github.com/CycloneDX/specification/tree/master/schema
            ## Usage
            ```bash
            # Validate SPDX SBOM
            jsonschema -i sbom.spdx.json schemas/spdx-2.3.schema.json
            # Validate CycloneDX SBOM
            jsonschema -i sbom.cdx.json schemas/cyclonedx-1.7.schema.json
            ```
            """;
    }

    /// <summary>
    /// Writes all files into a ustar tar stream rooted at <paramref name="rootFolder"/> and gzips it
    /// into <paramref name="outputStream"/> (stream is left open for the caller).
    /// </summary>
    private static async Task CreateTarGzArchiveAsync(
        string rootFolder,
        IReadOnlyList<ArchiveFile> files,
        DateTimeOffset timestamp,
        Stream outputStream,
        CancellationToken cancellationToken)
    {
        // Every entry shares the build timestamp; using wall-clock time here would
        // break the archive's Deterministic = true reproducibility claim.
        var mtimeSeconds = timestamp.ToUnixTimeSeconds();

        await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true);
        await using var tarWriter = new MemoryStream();

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var fullPath = $"{rootFolder}/{file.Path}";
            WriteTarEntry(tarWriter, fullPath, file.Bytes, mtimeSeconds);
        }

        // Write end-of-archive markers (two 512-byte zero blocks)
        var endMarker = new byte[1024];
        tarWriter.Write(endMarker);

        tarWriter.Position = 0;
        await tarWriter.CopyToAsync(gzipStream, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Writes one regular-file entry (512-byte ustar header + content padded to a 512-byte boundary).
    /// </summary>
    /// <param name="stream">Destination tar stream.</param>
    /// <param name="path">Entry path; must fit the 100-byte ustar name field.</param>
    /// <param name="content">File bytes.</param>
    /// <param name="mtimeSeconds">Modification time as unix seconds (shared build timestamp).</param>
    private static void WriteTarEntry(Stream stream, string path, byte[] content, long mtimeSeconds)
    {
        // POSIX ustar header (512 bytes)
        var header = new byte[512];

        // File name (100 bytes). Reject oversized paths instead of silently
        // truncating, which would produce a corrupt/ambiguous archive.
        var pathBytes = Encoding.ASCII.GetBytes(path);
        if (pathBytes.Length > 100)
        {
            throw new ArgumentException($"Tar entry path exceeds the 100-byte ustar limit: {path}", nameof(path));
        }
        Array.Copy(pathBytes, 0, header, 0, pathBytes.Length);

        // File mode (8 bytes) - 0644
        Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
        // Owner UID (8 bytes)
        Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
        // Owner GID (8 bytes)
        Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
        // File size in octal (12 bytes)
        var sizeOctal = Convert.ToString(content.Length, 8).PadLeft(11, '0') + "\0";
        Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);
        // Modification time (12 bytes) - deterministic build timestamp in octal
        var mtimeOctal = Convert.ToString(mtimeSeconds, 8).PadLeft(11, '0') + "\0";
        Encoding.ASCII.GetBytes(mtimeOctal).CopyTo(header, 136);
        // Checksum placeholder (8 bytes of spaces, per the ustar spec)
        for (int i = 148; i < 156; i++) header[i] = 0x20;
        // Type flag (1 byte) - '0' for regular file
        header[156] = (byte)'0';
        // Link name (100 bytes) - empty
        // USTAR magic (6 bytes)
        Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
        // USTAR version (2 bytes)
        Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
        // Owner name (32 bytes)
        Encoding.ASCII.GetBytes("stellaops").CopyTo(header, 265);
        // Group name (32 bytes)
        Encoding.ASCII.GetBytes("stellaops").CopyTo(header, 297);

        // Checksum: unsigned sum of all header bytes with the checksum field as spaces.
        var checksum = 0;
        for (int i = 0; i < 512; i++)
        {
            checksum += header[i];
        }
        var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
        Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);

        // Write header
        stream.Write(header);
        // Write content
        stream.Write(content);
        // Pad to 512-byte boundary
        var padding = (512 - (content.Length % 512)) % 512;
        if (padding > 0)
        {
            stream.Write(new byte[padding]);
        }
    }

    /// <summary>SHA-256 of a byte array as lowercase hex.</summary>
    private static string ComputeSha256Hex(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>SHA-256 of a stream (read from current position) as lowercase hex.</summary>
    private static string ComputeSha256Hex(Stream stream)
    {
        var hash = SHA256.HashData(stream);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Computes a Merkle root over hex-digest leaves by pairwise SHA-256 of the
    /// concatenated hex strings; an odd trailing element is promoted unchanged.
    /// Returns an empty string for an empty list.
    /// </summary>
    private static string ComputeMerkleRoot(IReadOnlyList<string> hashes)
    {
        if (hashes.Count == 0)
            return string.Empty;
        if (hashes.Count == 1)
            return hashes[0];

        var currentLevel = hashes.ToList();
        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<string>();
            for (int i = 0; i < currentLevel.Count; i += 2)
            {
                if (i + 1 < currentLevel.Count)
                {
                    var combined = currentLevel[i] + currentLevel[i + 1];
                    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
                    nextLevel.Add(Convert.ToHexString(hash).ToLowerInvariant());
                }
                else
                {
                    // Odd element, promote to next level
                    nextLevel.Add(currentLevel[i]);
                }
            }
            currentLevel = nextLevel;
        }
        return currentLevel[0];
    }

    // TODO(review): placeholder values — wire these to real build/version info.
    private static string GetSuiteVersion() => "2027.Q1";
    private static string GetScannerVersion() => "1.0.0";
    private static string GetScannerDigest() => "sha256:scanner-image-digest";

    /// <summary>One in-archive file: relative path, raw bytes, and media type.</summary>
    private sealed record ArchiveFile(string Path, byte[] Bytes, string MediaType);
}
#region Metadata DTOs
/// <summary>
/// Metadata for signed SBOM archive (serialized as metadata.json).
/// </summary>
public sealed class SignedSbomMetadata
{
    /// <summary>Metadata schema version (e.g., "1.0.0").</summary>
    public required string SchemaVersion { get; init; }
    /// <summary>Versions of the StellaOps components that produced the archive.</summary>
    public required StellaOpsVersionInfo StellaOps { get; init; }
    /// <summary>When and by whom the archive was generated.</summary>
    public required GenerationInfo Generation { get; init; }
    /// <summary>The scanned input (image reference, digest, platform).</summary>
    public required InputInfo Input { get; init; }
    /// <summary>SBOM format, digest, and content counts.</summary>
    public required SbomInfo Sbom { get; init; }
    /// <summary>Signature provenance (type, issuer, subject, signing time).</summary>
    public required SignatureInfo Signature { get; init; }
    /// <summary>Reproducibility claims for the SBOM content.</summary>
    public required ReproducibilityInfo Reproducibility { get; init; }
}
/// <summary>Versions of the StellaOps components involved in producing the archive.</summary>
public sealed class StellaOpsVersionInfo
{
    /// <summary>Overall suite release identifier (e.g., "2027.Q1").</summary>
    public required string SuiteVersion { get; init; }
    /// <summary>Scanner component version.</summary>
    public required string ScannerVersion { get; init; }
    /// <summary>Scanner container image digest ("sha256:..." form).</summary>
    public required string ScannerDigest { get; init; }
    /// <summary>Signer component version.</summary>
    public required string SignerVersion { get; init; }
    /// <summary>SBOM service component version.</summary>
    public required string SbomServiceVersion { get; init; }
}
/// <summary>When and by whom the archive was generated.</summary>
public sealed class GenerationInfo
{
    /// <summary>UTC timestamp at which the archive was built.</summary>
    public required DateTimeOffset Timestamp { get; init; }
    /// <summary>HLC-style timestamp string (unix milliseconds padded with six zero digits).</summary>
    public required string HlcTimestamp { get; init; }
    /// <summary>Operator identity (e.g., email); null when not supplied.</summary>
    public string? Operator { get; init; }
}
/// <summary>The scanned input the SBOM describes.</summary>
public sealed class InputInfo
{
    /// <summary>Image reference that was scanned (e.g., registry/repo:tag).</summary>
    public required string ImageRef { get; init; }
    /// <summary>Image content digest.</summary>
    public required string ImageDigest { get; init; }
    /// <summary>Platform (e.g., linux/amd64); null when not specified.</summary>
    public string? Platform { get; init; }
}
/// <summary>SBOM format, digest, and content counts.</summary>
public sealed class SbomInfo
{
    /// <summary>SBOM format identifier (e.g., spdx-2.3, cyclonedx-1.7).</summary>
    public required string Format { get; init; }
    /// <summary>SHA-256 of the SBOM bytes (lowercase hex).</summary>
    public required string Digest { get; init; }
    /// <summary>Number of components listed in the SBOM.</summary>
    public int ComponentCount { get; init; }
    /// <summary>Number of packages listed in the SBOM.</summary>
    public int PackageCount { get; init; }
    /// <summary>Number of files listed in the SBOM.</summary>
    public int FileCount { get; init; }
}
/// <summary>Signature provenance recorded in metadata.json.</summary>
public sealed class SignatureInfo
{
    /// <summary>Signature type (e.g., "keyless" or "key-based").</summary>
    public required string Type { get; init; }
    /// <summary>Issuer (e.g., OIDC issuer URL); null when unknown.</summary>
    public string? Issuer { get; init; }
    /// <summary>Subject (e.g., identity email); null when unknown.</summary>
    public string? Subject { get; init; }
    /// <summary>Timestamp at which the SBOM was signed.</summary>
    public DateTimeOffset SignedAt { get; init; }
}
/// <summary>Reproducibility claims for the SBOM content.</summary>
public sealed class ReproducibilityInfo
{
    /// <summary>Whether re-running the scan is expected to yield identical SBOM bytes.</summary>
    public bool Deterministic { get; init; }
    /// <summary>Expected SHA-256 of a reproduced SBOM (lowercase hex); null when not applicable.</summary>
    public string? ExpectedDigest { get; init; }
}
#endregion
#region Manifest DTOs
/// <summary>
/// Manifest for signed SBOM archive (serialized as manifest.json): per-file
/// digests plus a Merkle root over the listed file hashes.
/// </summary>
public sealed class SignedSbomManifest
{
    /// <summary>Manifest schema version (e.g., "1.0.0").</summary>
    public required string SchemaVersion { get; init; }
    /// <summary>Archive identifier (also the tar root folder name).</summary>
    public required string ArchiveId { get; init; }
    /// <summary>UTC timestamp at which the manifest was generated.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }
    /// <summary>Entries for every file covered by the manifest.</summary>
    public required IReadOnlyList<ManifestFileEntry> Files { get; init; }
    /// <summary>Merkle root over the file hashes, in "sha256:..." form.</summary>
    public required string MerkleRoot { get; init; }
    /// <summary>Number of files listed.</summary>
    public int TotalFiles { get; init; }
    /// <summary>Sum of listed file sizes in bytes.</summary>
    public long TotalSize { get; init; }
}
/// <summary>One file entry in the archive manifest.</summary>
public sealed class ManifestFileEntry
{
    /// <summary>Path of the file relative to the archive root.</summary>
    public required string Path { get; init; }
    /// <summary>SHA-256 of the file contents (lowercase hex).</summary>
    public required string Sha256 { get; init; }
    /// <summary>File size in bytes. NOTE(review): int caps entries at ~2 GiB — confirm SBOMs never exceed this.</summary>
    public int Size { get; init; }
    /// <summary>Media type of the file (e.g., application/json).</summary>
    public required string MediaType { get; init; }
}
#endregion