sprints completion. new product advisories prepared
This commit is contained in:
@@ -58,6 +58,16 @@ internal static class ExportEndpoints
|
||||
.Produces(StatusCodes.Status200OK, contentType: "application/json")
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
|
||||
// GET /scans/{scanId}/exports/signed-sbom-archive
|
||||
// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec Task SBOM-SPEC-010
|
||||
scansGroup.MapGet("/{scanId}/exports/signed-sbom-archive", HandleExportSignedSbomArchiveAsync)
|
||||
.WithName("scanner.scans.exports.signedSbomArchive")
|
||||
.WithTags("Exports", "SBOM", "Signed")
|
||||
.Produces(StatusCodes.Status200OK, contentType: "application/gzip")
|
||||
.Produces(StatusCodes.Status200OK, contentType: "application/zstd")
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleExportSarifAsync(
|
||||
@@ -319,6 +329,144 @@ internal static class ExportEndpoints
|
||||
"software" or _ => Spdx3ProfileType.Software
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
/// Handles signed SBOM archive export.
/// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec Task SBOM-SPEC-010
/// </summary>
/// <param name="scanId">The scan identifier.</param>
/// <param name="format">SBOM format: spdx-2.3 (default), spdx-3.0.1, cyclonedx-1.7.</param>
/// <param name="compression">Compression: gzip (default), zstd.</param>
/// <param name="includeRekor">Include Rekor proof (default: true).</param>
/// <param name="includeSchemas">Include bundled JSON schemas (default: true).</param>
/// <param name="coordinator">The scan coordinator service.</param>
/// <param name="sbomExportService">The SBOM export service.</param>
/// <param name="archiveBuilder">The signed SBOM archive builder.</param>
/// <param name="context">The HTTP context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The archive bytes on success; an RFC 7807 problem result for invalid/unknown scans.</returns>
private static async Task<IResult> HandleExportSignedSbomArchiveAsync(
    string scanId,
    string? format,
    string? compression,
    bool? includeRekor,
    bool? includeSchemas,
    IScanCoordinator coordinator,
    ISbomExportService sbomExportService,
    ISignedSbomArchiveBuilder archiveBuilder,
    HttpContext context,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(coordinator);
    ArgumentNullException.ThrowIfNull(sbomExportService);
    ArgumentNullException.ThrowIfNull(archiveBuilder);

    if (!ScanId.TryParse(scanId, out var parsed))
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.Validation,
            "Invalid scan identifier",
            StatusCodes.Status400BadRequest,
            detail: "Scan identifier is required.");
    }

    var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false);
    if (snapshot is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "Scan not found",
            StatusCodes.Status404NotFound,
            detail: "Requested scan could not be located.");
    }

    // Export SBOM in the requested (or default) format.
    var selectedFormat = SelectSbomFormat(format ?? "spdx-2.3");
    var selectedProfile = Spdx3ProfileType.Software;

    var sbomExport = await sbomExportService.ExportAsync(
        parsed,
        selectedFormat,
        selectedProfile,
        cancellationToken).ConfigureAwait(false);

    if (sbomExport is null || sbomExport.Bytes is null || sbomExport.Bytes.Length == 0)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "No SBOM data available",
            StatusCodes.Status404NotFound,
            detail: "No SBOM data available for archive export.");
    }

    // Build signed archive request
    // Note: In production, DSSE envelope would come from actual signing service
    var sbomFormatString = selectedFormat switch
    {
        SbomExportFormat.Spdx3 => "spdx-3.0.1",
        SbomExportFormat.Spdx2 => "spdx-2.3",
        SbomExportFormat.CycloneDx => "cyclonedx-1.7",
        _ => "spdx-2.3"
    };

    var request = new SignedSbomArchiveRequest
    {
        ScanId = parsed,
        SbomBytes = sbomExport.Bytes,
        SbomFormat = sbomFormatString,
        DsseEnvelopeBytes = CreatePlaceholderDsseEnvelope(sbomExport.Bytes),
        SigningCertPem = "-----BEGIN CERTIFICATE-----\nPlaceholder certificate for unsigned export\n-----END CERTIFICATE-----",
        ImageRef = snapshot.ImageRef ?? "unknown",
        ImageDigest = snapshot.ImageDigest ?? "sha256:unknown",
        Platform = snapshot.Platform,
        ComponentCount = sbomExport.ComponentCount,
        PackageCount = sbomExport.ComponentCount, // Approximation
        FileCount = 0,
        Operator = context.User?.Identity?.Name,
        IncludeRekorProof = includeRekor ?? true,
        IncludeSchemas = includeSchemas ?? true,
        Compression = compression ?? "gzip"
    };

    var result = await archiveBuilder.BuildAsync(request, cancellationToken).ConfigureAwait(false);

    // Set response headers per spec
    context.Response.Headers["Content-Disposition"] = $"attachment; filename=\"{result.FileName}\"";
    context.Response.Headers["X-SBOM-Digest"] = result.SbomDigest;
    context.Response.Headers["X-Archive-Merkle-Root"] = result.MerkleRoot;

    if (result.RekorLogIndex.HasValue)
    {
        context.Response.Headers["X-Rekor-Log-Index"] = result.RekorLogIndex.Value.ToString();
    }

    // Buffer the archive and dispose the builder's stream once drained so any
    // underlying resources (temp files, pipes) are released promptly; the
    // original left the stream undisposed.
    var bytes = new byte[result.Size];
    await using (var archiveStream = result.Stream)
    {
        await archiveStream.ReadExactlyAsync(bytes, cancellationToken).ConfigureAwait(false);
    }

    return Results.Bytes(bytes, result.ContentType);
}
|
||||
|
||||
/// <summary>
/// Creates a placeholder DSSE envelope for unsigned exports.
/// In production, this would come from the actual signing service.
/// </summary>
/// <param name="sbomBytes">Raw SBOM bytes to embed as the base64-encoded DSSE payload.</param>
/// <returns>UTF-8 JSON bytes of a DSSE envelope with an empty signature list.</returns>
private static byte[] CreatePlaceholderDsseEnvelope(byte[] sbomBytes)
{
    var envelope = new
    {
        payloadType = "application/vnd.stellaops.sbom+json",
        payload = Convert.ToBase64String(sbomBytes),
        signatures = Array.Empty<object>()
    };

    // The anonymous type's property names are already camelCase, so the
    // CamelCase naming policy the original passed was a no-op. Serializing
    // with the default options avoids allocating a JsonSerializerOptions per
    // call and lets System.Text.Json reuse its cached default metadata.
    return System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(envelope);
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -59,6 +59,16 @@ internal static class ReachabilityEndpoints
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
|
||||
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
|
||||
// GET /scans/{scanId}/reachability/traces/export - Trace export with runtime evidence
|
||||
scansGroup.MapGet("/{scanId}/reachability/traces/export", HandleTraceExportAsync)
|
||||
.WithName("scanner.scans.reachability.traces.export")
|
||||
.WithTags("Reachability")
|
||||
.Produces<ReachabilityTraceExportDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleComputeReachabilityAsync(
|
||||
@@ -315,9 +325,145 @@ internal static class ReachabilityEndpoints
|
||||
return Json(response, StatusCodes.Status200OK);
|
||||
}
|
||||
|
||||
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence (SCAN-RT-003)
/// <summary>
/// Exports reachability traces (nodes and edges) for a scan, optionally
/// including runtime evidence and filtering by reachability score.
/// </summary>
/// <param name="scanId">The scan identifier.</param>
/// <param name="format">Export format: "graphson", or "ndjson"/"json-lines" (default json-lines).</param>
/// <param name="includeRuntimeEvidence">Include runtime evidence in the export (default: true).</param>
/// <param name="minReachabilityScore">Minimum reachability score filter; null for no filter.</param>
/// <param name="runtimeConfirmedOnly">Restrict to runtime-confirmed entries (default: false).</param>
/// <param name="coordinator">The scan coordinator service.</param>
/// <param name="queryService">The reachability query service.</param>
/// <param name="context">The HTTP context.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ReachabilityTraceExportDto"/> JSON body, or a problem result.</returns>
private static async Task<IResult> HandleTraceExportAsync(
    string scanId,
    string? format,
    bool? includeRuntimeEvidence,
    double? minReachabilityScore,
    bool? runtimeConfirmedOnly,
    IScanCoordinator coordinator,
    IReachabilityQueryService queryService,
    HttpContext context,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(coordinator);
    ArgumentNullException.ThrowIfNull(queryService);

    if (!ScanId.TryParse(scanId, out var parsed))
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.Validation,
            "Invalid scan identifier",
            StatusCodes.Status400BadRequest,
            detail: "Scan identifier is required.");
    }

    // Verify the scan exists before running the (potentially expensive) export.
    var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false);
    if (snapshot is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "Scan not found",
            StatusCodes.Status404NotFound,
            detail: "Requested scan could not be located.");
    }

    // Determine export format (default to json-lines for determinism)
    var exportFormat = (format?.ToLowerInvariant()) switch
    {
        "graphson" => "graphson",
        "ndjson" or "json-lines" => "json-lines",
        _ => "json-lines"
    };

    var options = new TraceExportOptions
    {
        Format = exportFormat,
        IncludeRuntimeEvidence = includeRuntimeEvidence ?? true,
        MinReachabilityScore = minReachabilityScore,
        RuntimeConfirmedOnly = runtimeConfirmedOnly ?? false
    };

    var export = await queryService.ExportTracesAsync(parsed, options, cancellationToken).ConfigureAwait(false);

    if (export is null)
    {
        return ProblemResultFactory.Create(
            context,
            ProblemTypes.NotFound,
            "No reachability data",
            StatusCodes.Status404NotFound,
            detail: "No reachability data found for this scan.");
    }

    // Map the service export model to the wire DTOs 1:1.
    var response = new ReachabilityTraceExportDto(
        Format: export.Format,
        CanonicalizationMethod: "StellaOps.Canonical.Json",
        ContentDigest: export.ContentDigest,
        Timestamp: export.Timestamp,
        NodeCount: export.Nodes.Count,
        EdgeCount: export.Edges.Count,
        RuntimeCoverage: export.RuntimeCoverage,
        AverageReachabilityScore: export.AverageReachabilityScore,
        Nodes: export.Nodes.Select(n => new TraceNodeDto(
            Id: n.Id,
            SymbolId: n.SymbolId,
            ReachabilityScore: n.ReachabilityScore,
            RuntimeConfirmed: n.RuntimeConfirmed,
            RuntimeObservationCount: n.RuntimeObservationCount,
            Evidence: n.Evidence)).ToList(),
        Edges: export.Edges.Select(e => new TraceEdgeDto(
            From: e.From,
            To: e.To,
            Kind: e.Kind,
            Confidence: e.Confidence,
            RuntimeConfirmed: e.RuntimeConfirmed,
            RuntimeObservationCount: e.RuntimeObservationCount,
            Evidence: e.Evidence)).ToList());

    return Json(response, StatusCodes.Status200OK);
}
|
||||
|
||||
/// <summary>
/// Serializes <paramref name="value"/> with the class-level
/// <c>SerializerOptions</c> and returns it as a UTF-8 JSON content result
/// with the given status code.
/// </summary>
private static IResult Json<T>(T value, int statusCode)
{
    var payload = JsonSerializer.Serialize(value, SerializerOptions);
    return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode);
}
|
||||
}
|
||||
|
||||
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
|
||||
// Trace export DTOs
|
||||
|
||||
/// <summary>Options for trace export.</summary>
public sealed record TraceExportOptions
{
    /// <summary>Export format: "json-lines" (default) or "graphson".</summary>
    public string Format { get; init; } = "json-lines";

    /// <summary>Whether runtime evidence is included in the export (default: true).</summary>
    public bool IncludeRuntimeEvidence { get; init; } = true;

    /// <summary>Minimum reachability score filter; null means no filtering.</summary>
    public double? MinReachabilityScore { get; init; }

    // NOTE(review): presumably restricts output to runtime-confirmed nodes/edges;
    // actual filtering is applied by IReachabilityQueryService — confirm there.
    public bool RuntimeConfirmedOnly { get; init; }
}
|
||||
|
||||
/// <summary>Trace export response.</summary>
/// <param name="Format">Export format ("json-lines" or "graphson").</param>
/// <param name="CanonicalizationMethod">Canonicalization method used for the content digest.</param>
/// <param name="ContentDigest">Digest of the canonicalized export content.</param>
/// <param name="Timestamp">When the export was produced.</param>
/// <param name="NodeCount">Number of nodes in <paramref name="Nodes"/>.</param>
/// <param name="EdgeCount">Number of edges in <paramref name="Edges"/>.</param>
/// <param name="RuntimeCoverage">Runtime coverage metric reported by the query service.</param>
/// <param name="AverageReachabilityScore">Average reachability score, if available.</param>
/// <param name="Nodes">Exported trace nodes.</param>
/// <param name="Edges">Exported trace edges.</param>
public sealed record ReachabilityTraceExportDto(
    string Format,
    string CanonicalizationMethod,
    string ContentDigest,
    DateTimeOffset Timestamp,
    int NodeCount,
    int EdgeCount,
    double RuntimeCoverage,
    double? AverageReachabilityScore,
    IReadOnlyList<TraceNodeDto> Nodes,
    IReadOnlyList<TraceEdgeDto> Edges);
|
||||
|
||||
/// <summary>Node in trace export.</summary>
/// <param name="Id">Unique node identifier.</param>
/// <param name="SymbolId">Identifier of the symbol this node represents.</param>
/// <param name="ReachabilityScore">Reachability score, if computed.</param>
/// <param name="RuntimeConfirmed">Whether runtime evidence confirmed this node, if known.</param>
/// <param name="RuntimeObservationCount">Number of runtime observations, if tracked.</param>
/// <param name="Evidence">Evidence strings attached to the node, if any.</param>
public sealed record TraceNodeDto(
    string Id,
    string SymbolId,
    double? ReachabilityScore,
    bool? RuntimeConfirmed,
    ulong? RuntimeObservationCount,
    IReadOnlyList<string>? Evidence);
|
||||
|
||||
/// <summary>Edge in trace export.</summary>
/// <param name="From">Source node identifier.</param>
/// <param name="To">Target node identifier.</param>
/// <param name="Kind">Edge kind (e.g. call relationship type).</param>
/// <param name="Confidence">Confidence of the edge.</param>
/// <param name="RuntimeConfirmed">Whether runtime evidence confirmed this edge, if known.</param>
/// <param name="RuntimeObservationCount">Number of runtime observations, if tracked.</param>
/// <param name="Evidence">Evidence strings attached to the edge, if any.</param>
public sealed record TraceEdgeDto(
    string From,
    string To,
    string Kind,
    double Confidence,
    bool? RuntimeConfirmed,
    ulong? RuntimeObservationCount,
    IReadOnlyList<string>? Evidence);
||||
|
||||
@@ -12,6 +12,7 @@ using StellaOps.Scanner.Sources.Services;
|
||||
using StellaOps.Scanner.Sources.Triggers;
|
||||
using StellaOps.Scanner.WebService.Constants;
|
||||
using StellaOps.Scanner.WebService.Infrastructure;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
@@ -301,6 +302,7 @@ internal static class WebhookEndpoints
|
||||
IEnumerable<ISourceTypeHandler> handlers,
|
||||
ISourceTriggerDispatcher dispatcher,
|
||||
ICredentialResolver credentialResolver,
|
||||
IPrAnnotationWebhookHandler? prAnnotationHandler,
|
||||
ILogger<WebhookEndpointLogger> logger,
|
||||
HttpContext context,
|
||||
CancellationToken ct)
|
||||
@@ -335,7 +337,9 @@ internal static class WebhookEndpoints
|
||||
logger,
|
||||
context,
|
||||
signatureHeader: "X-Hub-Signature-256",
|
||||
ct);
|
||||
ct,
|
||||
prAnnotationHandler: prAnnotationHandler,
|
||||
provider: "GitHub");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -348,6 +352,7 @@ internal static class WebhookEndpoints
|
||||
IEnumerable<ISourceTypeHandler> handlers,
|
||||
ISourceTriggerDispatcher dispatcher,
|
||||
ICredentialResolver credentialResolver,
|
||||
IPrAnnotationWebhookHandler? prAnnotationHandler,
|
||||
ILogger<WebhookEndpointLogger> logger,
|
||||
HttpContext context,
|
||||
CancellationToken ct)
|
||||
@@ -376,7 +381,9 @@ internal static class WebhookEndpoints
|
||||
logger,
|
||||
context,
|
||||
signatureHeader: "X-Gitlab-Token",
|
||||
ct);
|
||||
ct,
|
||||
prAnnotationHandler: prAnnotationHandler,
|
||||
provider: "GitLab");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -434,7 +441,9 @@ internal static class WebhookEndpoints
|
||||
ILogger<WebhookEndpointLogger> logger,
|
||||
HttpContext context,
|
||||
string signatureHeader,
|
||||
CancellationToken ct)
|
||||
CancellationToken ct,
|
||||
IPrAnnotationWebhookHandler? prAnnotationHandler = null,
|
||||
string? provider = null)
|
||||
{
|
||||
// Read the raw payload
|
||||
using var reader = new StreamReader(context.Request.Body);
|
||||
@@ -525,6 +534,23 @@ internal static class WebhookEndpoints
|
||||
StatusCodes.Status400BadRequest);
|
||||
}
|
||||
|
||||
// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations (SCANNER-PR-001)
|
||||
// Extract PR context if this is a PR/MR event
|
||||
PrWebhookContext? prContext = null;
|
||||
if (prAnnotationHandler != null && !string.IsNullOrEmpty(provider))
|
||||
{
|
||||
prContext = prAnnotationHandler.ExtractPrContext(payload, provider);
|
||||
if (prContext != null)
|
||||
{
|
||||
logger.LogInformation(
|
||||
"Extracted PR context for {Provider} {Owner}/{Repo}#{PrNumber}",
|
||||
prContext.Provider,
|
||||
prContext.Owner,
|
||||
prContext.Repository,
|
||||
prContext.PrNumber);
|
||||
}
|
||||
}
|
||||
|
||||
// Create trigger context
|
||||
var triggerContext = new TriggerContext
|
||||
{
|
||||
@@ -534,6 +560,23 @@ internal static class WebhookEndpoints
|
||||
WebhookPayload = payload
|
||||
};
|
||||
|
||||
// Add PR context to trigger metadata if available
|
||||
if (prContext != null)
|
||||
{
|
||||
triggerContext.Metadata["pr_provider"] = prContext.Provider;
|
||||
triggerContext.Metadata["pr_owner"] = prContext.Owner;
|
||||
triggerContext.Metadata["pr_repository"] = prContext.Repository;
|
||||
triggerContext.Metadata["pr_number"] = prContext.PrNumber.ToString(System.Globalization.CultureInfo.InvariantCulture);
|
||||
if (!string.IsNullOrEmpty(prContext.BaseBranch))
|
||||
triggerContext.Metadata["pr_base_branch"] = prContext.BaseBranch;
|
||||
if (!string.IsNullOrEmpty(prContext.HeadBranch))
|
||||
triggerContext.Metadata["pr_head_branch"] = prContext.HeadBranch;
|
||||
if (!string.IsNullOrEmpty(prContext.BaseCommitSha))
|
||||
triggerContext.Metadata["pr_base_commit"] = prContext.BaseCommitSha;
|
||||
if (!string.IsNullOrEmpty(prContext.HeadCommitSha))
|
||||
triggerContext.Metadata["pr_head_commit"] = prContext.HeadCommitSha;
|
||||
}
|
||||
|
||||
// Dispatch the trigger
|
||||
try
|
||||
{
|
||||
@@ -562,7 +605,14 @@ internal static class WebhookEndpoints
|
||||
Accepted = true,
|
||||
Message = $"Queued {result.JobsQueued} scan jobs",
|
||||
RunId = result.Run?.RunId,
|
||||
JobsQueued = result.JobsQueued
|
||||
JobsQueued = result.JobsQueued,
|
||||
PrContext = prContext != null ? new WebhookPrContextResponse
|
||||
{
|
||||
Provider = prContext.Provider,
|
||||
Owner = prContext.Owner,
|
||||
Repository = prContext.Repository,
|
||||
PrNumber = prContext.PrNumber
|
||||
} : null
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -598,4 +648,21 @@ public record WebhookResponse
|
||||
public string? Message { get; init; }
|
||||
public Guid? RunId { get; init; }
|
||||
public int JobsQueued { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// PR context if this webhook was triggered by a PR/MR event.
|
||||
/// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations (SCANNER-PR-001)
|
||||
/// </summary>
|
||||
public WebhookPrContextResponse? PrContext { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
/// PR context extracted from webhook payload.
/// </summary>
public record WebhookPrContextResponse
{
    /// <summary>SCM provider name (e.g. "GitHub", "GitLab").</summary>
    public string Provider { get; init; } = "";

    /// <summary>Repository owner or organization.</summary>
    public string Owner { get; init; } = "";

    /// <summary>Repository name.</summary>
    public string Repository { get; init; } = "";

    /// <summary>PR/MR number.</summary>
    public int PrNumber { get; init; }
}
|
||||
|
||||
@@ -0,0 +1,592 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PrAnnotationWebhookHandler.cs
|
||||
// Sprint: SPRINT_20260112_007_SCANNER_pr_mr_annotations
|
||||
// Tasks: SCANNER-PR-001, SCANNER-PR-003
|
||||
// Description: Integrates PrAnnotationService into webhook handling for PR/MR events.
|
||||
// SCANNER-PR-003: Posts PR/MR comments and status checks via Integrations SCM clients.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Globalization;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Integrations.Contracts;
|
||||
using StellaOps.Scanner.Sources.Domain;
|
||||
using StellaOps.Scanner.Sources.Triggers;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Handles PR/MR webhook events and coordinates annotation generation.
/// </summary>
public interface IPrAnnotationWebhookHandler
{
    /// <summary>
    /// Extracts PR context from a webhook payload.
    /// </summary>
    /// <param name="payload">Webhook JSON payload.</param>
    /// <param name="provider">Provider type (GitHub, GitLab, etc.).</param>
    /// <returns>PR context if this is a PR event, null otherwise.</returns>
    PrWebhookContext? ExtractPrContext(JsonDocument payload, string provider);

    /// <summary>
    /// Generates and posts a PR annotation after scan completion.
    /// </summary>
    /// <param name="context">PR context from webhook.</param>
    /// <param name="baseGraphId">Base graph ID (before changes).</param>
    /// <param name="headGraphId">Head graph ID (after changes).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Result of annotation posting; implementations report failures
    /// via <see cref="PrAnnotationPostResult.Success"/> rather than throwing.</returns>
    Task<PrAnnotationPostResult> GenerateAndPostAnnotationAsync(
        PrWebhookContext context,
        string baseGraphId,
        string headGraphId,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Context extracted from a PR/MR webhook event.
/// </summary>
public sealed record PrWebhookContext
{
    /// <summary>
    /// Provider type (GitHub, GitLab, Bitbucket).
    /// </summary>
    public required string Provider { get; init; }

    /// <summary>
    /// Repository owner/organization.
    /// </summary>
    public required string Owner { get; init; }

    /// <summary>
    /// Repository name.
    /// </summary>
    public required string Repository { get; init; }

    /// <summary>
    /// PR/MR number. For GitLab this is the merge request IID (project-scoped id).
    /// </summary>
    public required int PrNumber { get; init; }

    /// <summary>
    /// Base branch name (GitLab: target branch).
    /// </summary>
    public required string BaseBranch { get; init; }

    /// <summary>
    /// Head branch name (GitLab: source branch).
    /// </summary>
    public required string HeadBranch { get; init; }

    /// <summary>
    /// Base commit SHA. Not populated for GitLab payloads.
    /// </summary>
    public string? BaseCommitSha { get; init; }

    /// <summary>
    /// Head commit SHA.
    /// </summary>
    public string? HeadCommitSha { get; init; }

    /// <summary>
    /// PR action (opened, synchronize, etc.).
    /// </summary>
    public string? Action { get; init; }

    /// <summary>
    /// PR author username.
    /// </summary>
    public string? Author { get; init; }

    /// <summary>
    /// PR title.
    /// </summary>
    public string? Title { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of posting a PR annotation.
/// </summary>
public sealed record PrAnnotationPostResult
{
    /// <summary>
    /// Whether the annotation was posted successfully.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Error message if posting failed; null on success.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// URL of the posted comment (if available).
    /// </summary>
    public string? CommentUrl { get; init; }

    /// <summary>
    /// Status check result (if posted), e.g. "success" or "failure".
    /// </summary>
    public string? StatusCheckResult { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of PR annotation webhook handling.
|
||||
/// Sprint: SCANNER-PR-003 - Posts PR/MR comments via Integrations SCM clients.
|
||||
/// </summary>
|
||||
public sealed class PrAnnotationWebhookHandler : IPrAnnotationWebhookHandler
|
||||
{
|
||||
// Generates the annotation content (required dependency).
private readonly IPrAnnotationService _annotationService;

// Optional SCM client; when null, annotations are generated and logged only.
private readonly IScmAnnotationClient? _scmAnnotationClient;

private readonly ILogger<PrAnnotationWebhookHandler> _logger;

/// <summary>
/// Maximum retry attempts for transient failures.
/// </summary>
private const int MaxRetryAttempts = 3;

/// <summary>
/// Initial backoff delay in milliseconds.
/// </summary>
private const int InitialBackoffMs = 500;
|
||||
|
||||
/// <summary>
/// Creates the handler. <paramref name="scmAnnotationClient"/> is optional:
/// when absent, comments/status checks are not posted (log-only mode).
/// </summary>
/// <exception cref="ArgumentNullException">
/// When <paramref name="annotationService"/> or <paramref name="logger"/> is null.
/// </exception>
public PrAnnotationWebhookHandler(
    IPrAnnotationService annotationService,
    ILogger<PrAnnotationWebhookHandler> logger,
    IScmAnnotationClient? scmAnnotationClient = null)
{
    _annotationService = annotationService ?? throw new ArgumentNullException(nameof(annotationService));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _scmAnnotationClient = scmAnnotationClient;
}
|
||||
|
||||
/// <inheritdoc />
public PrWebhookContext? ExtractPrContext(JsonDocument payload, string provider)
{
    ArgumentNullException.ThrowIfNull(payload);

    try
    {
        var root = payload.RootElement;

        // Provider matching is case-insensitive; unknown providers yield null
        // (treated as "not a PR event").
        return provider.ToUpperInvariant() switch
        {
            "GITHUB" => ExtractGitHubPrContext(root),
            "GITLAB" => ExtractGitLabMrContext(root),
            _ => null
        };
    }
    catch (Exception ex)
    {
        // Deliberate broad catch: a malformed payload must not fail webhook
        // processing — degrade to "no PR context" and log.
        _logger.LogWarning(ex, "Failed to extract PR context from {Provider} webhook payload", provider);
        return null;
    }
}
|
||||
|
||||
/// <summary>
/// Extracts PR context from a GitHub pull_request webhook payload.
/// Returns null when any required field is missing (not a PR event).
/// </summary>
private static PrWebhookContext? ExtractGitHubPrContext(JsonElement root)
{
    // Check if this is a PR event
    if (!root.TryGetProperty("pull_request", out var pr))
    {
        return null;
    }

    if (!root.TryGetProperty("repository", out var repo))
    {
        return null;
    }

    // Extract owner and repo from "owner/name".
    var fullName = repo.TryGetProperty("full_name", out var fn) ? fn.GetString() : null;
    if (string.IsNullOrEmpty(fullName) || !fullName.Contains('/'))
    {
        return null;
    }

    var parts = fullName.Split('/', 2);

    // Extract PR number
    if (!pr.TryGetProperty("number", out var numProp) || numProp.ValueKind != JsonValueKind.Number)
    {
        return null;
    }

    // Extract branches
    var baseBranch = pr.TryGetProperty("base", out var baseProp) &&
                     baseProp.TryGetProperty("ref", out var baseRef)
        ? baseRef.GetString()
        : null;

    var headBranch = pr.TryGetProperty("head", out var headProp) &&
                     headProp.TryGetProperty("ref", out var headRef)
        ? headRef.GetString()
        : null;

    if (string.IsNullOrEmpty(baseBranch) || string.IsNullOrEmpty(headBranch))
    {
        return null;
    }

    // NOTE: baseProp/headProp are safe to dereference below — a non-null
    // baseBranch/headBranch implies the corresponding TryGetProperty succeeded.
    return new PrWebhookContext
    {
        Provider = "GitHub",
        Owner = parts[0],
        Repository = parts[1],
        PrNumber = numProp.GetInt32(),
        BaseBranch = baseBranch,
        HeadBranch = headBranch,
        BaseCommitSha = baseProp.TryGetProperty("sha", out var baseSha) ? baseSha.GetString() : null,
        HeadCommitSha = headProp.TryGetProperty("sha", out var headSha) ? headSha.GetString() : null,
        Action = root.TryGetProperty("action", out var action) ? action.GetString() : null,
        Author = pr.TryGetProperty("user", out var user) &&
                 user.TryGetProperty("login", out var login)
            ? login.GetString()
            : null,
        Title = pr.TryGetProperty("title", out var title) ? title.GetString() : null
    };
}
|
||||
|
||||
/// <summary>
/// Extracts MR context from a GitLab merge_request webhook payload.
/// Returns null when any required field is missing (not an MR event).
/// Maps GitLab terminology: target branch → base, source branch → head,
/// MR IID → PrNumber. BaseCommitSha is not available in this payload.
/// </summary>
private static PrWebhookContext? ExtractGitLabMrContext(JsonElement root)
{
    // Check if this is a merge request event
    if (!root.TryGetProperty("object_kind", out var kind) || kind.GetString() != "merge_request")
    {
        return null;
    }

    if (!root.TryGetProperty("object_attributes", out var mr))
    {
        return null;
    }

    if (!root.TryGetProperty("project", out var project))
    {
        return null;
    }

    // Extract project path
    var pathWithNamespace = project.TryGetProperty("path_with_namespace", out var path)
        ? path.GetString()
        : null;

    if (string.IsNullOrEmpty(pathWithNamespace) || !pathWithNamespace.Contains('/'))
    {
        return null;
    }

    // Split on the LAST slash so nested groups ("group/subgroup/repo")
    // keep the full namespace as the owner.
    var lastSlash = pathWithNamespace.LastIndexOf('/');
    var owner = pathWithNamespace[..lastSlash];
    var repoName = pathWithNamespace[(lastSlash + 1)..];

    // Extract MR IID (internal ID)
    if (!mr.TryGetProperty("iid", out var iidProp) || iidProp.ValueKind != JsonValueKind.Number)
    {
        return null;
    }

    // Extract branches
    var sourceBranch = mr.TryGetProperty("source_branch", out var srcBranch)
        ? srcBranch.GetString()
        : null;

    var targetBranch = mr.TryGetProperty("target_branch", out var tgtBranch)
        ? tgtBranch.GetString()
        : null;

    if (string.IsNullOrEmpty(sourceBranch) || string.IsNullOrEmpty(targetBranch))
    {
        return null;
    }

    return new PrWebhookContext
    {
        Provider = "GitLab",
        Owner = owner,
        Repository = repoName,
        PrNumber = iidProp.GetInt32(),
        BaseBranch = targetBranch,
        HeadBranch = sourceBranch,
        HeadCommitSha = mr.TryGetProperty("last_commit", out var lastCommit) &&
                        lastCommit.TryGetProperty("id", out var commitId)
            ? commitId.GetString()
            : null,
        Action = mr.TryGetProperty("action", out var action) ? action.GetString() : null,
        Author = root.TryGetProperty("user", out var user) &&
                 user.TryGetProperty("username", out var username)
            ? username.GetString()
            : null,
        Title = mr.TryGetProperty("title", out var title) ? title.GetString() : null
    };
}
|
||||
|
||||
/// <inheritdoc />
public async Task<PrAnnotationPostResult> GenerateAndPostAnnotationAsync(
    PrWebhookContext context,
    string baseGraphId,
    string headGraphId,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(context);

    _logger.LogInformation(
        "Generating PR annotation for {Provider} {Owner}/{Repo}#{PrNumber}",
        context.Provider,
        context.Owner,
        context.Repository,
        context.PrNumber.ToString(CultureInfo.InvariantCulture));

    try
    {
        // Generate annotation using PrAnnotationService
        var annotationResult = await _annotationService.GenerateAnnotationAsync(
            baseGraphId,
            headGraphId,
            cancellationToken);

        if (!annotationResult.Success)
        {
            _logger.LogWarning(
                "Failed to generate PR annotation for {Owner}/{Repo}#{PrNumber}: {Error}",
                context.Owner,
                context.Repository,
                context.PrNumber.ToString(CultureInfo.InvariantCulture),
                annotationResult.Error);

            return new PrAnnotationPostResult
            {
                Success = false,
                Error = annotationResult.Error
            };
        }

        // SCANNER-PR-003: Post annotation via Integrations SCM annotation clients
        string? commentUrl = null;
        // Default status mirrors ShouldBlockPr; overwritten below if a real
        // status check is posted.
        string? statusCheckResult = annotationResult.Summary?.ShouldBlockPr == true ? "failure" : "success";

        if (_scmAnnotationClient != null && !string.IsNullOrEmpty(annotationResult.CommentBody))
        {
            // Post main comment with retry/backoff
            var commentResult = await PostCommentWithRetryAsync(
                context,
                annotationResult.CommentBody!,
                cancellationToken);

            if (commentResult.Success && commentResult.Value != null)
            {
                commentUrl = commentResult.Value.Url;
                _logger.LogInformation(
                    "Posted PR comment for {Owner}/{Repo}#{PrNumber}: {Url}",
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    commentUrl);
            }
            else if (!commentResult.Success)
            {
                // Comment failure is logged but does not fail the overall
                // result — the status check below may still be posted.
                _logger.LogWarning(
                    "Failed to post PR comment for {Owner}/{Repo}#{PrNumber}: {Error} (Code: {Code})",
                    context.Owner,
                    context.Repository,
                    context.PrNumber.ToString(CultureInfo.InvariantCulture),
                    commentResult.ErrorMessage ?? "unknown",
                    commentResult.ErrorCode ?? "N/A");
            }

            // Post status check (requires a head commit SHA to attach to).
            if (!string.IsNullOrEmpty(context.HeadCommitSha))
            {
                var statusResult = await PostStatusWithRetryAsync(
                    context,
                    annotationResult.Summary?.ShouldBlockPr == true ? ScmStatusState.Failure : ScmStatusState.Success,
                    annotationResult.Summary?.Summary ?? "Reachability analysis complete",
                    cancellationToken);

                if (statusResult.Success)
                {
                    statusCheckResult = statusResult.Value?.State.ToString().ToLowerInvariant();
                    _logger.LogInformation(
                        "Posted status check for {Owner}/{Repo}@{Sha}: {State}",
                        context.Owner,
                        context.Repository,
                        context.HeadCommitSha,
                        statusCheckResult);
                }
            }
        }
        else
        {
            // No SCM client configured - log annotation only
            _logger.LogInformation(
                "Generated PR annotation for {Provider} {Owner}/{Repo}#{PrNumber} (no SCM client configured): " +
                "{NewRisks} new risks, {Mitigated} mitigated, block={ShouldBlock}",
                context.Provider,
                context.Owner,
                context.Repository,
                context.PrNumber.ToString(CultureInfo.InvariantCulture),
                annotationResult.Summary?.NewRiskCount.ToString(CultureInfo.InvariantCulture) ?? "0",
                annotationResult.Summary?.MitigatedCount.ToString(CultureInfo.InvariantCulture) ?? "0",
                annotationResult.Summary?.ShouldBlockPr.ToString(CultureInfo.InvariantCulture) ?? "false");
        }

        return new PrAnnotationPostResult
        {
            Success = true,
            CommentUrl = commentUrl,
            StatusCheckResult = statusCheckResult
        };
    }
    catch (Exception ex)
    {
        // Broad catch: annotation posting must never propagate into webhook
        // processing; failures are reported via the result object.
        _logger.LogError(
            ex,
            "Exception generating PR annotation for {Owner}/{Repo}#{PrNumber}",
            context.Owner,
            context.Repository,
            context.PrNumber.ToString(CultureInfo.InvariantCulture));

        return new PrAnnotationPostResult
        {
            Success = false,
            Error = ex.Message
        };
    }
}
|
||||
|
||||
/// <summary>
/// Posts a PR comment to the SCM provider, retrying transient failures with exponential backoff.
/// </summary>
/// <param name="context">Webhook context identifying the target repository and pull request.</param>
/// <param name="body">The rendered comment body.</param>
/// <param name="cancellationToken">Token used to cancel the operation.</param>
/// <returns>The SCM operation result, carrying the created comment on success.</returns>
private async Task<ScmOperationResult<ScmCommentResponse>> PostCommentWithRetryAsync(
    PrWebhookContext context,
    string body,
    CancellationToken cancellationToken)
{
    // Build the provider-agnostic comment payload from the webhook context.
    var commentRequest = new ScmCommentRequest
    {
        Owner = context.Owner,
        Repo = context.Repository,
        PrNumber = context.PrNumber,
        Body = body,
        CommitSha = context.HeadCommitSha,
        Context = "stellaops-reachability"
    };

    // Delegate retry/backoff handling to the shared retry executor.
    return await ExecuteWithRetryAsync(
        () => _scmAnnotationClient!.PostCommentAsync(commentRequest, cancellationToken),
        "PostComment",
        context,
        cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Posts a commit status check to the SCM provider, retrying transient failures with exponential backoff.
/// </summary>
/// <param name="context">Webhook context identifying the repository and head commit.</param>
/// <param name="state">The status state to report.</param>
/// <param name="description">Human-readable status text; truncated to 140 characters for provider limits.</param>
/// <param name="cancellationToken">Token used to cancel the operation.</param>
/// <returns>The SCM operation result for the status post.</returns>
private async Task<ScmOperationResult<ScmStatusResponse>> PostStatusWithRetryAsync(
    PrWebhookContext context,
    ScmStatusState state,
    string description,
    CancellationToken cancellationToken)
{
    // Callers guard on HeadCommitSha being present before invoking this; the
    // null-forgiving operator below reflects that invariant.
    var statusRequest = new ScmStatusRequest
    {
        Owner = context.Owner,
        Repo = context.Repository,
        CommitSha = context.HeadCommitSha!,
        State = state,
        Context = "stellaops/reachability",
        Description = TruncateDescription(description, 140),
        TargetUrl = null // Could link to evidence pack
    };

    // Delegate retry/backoff handling to the shared retry executor.
    return await ExecuteWithRetryAsync(
        () => _scmAnnotationClient!.PostStatusAsync(statusRequest, cancellationToken),
        "PostStatus",
        context,
        cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Executes an SCM operation with exponential backoff retry for transient failures.
/// </summary>
/// <typeparam name="T">Payload type of the SCM operation result.</typeparam>
/// <param name="operation">Factory that performs a single attempt of the SCM call.</param>
/// <param name="operationName">Short operation name used for log correlation.</param>
/// <param name="context">Webhook context, used only for logging.</param>
/// <param name="cancellationToken">Observed before each attempt and during backoff delays.</param>
/// <returns>
/// The first successful result, the first non-transient failure, or the final
/// transient failure after all attempts are exhausted.
/// </returns>
private async Task<ScmOperationResult<T>> ExecuteWithRetryAsync<T>(
    Func<Task<ScmOperationResult<T>>> operation,
    string operationName,
    PrWebhookContext context,
    CancellationToken cancellationToken)
{
    ScmOperationResult<T>? outcome = null;
    var delayMs = InitialBackoffMs;
    var attempt = 0;

    while (attempt < MaxRetryAttempts)
    {
        attempt++;
        cancellationToken.ThrowIfCancellationRequested();

        outcome = await operation();
        if (outcome.Success)
        {
            return outcome;
        }

        // Non-transient failures (e.g. auth/validation) will not improve on retry.
        if (!outcome.IsTransient)
        {
            _logger.LogWarning(
                "{Operation} failed for {Owner}/{Repo}#{PrNumber} with non-transient error: {Error} (Code: {Code})",
                operationName,
                context.Owner,
                context.Repository,
                context.PrNumber.ToString(CultureInfo.InvariantCulture),
                outcome.ErrorMessage ?? "unknown",
                outcome.ErrorCode ?? "N/A");
            return outcome;
        }

        // Transient failure: wait and retry unless this was the last attempt.
        if (attempt < MaxRetryAttempts)
        {
            _logger.LogInformation(
                "{Operation} failed for {Owner}/{Repo}#{PrNumber} with transient error, retrying in {BackoffMs}ms (attempt {Attempt}/{MaxAttempts}): {Error}",
                operationName,
                context.Owner,
                context.Repository,
                context.PrNumber.ToString(CultureInfo.InvariantCulture),
                delayMs.ToString(CultureInfo.InvariantCulture),
                attempt.ToString(CultureInfo.InvariantCulture),
                MaxRetryAttempts.ToString(CultureInfo.InvariantCulture),
                outcome.ErrorMessage ?? "unknown");

            await Task.Delay(delayMs, cancellationToken);
            delayMs *= 2; // Exponential backoff
        }
    }

    // All attempts consumed; surface the last transient failure.
    _logger.LogWarning(
        "{Operation} failed for {Owner}/{Repo}#{PrNumber} after {MaxAttempts} attempts: {Error}",
        operationName,
        context.Owner,
        context.Repository,
        context.PrNumber.ToString(CultureInfo.InvariantCulture),
        MaxRetryAttempts.ToString(CultureInfo.InvariantCulture),
        outcome?.ErrorMessage ?? "unknown");

    return outcome!;
}
|
||||
|
||||
/// <summary>
/// Truncates description to fit SCM limits (GitHub status descriptions are max 140 chars).
/// Appends "..." when truncation occurs; for limits of 3 or fewer characters the
/// text is hard-truncated instead (the previous implementation threw
/// <see cref="ArgumentOutOfRangeException"/> for maxLength &lt; 3).
/// </summary>
/// <param name="description">Text to truncate; null/empty yields an empty string.</param>
/// <param name="maxLength">Maximum allowed length of the returned string.</param>
/// <returns>A string no longer than <paramref name="maxLength"/>.</returns>
private static string TruncateDescription(string description, int maxLength)
{
    if (string.IsNullOrEmpty(description))
    {
        return string.Empty;
    }

    if (description.Length <= maxLength)
    {
        return description;
    }

    // No room for a meaningful ellipsis; hard-truncate (clamp negative limits to 0).
    if (maxLength <= 3)
    {
        return description[..Math.Max(maxLength, 0)];
    }

    return description[..(maxLength - 3)] + "...";
}
|
||||
}
|
||||
@@ -0,0 +1,727 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SignedSbomArchiveBuilder.cs
|
||||
// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec
|
||||
// Tasks: SBOM-SPEC-003 through SBOM-SPEC-009
|
||||
// Description: Builds signed SBOM archives with verification materials
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Service for building signed SBOM archives per signed-sbom-archive-spec.md.
/// </summary>
public interface ISignedSbomArchiveBuilder
{
    /// <summary>
    /// Builds a signed SBOM archive containing the SBOM, signature, metadata, and verification materials.
    /// </summary>
    /// <param name="request">Inputs for the archive: SBOM bytes, DSSE envelope, certificates, and options.</param>
    /// <param name="cancellationToken">Token used to cancel the build.</param>
    /// <returns>The built archive stream together with digests and file metadata.</returns>
    Task<SignedSbomArchiveResult> BuildAsync(
        SignedSbomArchiveRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Request parameters for building a signed SBOM archive.
/// </summary>
public sealed record SignedSbomArchiveRequest
{
    /// <summary>The scan identifier.</summary>
    public required ScanId ScanId { get; init; }

    /// <summary>SBOM bytes (SPDX or CycloneDX JSON), stored and hashed verbatim.</summary>
    public required byte[] SbomBytes { get; init; }

    /// <summary>SBOM format (spdx-2.3, spdx-3.0.1, cyclonedx-1.7, etc.).</summary>
    public required string SbomFormat { get; init; }

    /// <summary>DSSE envelope JSON bytes containing the signature.</summary>
    public required byte[] DsseEnvelopeBytes { get; init; }

    /// <summary>Signing certificate PEM.</summary>
    public required string SigningCertPem { get; init; }

    /// <summary>Certificate chain PEM (optional; omitted from the archive when null/empty).</summary>
    public string? SigningChainPem { get; init; }

    /// <summary>Image reference being scanned.</summary>
    public required string ImageRef { get; init; }

    /// <summary>Image digest.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>Platform (e.g., linux/amd64).</summary>
    public string? Platform { get; init; }

    /// <summary>Component count in SBOM.</summary>
    public int ComponentCount { get; init; }

    /// <summary>Package count in SBOM.</summary>
    public int PackageCount { get; init; }

    /// <summary>File count in SBOM.</summary>
    public int FileCount { get; init; }

    /// <summary>Operator identity (e.g., email).</summary>
    public string? Operator { get; init; }

    /// <summary>Signature issuer (e.g., OIDC issuer URL).</summary>
    public string? SignatureIssuer { get; init; }

    /// <summary>Signature subject (e.g., identity email).</summary>
    public string? SignatureSubject { get; init; }

    /// <summary>Signature type (keyless, key-based).</summary>
    public string SignatureType { get; init; } = "keyless";

    /// <summary>Include Rekor transparency proof.</summary>
    public bool IncludeRekorProof { get; init; } = true;

    /// <summary>Rekor inclusion proof JSON (optional).</summary>
    public byte[]? RekorInclusionProofBytes { get; init; }

    /// <summary>Rekor checkpoint signature (optional).</summary>
    public byte[]? RekorCheckpointBytes { get; init; }

    /// <summary>Rekor public key PEM (optional).</summary>
    public string? RekorPublicKeyPem { get; init; }

    /// <summary>Rekor log index (optional).</summary>
    public long? RekorLogIndex { get; init; }

    /// <summary>Include bundled JSON schemas for offline validation.</summary>
    public bool IncludeSchemas { get; init; } = true;

    /// <summary>Fulcio root CA PEM for keyless verification.</summary>
    public string? FulcioRootPem { get; init; }

    /// <summary>
    /// Compression format (gzip or zstd).
    /// NOTE(review): the current builder only implements gzip — confirm zstd support
    /// before exposing it to callers.
    /// </summary>
    public string Compression { get; init; } = "gzip";
}
|
||||
|
||||
/// <summary>
/// Result of building a signed SBOM archive.
/// </summary>
public sealed record SignedSbomArchiveResult
{
    /// <summary>Archive stream, positioned at the start; the caller owns disposal.</summary>
    public required Stream Stream { get; init; }

    /// <summary>Archive filename (e.g. "signed-sbom-&lt;digest12&gt;-&lt;timestamp&gt;.tar.gz").</summary>
    public required string FileName { get; init; }

    /// <summary>Content type of the archive stream.</summary>
    public required string ContentType { get; init; }

    /// <summary>Archive size in bytes.</summary>
    public required long Size { get; init; }

    /// <summary>SHA-256 digest of the archive (lowercase hex, no "sha256:" prefix).</summary>
    public required string ArchiveDigest { get; init; }

    /// <summary>SHA-256 digest of the SBOM content (lowercase hex, no "sha256:" prefix).</summary>
    public required string SbomDigest { get; init; }

    /// <summary>Merkle root of archive files, prefixed with "sha256:".</summary>
    public required string MerkleRoot { get; init; }

    /// <summary>Rekor log index (if applicable).</summary>
    public long? RekorLogIndex { get; init; }
}
|
||||
|
||||
/// <summary>
/// Builds signed SBOM archives per signed-sbom-archive-spec.md.
/// The archive is a gzip-compressed USTAR tarball containing the SBOM, its DSSE
/// signature, certificates, optional Rekor proof materials, metadata.json, a
/// manifest.json with per-file digests and a Merkle root, and VERIFY.md with
/// human-readable verification instructions.
/// </summary>
public sealed class SignedSbomArchiveBuilder : ISignedSbomArchiveBuilder
{
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<SignedSbomArchiveBuilder> _logger;

    // Serializer settings for metadata.json / manifest.json: indented camelCase
    // JSON with null-valued optional fields omitted.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="SignedSbomArchiveBuilder"/> class.
    /// </summary>
    /// <param name="timeProvider">Clock abstraction; all archive timestamps (including tar entry mtimes) derive from it so builds are reproducible.</param>
    /// <param name="logger">Logger for build progress and warnings.</param>
    public SignedSbomArchiveBuilder(
        TimeProvider timeProvider,
        ILogger<SignedSbomArchiveBuilder> logger)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<SignedSbomArchiveResult> BuildAsync(
        SignedSbomArchiveRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var timestamp = _timeProvider.GetUtcNow();
        var sbomDigest = ComputeSha256Hex(request.SbomBytes);
        var digestShort = sbomDigest[..12];
        var timestampStr = timestamp.ToString("yyyyMMdd'T'HHmmss'Z'", System.Globalization.CultureInfo.InvariantCulture);
        var archiveId = $"signed-sbom-{digestShort}-{timestampStr}";

        _logger.LogInformation(
            "Building signed SBOM archive {ArchiveId} for scan {ScanId}",
            archiveId,
            request.ScanId);

        var files = new List<ArchiveFile>();

        // 1. Add SBOM file.
        var sbomFileName = GetSbomFileName(request.SbomFormat);
        files.Add(new ArchiveFile(sbomFileName, request.SbomBytes, GetSbomMediaType(request.SbomFormat)));

        // 2. Add DSSE envelope carrying the signature over the SBOM.
        files.Add(new ArchiveFile("sbom.dsse.json", request.DsseEnvelopeBytes, "application/vnd.dsse+json"));

        // 3. Add certificates needed for verification (chain and Fulcio root are optional).
        files.Add(new ArchiveFile("certs/signing-cert.pem", Encoding.UTF8.GetBytes(request.SigningCertPem), "application/x-pem-file"));

        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            files.Add(new ArchiveFile("certs/signing-chain.pem", Encoding.UTF8.GetBytes(request.SigningChainPem), "application/x-pem-file"));
        }

        if (!string.IsNullOrEmpty(request.FulcioRootPem))
        {
            files.Add(new ArchiveFile("certs/fulcio-root.pem", Encoding.UTF8.GetBytes(request.FulcioRootPem), "application/x-pem-file"));
        }

        // 4. Add Rekor transparency-log proof materials (optional).
        if (request.IncludeRekorProof)
        {
            if (request.RekorInclusionProofBytes is not null)
            {
                files.Add(new ArchiveFile("rekor-proof/inclusion-proof.json", request.RekorInclusionProofBytes, "application/json"));
            }

            if (request.RekorCheckpointBytes is not null)
            {
                files.Add(new ArchiveFile("rekor-proof/checkpoint.sig", request.RekorCheckpointBytes, "application/octet-stream"));
            }

            if (!string.IsNullOrEmpty(request.RekorPublicKeyPem))
            {
                files.Add(new ArchiveFile("rekor-proof/rekor-public.pem", Encoding.UTF8.GetBytes(request.RekorPublicKeyPem), "application/x-pem-file"));
            }
        }

        // 5. Add bundled schemas (optional).
        if (request.IncludeSchemas)
        {
            // Schema stubs - in production, these would be loaded from embedded resources
            files.Add(new ArchiveFile("schemas/README.md", Encoding.UTF8.GetBytes(GenerateSchemasReadme()), "text/markdown"));
        }

        // 6. Create metadata.json (SBOM-SPEC-004, SBOM-SPEC-005).
        var metadata = CreateMetadata(request, timestamp, sbomDigest);
        var metadataBytes = JsonSerializer.SerializeToUtf8Bytes(metadata, JsonOptions);
        files.Add(new ArchiveFile("metadata.json", metadataBytes, "application/json"));

        // 7. Create manifest.json (SBOM-SPEC-006). Built before it is inserted so
        // the manifest never has to reference its own hash.
        var manifest = CreateManifest(archiveId, timestamp, files);
        var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
        files.Insert(0, new ArchiveFile("manifest.json", manifestBytes, "application/json"));

        // 8. Generate VERIFY.md (SBOM-SPEC-009). Added after the manifest is built,
        // so it is intentionally absent from the manifest's file list.
        var verifyMd = GenerateVerifyMd(request, manifest, sbomFileName);
        files.Add(new ArchiveFile("VERIFY.md", Encoding.UTF8.GetBytes(verifyMd), "text/markdown"));

        // 9. Create the archive. The archive build timestamp is reused for tar
        // entry mtimes: the previous implementation stamped DateTimeOffset.UtcNow
        // per entry, which made byte-identical inputs produce different archives
        // and contradicted the Deterministic=true claim in metadata.json.
        var archiveStream = new MemoryStream();
        await CreateTarGzArchiveAsync(archiveId, files, archiveStream, timestamp, cancellationToken)
            .ConfigureAwait(false);

        archiveStream.Position = 0;
        var archiveDigest = ComputeSha256Hex(archiveStream);
        archiveStream.Position = 0;

        // Only gzip is currently implemented. Previously a "zstd" request was served
        // gzip bytes labelled "application/zstd"; report the real encoding instead.
        if (string.Equals(request.Compression, "zstd", StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogWarning(
                "zstd compression requested for archive {ArchiveId} but is not implemented; using gzip",
                archiveId);
        }

        var fileName = $"{archiveId}.tar.gz";
        var contentType = "application/gzip";

        _logger.LogInformation(
            "Built signed SBOM archive {FileName} ({Size} bytes, digest: {Digest})",
            fileName,
            archiveStream.Length,
            archiveDigest);

        return new SignedSbomArchiveResult
        {
            Stream = archiveStream,
            FileName = fileName,
            ContentType = contentType,
            Size = archiveStream.Length,
            ArchiveDigest = archiveDigest,
            SbomDigest = sbomDigest,
            MerkleRoot = manifest.MerkleRoot,
            RekorLogIndex = request.RekorLogIndex
        };
    }

    // Maps the SBOM format identifier to the canonical in-archive file name.
    private static string GetSbomFileName(string format) => format.StartsWith("spdx", StringComparison.OrdinalIgnoreCase)
        ? "sbom.spdx.json"
        : "sbom.cdx.json";

    // Maps the SBOM format identifier to its IANA media type.
    private static string GetSbomMediaType(string format) => format.StartsWith("spdx", StringComparison.OrdinalIgnoreCase)
        ? "application/spdx+json"
        : "application/vnd.cyclonedx+json";

    // Builds the metadata.json payload (SBOM-SPEC-004 / SBOM-SPEC-005).
    private static SignedSbomMetadata CreateMetadata(
        SignedSbomArchiveRequest request,
        DateTimeOffset timestamp,
        string sbomDigest)
    {
        return new SignedSbomMetadata
        {
            SchemaVersion = "1.0.0",
            StellaOps = new StellaOpsVersionInfo
            {
                SuiteVersion = GetSuiteVersion(),
                ScannerVersion = GetScannerVersion(),
                ScannerDigest = GetScannerDigest(),
                SignerVersion = "1.0.0",
                SbomServiceVersion = "1.0.0"
            },
            Generation = new GenerationInfo
            {
                Timestamp = timestamp,
                // Unix milliseconds padded with six zeros to nanosecond width;
                // placeholder until a real HLC source is wired in.
                HlcTimestamp = timestamp.ToUnixTimeMilliseconds().ToString(System.Globalization.CultureInfo.InvariantCulture) + "000000",
                Operator = request.Operator
            },
            Input = new InputInfo
            {
                ImageRef = request.ImageRef,
                ImageDigest = request.ImageDigest,
                Platform = request.Platform
            },
            Sbom = new SbomInfo
            {
                Format = request.SbomFormat,
                Digest = sbomDigest,
                ComponentCount = request.ComponentCount,
                PackageCount = request.PackageCount,
                FileCount = request.FileCount
            },
            Signature = new SignatureInfo
            {
                Type = request.SignatureType,
                Issuer = request.SignatureIssuer,
                Subject = request.SignatureSubject,
                SignedAt = timestamp
            },
            Reproducibility = new ReproducibilityInfo
            {
                Deterministic = true,
                ExpectedDigest = sbomDigest
            }
        };
    }

    // Builds the manifest.json payload (SBOM-SPEC-006): per-file SHA-256 digests
    // plus a Merkle root computed over those digests.
    private static SignedSbomManifest CreateManifest(
        string archiveId,
        DateTimeOffset timestamp,
        IReadOnlyList<ArchiveFile> files)
    {
        var fileEntries = files.Select(f => new ManifestFileEntry
        {
            Path = f.Path,
            Sha256 = ComputeSha256Hex(f.Bytes),
            Size = f.Bytes.Length,
            MediaType = f.MediaType
        }).ToList();

        // Compute Merkle root from file hashes.
        var merkleRoot = ComputeMerkleRoot(fileEntries.Select(f => f.Sha256).ToList());

        return new SignedSbomManifest
        {
            SchemaVersion = "1.0.0",
            ArchiveId = archiveId,
            GeneratedAt = timestamp,
            Files = fileEntries,
            MerkleRoot = $"sha256:{merkleRoot}",
            TotalFiles = fileEntries.Count,
            TotalSize = fileEntries.Sum(f => f.Size)
        };
    }

    // Renders VERIFY.md (SBOM-SPEC-009): shell snippets for integrity, signature,
    // offline, and Rekor verification, plus a table of archive contents.
    private static string GenerateVerifyMd(
        SignedSbomArchiveRequest request,
        SignedSbomManifest manifest,
        string sbomFileName)
    {
        var sb = new StringBuilder();
        sb.AppendLine("# SBOM Archive Verification");
        sb.AppendLine();
        sb.AppendLine("This archive contains a cryptographically signed SBOM with verification materials.");
        sb.AppendLine();
        sb.AppendLine("## Quick Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Verify archive integrity");
        sb.AppendLine("sha256sum -c <<EOF");

        // Schema stubs are informational only and excluded from the checksum list.
        foreach (var file in manifest.Files.Where(f => !f.Path.StartsWith("schemas/", StringComparison.Ordinal)))
        {
            sb.AppendLine($"{file.Sha256} {file.Path}");
        }

        sb.AppendLine("EOF");
        sb.AppendLine("```");
        sb.AppendLine();
        sb.AppendLine("## Signature Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Verify signature using cosign");
        sb.AppendLine("cosign verify-blob \\");
        sb.AppendLine("  --signature sbom.dsse.json \\");
        sb.AppendLine("  --certificate certs/signing-cert.pem \\");

        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            sb.AppendLine("  --certificate-chain certs/signing-chain.pem \\");
        }

        sb.AppendLine($"  {sbomFileName}");
        sb.AppendLine("```");
        sb.AppendLine();
        sb.AppendLine("## Offline Verification");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("# Using bundled Fulcio root");
        sb.AppendLine("cosign verify-blob \\");
        sb.AppendLine("  --signature sbom.dsse.json \\");
        sb.AppendLine("  --certificate certs/signing-cert.pem \\");

        if (!string.IsNullOrEmpty(request.SigningChainPem))
        {
            sb.AppendLine("  --certificate-chain certs/signing-chain.pem \\");
        }

        if (!string.IsNullOrEmpty(request.SignatureIssuer))
        {
            sb.AppendLine($"  --certificate-oidc-issuer {request.SignatureIssuer} \\");
        }

        sb.AppendLine("  --offline \\");
        sb.AppendLine($"  {sbomFileName}");
        sb.AppendLine("```");
        sb.AppendLine();

        if (request.IncludeRekorProof && request.RekorLogIndex.HasValue)
        {
            sb.AppendLine("## Rekor Transparency Log");
            sb.AppendLine();
            sb.AppendLine($"Log Index: {request.RekorLogIndex}");
            sb.AppendLine();
            sb.AppendLine("```bash");
            sb.AppendLine("# Verify transparency log inclusion");
            sb.AppendLine("rekor-cli verify \\");
            sb.AppendLine($"  --artifact {sbomFileName} \\");
            sb.AppendLine("  --signature sbom.dsse.json \\");
            sb.AppendLine("  --public-key certs/signing-cert.pem \\");
            sb.AppendLine("  --rekor-server https://rekor.sigstore.dev");
            sb.AppendLine("```");
            sb.AppendLine();
        }

        sb.AppendLine("## Archive Contents");
        sb.AppendLine();
        sb.AppendLine("| File | Size | SHA-256 |");
        sb.AppendLine("|------|------|---------|");

        foreach (var file in manifest.Files)
        {
            sb.AppendLine($"| {file.Path} | {file.Size} | {file.Sha256[..12]}... |");
        }

        sb.AppendLine();
        sb.AppendLine($"**Merkle Root**: {manifest.MerkleRoot}");
        sb.AppendLine();
        sb.AppendLine("---");
        sb.AppendLine("Generated by StellaOps Scanner");

        return sb.ToString();
    }

    // Static README content for the schemas/ directory when schema bundling is enabled.
    private static string GenerateSchemasReadme()
    {
        return """
            # Bundled JSON Schemas

            This directory contains JSON schemas for offline validation.

            ## Available Schemas

            For offline SBOM validation, download schemas from:
            - SPDX: https://github.com/spdx/spdx-spec/tree/development/v2.3/schemas
            - CycloneDX: https://github.com/CycloneDX/specification/tree/master/schema

            ## Usage

            ```bash
            # Validate SPDX SBOM
            jsonschema -i sbom.spdx.json schemas/spdx-2.3.schema.json

            # Validate CycloneDX SBOM
            jsonschema -i sbom.cdx.json schemas/cyclonedx-1.7.schema.json
            ```
            """;
    }

    // Streams all files as a USTAR tar (hand-rolled writer below) and gzips the
    // result into outputStream. The build timestamp supplies deterministic mtimes.
    private static async Task CreateTarGzArchiveAsync(
        string rootFolder,
        IReadOnlyList<ArchiveFile> files,
        Stream outputStream,
        DateTimeOffset timestamp,
        CancellationToken cancellationToken)
    {
        await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true);
        await using var tarBuffer = new MemoryStream();

        var mtimeSeconds = timestamp.ToUnixTimeSeconds();

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();
            WriteTarEntry(tarBuffer, $"{rootFolder}/{file.Path}", file.Bytes, mtimeSeconds);
        }

        // Write end-of-archive markers (two 512-byte zero blocks).
        tarBuffer.Write(new byte[1024]);

        tarBuffer.Position = 0;
        await tarBuffer.CopyToAsync(gzipStream, cancellationToken).ConfigureAwait(false);
    }

    // Writes a single USTAR (POSIX ustar) entry: 512-byte header, content, then
    // zero padding up to the next 512-byte boundary.
    private static void WriteTarEntry(Stream stream, string path, byte[] content, long mtimeSeconds)
    {
        // POSIX ustar header (512 bytes).
        var header = new byte[512];

        // Entry name (bytes 0-99). ustar caps names at 100 bytes; the previous
        // implementation silently truncated longer names, corrupting the archive,
        // so overly long names now fail fast.
        var pathBytes = Encoding.ASCII.GetBytes(path);
        if (pathBytes.Length > 100)
        {
            throw new InvalidOperationException($"Tar entry path exceeds the 100-byte ustar name limit: {path}");
        }

        Array.Copy(pathBytes, 0, header, 0, pathBytes.Length);

        // File mode (bytes 100-107): 0644.
        Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);

        // Owner UID (bytes 108-115): 0.
        Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);

        // Owner GID (bytes 116-123): 0.
        Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);

        // File size in octal (bytes 124-135).
        var sizeOctal = Convert.ToString(content.Length, 8).PadLeft(11, '0') + "\0";
        Encoding.ASCII.GetBytes(sizeOctal).CopyTo(header, 124);

        // Modification time in octal (bytes 136-147), taken from the archive build
        // timestamp rather than the wall clock so output is deterministic.
        var mtimeOctal = Convert.ToString(mtimeSeconds, 8).PadLeft(11, '0') + "\0";
        Encoding.ASCII.GetBytes(mtimeOctal).CopyTo(header, 136);

        // Checksum field (bytes 148-155) counts as spaces while summing.
        for (int i = 148; i < 156; i++)
        {
            header[i] = 0x20;
        }

        // Type flag (byte 156): '0' for regular file.
        header[156] = (byte)'0';

        // Link name (bytes 157-256) stays empty.

        // USTAR magic (bytes 257-262) and version (bytes 263-264).
        Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
        Encoding.ASCII.GetBytes("00").CopyTo(header, 263);

        // Owner name (bytes 265-296) and group name (bytes 297-328).
        Encoding.ASCII.GetBytes("stellaops").CopyTo(header, 265);
        Encoding.ASCII.GetBytes("stellaops").CopyTo(header, 297);

        // Header checksum: unsigned byte sum of the whole 512-byte header.
        var checksum = 0;
        for (int i = 0; i < 512; i++)
        {
            checksum += header[i];
        }

        var checksumOctal = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
        Encoding.ASCII.GetBytes(checksumOctal).CopyTo(header, 148);

        // Write header, content, then pad to the 512-byte boundary.
        stream.Write(header);
        stream.Write(content);

        var padding = (512 - (content.Length % 512)) % 512;
        if (padding > 0)
        {
            stream.Write(new byte[padding]);
        }
    }

    // Lowercase hex SHA-256 of a byte array.
    private static string ComputeSha256Hex(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Lowercase hex SHA-256 of a stream, read from its current position.
    private static string ComputeSha256Hex(Stream stream)
    {
        var hash = SHA256.HashData(stream);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Computes the Merkle root over a list of lowercase-hex leaf digests.
    // NOTE: non-standard scheme — parent = SHA-256 over the UTF-8 bytes of the two
    // children's hex strings concatenated; an odd trailing node is promoted as-is.
    // Kept intact because verifiers must reproduce the exact same construction.
    private static string ComputeMerkleRoot(IReadOnlyList<string> hashes)
    {
        if (hashes.Count == 0)
            return string.Empty;

        if (hashes.Count == 1)
            return hashes[0];

        var currentLevel = hashes.ToList();

        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<string>();

            for (int i = 0; i < currentLevel.Count; i += 2)
            {
                if (i + 1 < currentLevel.Count)
                {
                    var combined = currentLevel[i] + currentLevel[i + 1];
                    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
                    nextLevel.Add(Convert.ToHexString(hash).ToLowerInvariant());
                }
                else
                {
                    // Odd element, promote to next level.
                    nextLevel.Add(currentLevel[i]);
                }
            }

            currentLevel = nextLevel;
        }

        return currentLevel[0];
    }

    // TODO: these placeholder version/digest values should come from build-time
    // assembly metadata rather than hard-coded constants.
    private static string GetSuiteVersion() => "2027.Q1";
    private static string GetScannerVersion() => "1.0.0";
    private static string GetScannerDigest() => "sha256:scanner-image-digest";

    // In-memory representation of one archive member before tar packing.
    private sealed record ArchiveFile(string Path, byte[] Bytes, string MediaType);
}
|
||||
|
||||
#region Metadata DTOs
|
||||
|
||||
/// <summary>
/// Metadata for signed SBOM archive; serialized as metadata.json at the archive root.
/// </summary>
public sealed class SignedSbomMetadata
{
    /// <summary>Metadata schema version.</summary>
    public required string SchemaVersion { get; init; }

    /// <summary>Versions of the StellaOps components that produced the archive.</summary>
    public required StellaOpsVersionInfo StellaOps { get; init; }

    /// <summary>When and by whom the archive was generated.</summary>
    public required GenerationInfo Generation { get; init; }

    /// <summary>The scanned image input the SBOM describes.</summary>
    public required InputInfo Input { get; init; }

    /// <summary>SBOM format, digest, and content counts.</summary>
    public required SbomInfo Sbom { get; init; }

    /// <summary>How and by whom the SBOM was signed.</summary>
    public required SignatureInfo Signature { get; init; }

    /// <summary>Reproducibility claims for the SBOM content.</summary>
    public required ReproducibilityInfo Reproducibility { get; init; }
}
|
||||
|
||||
/// <summary>
/// Version identifiers of the StellaOps components involved in producing the archive.
/// </summary>
public sealed class StellaOpsVersionInfo
{
    /// <summary>Overall suite release identifier (e.g. "2027.Q1").</summary>
    public required string SuiteVersion { get; init; }

    /// <summary>Scanner component version.</summary>
    public required string ScannerVersion { get; init; }

    /// <summary>Digest identifying the scanner image build.</summary>
    public required string ScannerDigest { get; init; }

    /// <summary>Signer component version.</summary>
    public required string SignerVersion { get; init; }

    /// <summary>SBOM service component version.</summary>
    public required string SbomServiceVersion { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes when and by whom the archive was generated.
/// </summary>
public sealed class GenerationInfo
{
    /// <summary>UTC generation time of the archive.</summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>Hybrid-logical-clock style timestamp string; currently derived from
    /// Unix milliseconds padded to nanosecond width by the builder.</summary>
    public required string HlcTimestamp { get; init; }

    /// <summary>Operator identity (e.g. email), when available.</summary>
    public string? Operator { get; init; }
}
|
||||
|
||||
/// <summary>
/// The container image input the SBOM was generated from.
/// </summary>
public sealed class InputInfo
{
    /// <summary>Image reference that was scanned.</summary>
    public required string ImageRef { get; init; }

    /// <summary>Resolved image digest.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>Platform (e.g. linux/amd64), when known.</summary>
    public string? Platform { get; init; }
}
|
||||
|
||||
/// <summary>
/// Summary of the SBOM document contained in the archive.
/// </summary>
public sealed class SbomInfo
{
    /// <summary>SBOM format identifier (e.g. spdx-2.3, cyclonedx-1.7).</summary>
    public required string Format { get; init; }

    /// <summary>SHA-256 digest of the SBOM bytes (lowercase hex).</summary>
    public required string Digest { get; init; }

    /// <summary>Number of components recorded in the SBOM.</summary>
    public int ComponentCount { get; init; }

    /// <summary>Number of packages recorded in the SBOM.</summary>
    public int PackageCount { get; init; }

    /// <summary>Number of files recorded in the SBOM.</summary>
    public int FileCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes the signature applied to the SBOM.
/// </summary>
public sealed class SignatureInfo
{
    /// <summary>Signature type (e.g. "keyless", "key-based").</summary>
    public required string Type { get; init; }

    /// <summary>Signature issuer (e.g. OIDC issuer URL), when applicable.</summary>
    public string? Issuer { get; init; }

    /// <summary>Signature subject (e.g. identity email), when applicable.</summary>
    public string? Subject { get; init; }

    /// <summary>When the signature was produced (set to the archive build timestamp by the builder).</summary>
    public DateTimeOffset SignedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Reproducibility claims for the SBOM content.
/// </summary>
public sealed class ReproducibilityInfo
{
    /// <summary>Whether regeneration from the same input is expected to be byte-identical.</summary>
    public bool Deterministic { get; init; }

    /// <summary>Expected SHA-256 digest of a reproduced SBOM (lowercase hex).</summary>
    public string? ExpectedDigest { get; init; }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Manifest DTOs
|
||||
|
||||
/// <summary>
/// Manifest for signed SBOM archive; serialized as manifest.json at the archive root.
/// Lists per-file digests plus a Merkle root over them. The manifest is created
/// before VERIFY.md is added, so VERIFY.md (and the manifest itself) are not listed.
/// </summary>
public sealed class SignedSbomManifest
{
    /// <summary>Manifest schema version.</summary>
    public required string SchemaVersion { get; init; }

    /// <summary>Archive identifier ("signed-sbom-&lt;digest12&gt;-&lt;timestamp&gt;").</summary>
    public required string ArchiveId { get; init; }

    /// <summary>UTC time the archive was generated.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>Entries for the files covered by this manifest.</summary>
    public required IReadOnlyList<ManifestFileEntry> Files { get; init; }

    /// <summary>Merkle root over the file digests, prefixed with "sha256:".</summary>
    public required string MerkleRoot { get; init; }

    /// <summary>Number of entries in <see cref="Files"/>.</summary>
    public int TotalFiles { get; init; }

    /// <summary>Sum of the listed file sizes in bytes.</summary>
    public long TotalSize { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single file entry in the signed SBOM archive manifest.
/// </summary>
public sealed class ManifestFileEntry
{
    /// <summary>Path of the file relative to the archive root.</summary>
    public required string Path { get; init; }

    /// <summary>SHA-256 digest (hex) of the file contents.</summary>
    public required string Sha256 { get; init; }

    /// <summary>File size in bytes.</summary>
    /// <remarks>NOTE(review): declared as int while SignedSbomManifest.TotalSize is long —
    /// entries over 2 GiB would overflow; confirm whether widening is safe for the schema.</remarks>
    public int Size { get; init; }

    /// <summary>Media type of the file (e.g. application/json).</summary>
    public required string MediaType { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,137 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AiCodeGuardOptions.cs
|
||||
// Sprint: SPRINT_20260112_010_SCANNER_ai_code_guard_core
|
||||
// Task: SCANNER-AIGUARD-001
|
||||
// Description: AI Code Guard options with deterministic defaults.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Scanner.AiCodeGuard;
|
||||
|
||||
/// <summary>
/// Configuration options for AI Code Guard analysis.
/// Defaults are chosen to keep analysis deterministic and bounded.
/// </summary>
public sealed class AiCodeGuardOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "AiCodeGuard";

    /// <summary>
    /// Whether AI Code Guard is enabled.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Detection confidence threshold (0.0-1.0).
    /// Findings below this threshold are excluded.
    /// </summary>
    public double ConfidenceThreshold { get; set; } = 0.7;

    /// <summary>
    /// Enabled detection categories.
    /// </summary>
    public IReadOnlyList<string> EnabledCategories { get; set; } = new[]
    {
        "AiGenerated",
        "InsecurePattern",
        "Hallucination",
        "LicenseRisk",
        "UntrustedDependency",
        "QualityIssue"
    };

    /// <summary>
    /// Severity threshold for blocking (findings at or above this level block).
    /// </summary>
    public string BlockingSeverity { get; set; } = "High";

    /// <summary>
    /// Maximum number of hunks to analyze per file. Bounds per-file work.
    /// </summary>
    public int MaxHunksPerFile { get; set; } = 100;

    /// <summary>
    /// Maximum total lines to analyze per scan. Bounds per-scan work.
    /// </summary>
    public int MaxTotalLines { get; set; } = 50000;

    /// <summary>
    /// Path to allowlist corpus for similarity checking. Null disables allowlist matching.
    /// </summary>
    public string? AllowlistCorpusPath { get; set; }

    /// <summary>
    /// Path to denylist corpus for similarity checking. Null disables denylist matching.
    /// </summary>
    public string? DenylistCorpusPath { get; set; }

    /// <summary>
    /// Similarity threshold for snippet matching (0.0-1.0).
    /// </summary>
    public double SimilarityThreshold { get; set; } = 0.85;

    /// <summary>
    /// License hygiene configuration.
    /// </summary>
    public LicenseHygieneOptions LicenseHygiene { get; set; } = new();

    /// <summary>
    /// Rule sets to apply (null = all default rules).
    /// </summary>
    public IReadOnlyList<string>? RuleSets { get; set; }

    /// <summary>
    /// Scanner version identifier for reproducibility.
    /// </summary>
    public string ScannerVersion { get; set; } = "1.0.0";

    /// <summary>
    /// Model version identifier for reproducibility.
    /// </summary>
    public string ModelVersion { get; set; } = "1.0.0";
}
|
||||
|
||||
/// <summary>
/// License hygiene check options: SPDX allow/deny lists plus the policy for
/// licenses that appear in neither list.
/// </summary>
public sealed class LicenseHygieneOptions
{
    /// <summary>
    /// Whether license hygiene checks are enabled.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Allowed license SPDX identifiers.
    /// </summary>
    public IReadOnlyList<string> AllowedLicenses { get; set; } = new[]
    {
        "MIT",
        "Apache-2.0",
        "BSD-2-Clause",
        "BSD-3-Clause",
        "ISC",
        "CC0-1.0",
        "Unlicense"
    };

    /// <summary>
    /// Denied license SPDX identifiers (block if detected).
    /// </summary>
    public IReadOnlyList<string> DeniedLicenses { get; set; } = new[]
    {
        "GPL-2.0-only",
        "GPL-3.0-only",
        "AGPL-3.0-only",
        "LGPL-2.1-only",
        "LGPL-3.0-only"
    };

    /// <summary>
    /// Action when unknown license is detected (a license in neither list).
    /// </summary>
    public string UnknownLicenseAction { get; set; } = "RequireReview";
}
|
||||
@@ -0,0 +1,214 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IAiCodeGuardService.cs
|
||||
// Sprint: SPRINT_20260112_010_SCANNER_ai_code_guard_core
|
||||
// Task: SCANNER-AIGUARD-002/006
|
||||
// Description: AI Code Guard service interface for Scanner.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Scanner.AiCodeGuard;
|
||||
|
||||
/// <summary>
/// Service for AI Code Guard analysis.
/// </summary>
public interface IAiCodeGuardService
{
    /// <summary>
    /// Analyzes changed hunks for AI-generated code issues.
    /// Analysis is deterministic for a given request (the timestamp is an input,
    /// not a wall-clock read — see <see cref="AiCodeGuardAnalysisRequest.AnalysisTimestamp"/>).
    /// </summary>
    /// <param name="request">Analysis request with hunks and options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Analysis result with findings and verdict.</returns>
    Task<AiCodeGuardAnalysisResult> AnalyzeAsync(
        AiCodeGuardAnalysisRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Analysis request for AI Code Guard.
/// </summary>
public sealed record AiCodeGuardAnalysisRequest
{
    /// <summary>
    /// Repository URI.
    /// </summary>
    public required string RepositoryUri { get; init; }

    /// <summary>
    /// Commit SHA being analyzed.
    /// </summary>
    public required string CommitSha { get; init; }

    /// <summary>
    /// Branch name (optional).
    /// </summary>
    public string? Branch { get; init; }

    /// <summary>
    /// Base commit for diff comparison (optional, for PR analysis).
    /// </summary>
    public string? BaseCommitSha { get; init; }

    /// <summary>
    /// Changed hunks to analyze.
    /// </summary>
    public required IReadOnlyList<CodeHunk> Hunks { get; init; }

    /// <summary>
    /// Analysis timestamp (input, not wall-clock, so reruns are deterministic).
    /// </summary>
    public required DateTimeOffset AnalysisTimestamp { get; init; }

    /// <summary>
    /// Optional options override (uses defaults if null).
    /// </summary>
    public AiCodeGuardOptions? Options { get; init; }
}
|
||||
|
||||
/// <summary>
/// A code hunk to analyze.
/// </summary>
public sealed record CodeHunk
{
    /// <summary>
    /// File path relative to repository root.
    /// </summary>
    public required string FilePath { get; init; }

    /// <summary>
    /// Programming language (detected or specified).
    /// </summary>
    public required string Language { get; init; }

    /// <summary>
    /// Start line in the file (1-based).
    /// </summary>
    public required int StartLine { get; init; }

    /// <summary>
    /// End line in the file (1-based, inclusive — TODO confirm against producer).
    /// </summary>
    public required int EndLine { get; init; }

    /// <summary>
    /// Hunk content (source code).
    /// </summary>
    public required string Content { get; init; }

    /// <summary>
    /// Whether this is new code (added) vs existing.
    /// </summary>
    public required bool IsNew { get; init; }

    /// <summary>
    /// SHA-256 hash of normalized content for deterministic hunk ID.
    /// Optional: may be computed downstream when absent.
    /// </summary>
    public string? ContentHash { get; init; }
}
|
||||
|
||||
/// <summary>
/// AI Code Guard analysis result.
/// </summary>
public sealed record AiCodeGuardAnalysisResult
{
    /// <summary>
    /// Whether analysis completed successfully. When false, see <see cref="Error"/>.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Scanner configuration used.
    /// </summary>
    public required AiCodeGuardScannerConfigResult ScannerConfig { get; init; }

    /// <summary>
    /// Files analyzed.
    /// </summary>
    public required ImmutableList<AiCodeGuardFileResult> Files { get; init; }

    /// <summary>
    /// Detected findings.
    /// </summary>
    public required ImmutableList<AiCodeGuardFindingResult> Findings { get; init; }

    /// <summary>
    /// Overall verdict.
    /// </summary>
    public required AiCodeGuardVerdictResult Verdict { get; init; }

    /// <summary>
    /// Total lines analyzed.
    /// </summary>
    public required long TotalLinesAnalyzed { get; init; }

    /// <summary>
    /// Error message if Success is false.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Content digest for the analysis result (SHA-256).
    /// </summary>
    public string? ContentDigest { get; init; }
}
|
||||
|
||||
/// <summary>
/// Scanner configuration in result — snapshot of the options that were in effect,
/// recorded so a run can be reproduced.
/// </summary>
public sealed record AiCodeGuardScannerConfigResult
{
    /// <summary>Scanner version identifier used for the run.</summary>
    public required string ScannerVersion { get; init; }

    /// <summary>Model version identifier used for the run.</summary>
    public required string ModelVersion { get; init; }

    /// <summary>Confidence threshold that filtered findings (0.0-1.0).</summary>
    public required double ConfidenceThreshold { get; init; }

    /// <summary>Detection categories that were enabled.</summary>
    public required ImmutableList<string> EnabledCategories { get; init; }

    /// <summary>Rule sets applied; null means all default rules.</summary>
    public ImmutableList<string>? RuleSets { get; init; }
}
|
||||
|
||||
/// <summary>
/// File analyzed in result.
/// </summary>
public sealed record AiCodeGuardFileResult
{
    /// <summary>File path relative to repository root.</summary>
    public required string Path { get; init; }

    /// <summary>Content digest of the analyzed file.</summary>
    public required string Digest { get; init; }

    /// <summary>Number of lines in the file.</summary>
    public required int LineCount { get; init; }

    /// <summary>Detected language, when known.</summary>
    public string? Language { get; init; }
}
|
||||
|
||||
/// <summary>
/// Finding in result. Location fields use 1-based lines; columns are optional.
/// </summary>
public sealed record AiCodeGuardFindingResult
{
    /// <summary>Stable finding identifier.</summary>
    public required string Id { get; init; }

    /// <summary>Detection category (e.g. AiGenerated, InsecurePattern).</summary>
    public required string Category { get; init; }

    /// <summary>Severity label (compared against the blocking threshold).</summary>
    public required string Severity { get; init; }

    /// <summary>Detection confidence (0.0-1.0).</summary>
    public required double Confidence { get; init; }

    /// <summary>Path of the file the finding is in.</summary>
    public required string FilePath { get; init; }

    /// <summary>Start line of the finding (1-based).</summary>
    public required int StartLine { get; init; }

    /// <summary>End line of the finding (1-based).</summary>
    public required int EndLine { get; init; }

    /// <summary>Optional start column.</summary>
    public int? StartColumn { get; init; }

    /// <summary>Optional end column.</summary>
    public int? EndColumn { get; init; }

    /// <summary>Optional code snippet for the finding.</summary>
    public string? Snippet { get; init; }

    /// <summary>Human-readable description of the finding.</summary>
    public required string Description { get; init; }

    /// <summary>Identifier of the rule that produced the finding.</summary>
    public required string RuleId { get; init; }

    /// <summary>How the finding was detected, when recorded.</summary>
    public string? DetectionMethod { get; init; }

    /// <summary>Supporting indicators, when recorded.</summary>
    public ImmutableList<string>? Indicators { get; init; }

    /// <summary>Perplexity score, when the detector computed one.</summary>
    public double? PerplexityScore { get; init; }

    /// <summary>Matched pattern identifiers, when recorded.</summary>
    public ImmutableList<string>? PatternMatches { get; init; }

    /// <summary>Suggested remediation, when available.</summary>
    public string? Remediation { get; init; }
}
|
||||
|
||||
/// <summary>
/// Verdict in result — the roll-up over all findings.
/// </summary>
public sealed record AiCodeGuardVerdictResult
{
    /// <summary>Overall verdict status.</summary>
    public required string Status { get; init; }

    /// <summary>Total number of findings.</summary>
    public required int TotalFindings { get; init; }

    /// <summary>Finding counts keyed by severity label.</summary>
    public required ImmutableDictionary<string, int> FindingsBySeverity { get; init; }

    /// <summary>Estimated percentage of AI-generated code, when computed.</summary>
    public double? AiGeneratedPercentage { get; init; }

    /// <summary>Human-readable verdict message.</summary>
    public required string Message { get; init; }

    /// <summary>Optional recommendation for the reviewer.</summary>
    public string? Recommendation { get; init; }
}
|
||||
@@ -49,6 +49,30 @@ public static class RichGraphSemanticAttributes
|
||||
|
||||
/// <summary>CWE ID if applicable.</summary>
|
||||
public const string CweId = "cwe_id";
|
||||
|
||||
    // Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
    // Runtime evidence overlay attributes (do not alter lattice precedence).
    // These are string-valued attribute keys; numeric/boolean values are stored
    // as invariant-culture strings (see the corresponding builder/getter methods).

    /// <summary>Reachability score (0.0-1.0) - computed from path confidence.</summary>
    public const string ReachabilityScore = "reachability_score";

    /// <summary>Whether this node/edge was confirmed at runtime ("true"/"false").</summary>
    public const string RuntimeConfirmed = "runtime_confirmed";

    /// <summary>Number of runtime observations for this node/edge.</summary>
    public const string RuntimeObservationCount = "runtime_observation_count";

    /// <summary>Timestamp of first runtime observation (ISO 8601).</summary>
    public const string RuntimeFirstObserved = "runtime_first_observed";

    /// <summary>Timestamp of last runtime observation (ISO 8601).</summary>
    public const string RuntimeLastObserved = "runtime_last_observed";

    /// <summary>Runtime evidence URI reference.</summary>
    public const string RuntimeEvidenceUri = "runtime_evidence_uri";

    /// <summary>Runtime confirmation type (confirmed/partial/none).</summary>
    public const string RuntimeConfirmationType = "runtime_confirmation_type";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -162,6 +186,88 @@ public static class RichGraphSemanticExtensions
|
||||
// Use max risk score as overall
|
||||
return riskScores.Max();
|
||||
}
|
||||
|
||||
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
|
||||
// Extension methods for runtime evidence overlay attributes
|
||||
|
||||
/// <summary>Gets the reachability score (0.0-1.0).</summary>
|
||||
public static double? GetReachabilityScore(this RichGraphNode node)
|
||||
{
|
||||
if (node.Attributes?.TryGetValue(RichGraphSemanticAttributes.ReachabilityScore, out var value) != true ||
|
||||
string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var score) ? score : null;
|
||||
}
|
||||
|
||||
/// <summary>Gets whether this node was confirmed at runtime.</summary>
|
||||
public static bool? GetRuntimeConfirmed(this RichGraphNode node)
|
||||
{
|
||||
if (node.Attributes?.TryGetValue(RichGraphSemanticAttributes.RuntimeConfirmed, out var value) != true ||
|
||||
string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return bool.TryParse(value, out var result) ? result : null;
|
||||
}
|
||||
|
||||
/// <summary>Gets the runtime observation count.</summary>
|
||||
public static ulong? GetRuntimeObservationCount(this RichGraphNode node)
|
||||
{
|
||||
if (node.Attributes?.TryGetValue(RichGraphSemanticAttributes.RuntimeObservationCount, out var value) != true ||
|
||||
string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return ulong.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var count) ? count : null;
|
||||
}
|
||||
|
||||
/// <summary>Gets the runtime confirmation type (confirmed/partial/none).</summary>
|
||||
public static string? GetRuntimeConfirmationType(this RichGraphNode node)
|
||||
{
|
||||
return node.Attributes?.TryGetValue(RichGraphSemanticAttributes.RuntimeConfirmationType, out var value) == true ? value : null;
|
||||
}
|
||||
|
||||
/// <summary>Gets the runtime evidence URI.</summary>
|
||||
public static string? GetRuntimeEvidenceUri(this RichGraphNode node)
|
||||
{
|
||||
return node.Attributes?.TryGetValue(RichGraphSemanticAttributes.RuntimeEvidenceUri, out var value) == true ? value : null;
|
||||
}
|
||||
|
||||
/// <summary>Gets nodes with runtime confirmation.</summary>
|
||||
public static IReadOnlyList<RichGraphNode> GetRuntimeConfirmedNodes(this RichGraph graph)
|
||||
{
|
||||
return graph.Nodes.Where(n => n.GetRuntimeConfirmed() == true).ToList();
|
||||
}
|
||||
|
||||
/// <summary>Calculates the graph-level runtime coverage percentage.</summary>
|
||||
public static double CalculateRuntimeCoverage(this RichGraph graph)
|
||||
{
|
||||
if (graph.Nodes.Count == 0)
|
||||
return 0.0;
|
||||
|
||||
var confirmedCount = graph.Nodes.Count(n => n.GetRuntimeConfirmed() == true);
|
||||
return (double)confirmedCount / graph.Nodes.Count * 100.0;
|
||||
}
|
||||
|
||||
/// <summary>Gets the average reachability score for the graph.</summary>
|
||||
public static double? CalculateAverageReachabilityScore(this RichGraph graph)
|
||||
{
|
||||
var scores = graph.Nodes
|
||||
.Select(n => n.GetReachabilityScore())
|
||||
.Where(s => s.HasValue)
|
||||
.Select(s => s!.Value)
|
||||
.ToList();
|
||||
|
||||
if (scores.Count == 0)
|
||||
return null;
|
||||
|
||||
return scores.Average();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -230,6 +336,52 @@ public sealed class RichGraphNodeSemanticBuilder
|
||||
return this;
|
||||
}
|
||||
|
||||
// Sprint: SPRINT_20260112_004_SCANNER_reachability_trace_runtime_evidence
|
||||
// Builder methods for runtime evidence overlay attributes
|
||||
|
||||
/// <summary>Sets the reachability score (0.0-1.0).</summary>
|
||||
public RichGraphNodeSemanticBuilder WithReachabilityScore(double score)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.ReachabilityScore] = Math.Clamp(score, 0.0, 1.0).ToString("F3", CultureInfo.InvariantCulture);
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Sets the runtime confirmed flag.</summary>
|
||||
public RichGraphNodeSemanticBuilder WithRuntimeConfirmed(bool confirmed)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeConfirmed] = confirmed.ToString().ToLowerInvariant();
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Sets the runtime observation count.</summary>
|
||||
public RichGraphNodeSemanticBuilder WithRuntimeObservationCount(ulong count)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeObservationCount] = count.ToString(CultureInfo.InvariantCulture);
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Sets the runtime observation timestamps.</summary>
|
||||
public RichGraphNodeSemanticBuilder WithRuntimeObservationTimes(DateTimeOffset firstObserved, DateTimeOffset lastObserved)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeFirstObserved] = firstObserved.ToString("O", CultureInfo.InvariantCulture);
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeLastObserved] = lastObserved.ToString("O", CultureInfo.InvariantCulture);
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Sets the runtime evidence URI.</summary>
|
||||
public RichGraphNodeSemanticBuilder WithRuntimeEvidenceUri(string uri)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeEvidenceUri] = uri;
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Sets the runtime confirmation type (confirmed/partial/none).</summary>
|
||||
public RichGraphNodeSemanticBuilder WithRuntimeConfirmationType(string confirmationType)
|
||||
{
|
||||
_attributes[RichGraphSemanticAttributes.RuntimeConfirmationType] = confirmationType;
|
||||
return this;
|
||||
}
|
||||
|
||||
/// <summary>Builds the attributes dictionary.</summary>
|
||||
public IReadOnlyDictionary<string, string> Build()
|
||||
{
|
||||
|
||||
@@ -0,0 +1,672 @@
|
||||
// <copyright file="SignedSbomArchiveBuilderTests.cs" company="StellaOps">
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec (SBOM-SPEC-011)
|
||||
// </copyright>
|
||||
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
using StellaOps.TestKit;
|
||||
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for <see cref="SignedSbomArchiveBuilder"/>.
|
||||
/// Sprint: SPRINT_20260112_016_SCANNER_signed_sbom_archive_spec (SBOM-SPEC-011)
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
public sealed class SignedSbomArchiveBuilderTests : IDisposable
|
||||
{
|
||||
    // Deterministic timestamp used for every archive built in these tests.
    private static readonly DateTimeOffset FixedTime = new(2026, 1, 16, 10, 30, 0, TimeSpan.Zero);
    // Builder under test, wired with a fake clock so outputs are reproducible.
    private readonly SignedSbomArchiveBuilder _builder;
    // Archive result streams registered for disposal in Dispose (xUnit teardown).
    private readonly List<Stream> _streamsToDispose = new();

    /// <summary>Creates the builder under test with a fake clock pinned to <see cref="FixedTime"/>.</summary>
    public SignedSbomArchiveBuilderTests()
    {
        var timeProvider = new FakeTimeProvider(FixedTime);
        _builder = new SignedSbomArchiveBuilder(timeProvider, NullLogger<SignedSbomArchiveBuilder>.Instance);
    }

    /// <summary>Disposes every archive stream produced during the test run.</summary>
    public void Dispose()
    {
        foreach (var stream in _streamsToDispose)
        {
            stream.Dispose();
        }
    }
|
||||
|
||||
    #region Archive Structure Tests

    /// <summary>A minimal request yields a non-empty gzip tarball with the expected name.</summary>
    [Fact]
    public async Task BuildAsync_WithMinimalInput_CreatesValidArchive()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        Assert.NotNull(result);
        Assert.True(result.Size > 0);
        Assert.StartsWith("signed-sbom-", result.FileName);
        Assert.EndsWith(".tar.gz", result.FileName);
        Assert.Equal("application/gzip", result.ContentType);
    }

    /// <summary>Every archive must contain the spec's mandatory files.</summary>
    [Fact]
    public async Task BuildAsync_IncludesMandatoryFiles()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert - Extract and verify file list
        var files = await ExtractTarGzFileListAsync(result.Stream);

        Assert.Contains(files, f => f.EndsWith("manifest.json"));
        Assert.Contains(files, f => f.EndsWith("metadata.json"));
        Assert.Contains(files, f => f.EndsWith("sbom.spdx.json") || f.EndsWith("sbom.cdx.json"));
        Assert.Contains(files, f => f.EndsWith("sbom.dsse.json"));
        Assert.Contains(files, f => f.EndsWith("certs/signing-cert.pem"));
        Assert.Contains(files, f => f.EndsWith("VERIFY.md"));
    }

    /// <summary>SPDX format selects the .spdx.json file name (and not the CycloneDX one).</summary>
    [Fact]
    public async Task BuildAsync_WithSpdxFormat_UsesSpdxFileName()
    {
        // Arrange
        var request = CreateMinimalRequest() with { SbomFormat = "spdx-2.3" };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("sbom.spdx.json"));
        Assert.DoesNotContain(files, f => f.EndsWith("sbom.cdx.json"));
    }

    /// <summary>CycloneDX format selects the .cdx.json file name (and not the SPDX one).</summary>
    [Fact]
    public async Task BuildAsync_WithCycloneDxFormat_UsesCdxFileName()
    {
        // Arrange
        var request = CreateMinimalRequest() with { SbomFormat = "cyclonedx-1.7" };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("sbom.cdx.json"));
        Assert.DoesNotContain(files, f => f.EndsWith("sbom.spdx.json"));
    }

    #endregion
|
||||
|
||||
    #region Optional Content Tests

    /// <summary>A signing chain PEM, when supplied, is written under certs/.</summary>
    [Fact]
    public async Task BuildAsync_WithSigningChain_IncludesChainFile()
    {
        // Arrange
        var request = CreateMinimalRequest() with
        {
            SigningChainPem = "-----BEGIN CERTIFICATE-----\nCHAIN\n-----END CERTIFICATE-----"
        };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("certs/signing-chain.pem"));
    }

    /// <summary>A Fulcio root PEM, when supplied, is written under certs/.</summary>
    [Fact]
    public async Task BuildAsync_WithFulcioRoot_IncludesFulcioRootFile()
    {
        // Arrange
        var request = CreateMinimalRequest() with
        {
            FulcioRootPem = "-----BEGIN CERTIFICATE-----\nFULCIO\n-----END CERTIFICATE-----"
        };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("certs/fulcio-root.pem"));
    }

    /// <summary>Rekor proof inputs produce the rekor-proof/ files and surface the log index.</summary>
    [Fact]
    public async Task BuildAsync_WithRekorProof_IncludesRekorFiles()
    {
        // Arrange
        var request = CreateMinimalRequest() with
        {
            IncludeRekorProof = true,
            RekorInclusionProofBytes = Encoding.UTF8.GetBytes("{\"proof\": \"test\"}"),
            RekorCheckpointBytes = Encoding.UTF8.GetBytes("checkpoint"),
            RekorPublicKeyPem = "-----BEGIN PUBLIC KEY-----\nREKOR\n-----END PUBLIC KEY-----",
            RekorLogIndex = 12345678
        };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("rekor-proof/inclusion-proof.json"));
        Assert.Contains(files, f => f.EndsWith("rekor-proof/checkpoint.sig"));
        Assert.Contains(files, f => f.EndsWith("rekor-proof/rekor-public.pem"));
        Assert.Equal(12345678, result.RekorLogIndex);
    }

    /// <summary>Disabling the Rekor proof keeps the rekor-proof/ directory out of the archive.</summary>
    [Fact]
    public async Task BuildAsync_WithRekorProofDisabled_ExcludesRekorFiles()
    {
        // Arrange
        var request = CreateMinimalRequest() with
        {
            IncludeRekorProof = false
        };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.DoesNotContain(files, f => f.Contains("rekor-proof/"));
    }

    /// <summary>IncludeSchemas=true bundles the schemas/ directory (at minimum its README).</summary>
    [Fact]
    public async Task BuildAsync_WithSchemas_IncludesSchemasReadme()
    {
        // Arrange
        var request = CreateMinimalRequest() with { IncludeSchemas = true };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.Contains(files, f => f.EndsWith("schemas/README.md"));
    }

    /// <summary>IncludeSchemas=false keeps the schemas/ directory out of the archive.</summary>
    [Fact]
    public async Task BuildAsync_WithoutSchemas_ExcludesSchemasDirectory()
    {
        // Arrange
        var request = CreateMinimalRequest() with { IncludeSchemas = false };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        var files = await ExtractTarGzFileListAsync(result.Stream);
        Assert.DoesNotContain(files, f => f.Contains("schemas/"));
    }

    #endregion
|
||||
|
||||
    #region Digest and Hash Tests

    /// <summary>The reported SBOM digest equals SHA-256 over the exact SBOM bytes supplied.</summary>
    [Fact]
    public async Task BuildAsync_ComputesCorrectSbomDigest()
    {
        // Arrange
        var sbomContent = "{\"spdxVersion\": \"SPDX-2.3\"}";
        var sbomBytes = Encoding.UTF8.GetBytes(sbomContent);
        var expectedDigest = ComputeSha256Hex(sbomBytes);

        var request = CreateMinimalRequest() with { SbomBytes = sbomBytes };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        Assert.Equal(expectedDigest, result.SbomDigest);
    }

    /// <summary>The archive digest is a well-formed lowercase SHA-256 hex string.</summary>
    [Fact]
    public async Task BuildAsync_ComputesNonEmptyArchiveDigest()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        Assert.NotNull(result.ArchiveDigest);
        Assert.Equal(64, result.ArchiveDigest.Length); // SHA-256 hex string length
        Assert.Matches("^[a-f0-9]{64}$", result.ArchiveDigest);
    }

    /// <summary>The Merkle root is present and carries the sha256: prefix.</summary>
    [Fact]
    public async Task BuildAsync_ComputesNonEmptyMerkleRoot()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert
        Assert.NotNull(result.MerkleRoot);
        Assert.StartsWith("sha256:", result.MerkleRoot);
    }

    #endregion
|
||||
|
||||
    #region Determinism Tests

    /// <summary>Two builds from the same request produce the same SBOM digest (fixed clock).</summary>
    [Fact]
    public async Task BuildAsync_SameInput_ProducesSameSbomDigest()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result1 = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result1.Stream);

        var result2 = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result2.Stream);

        // Assert
        Assert.Equal(result1.SbomDigest, result2.SbomDigest);
    }

    /// <summary>Two builds from the same request produce the same Merkle root (fixed clock).</summary>
    [Fact]
    public async Task BuildAsync_SameInput_ProducesSameMerkleRoot()
    {
        // Arrange
        var request = CreateMinimalRequest();

        // Act
        var result1 = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result1.Stream);

        var result2 = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result2.Stream);

        // Assert
        Assert.Equal(result1.MerkleRoot, result2.MerkleRoot);
    }

    #endregion
|
||||
|
||||
    #region Metadata Tests

    /// <summary>metadata.json carries the schema version, all top-level sections, and echoes request fields.</summary>
    [Fact]
    public async Task BuildAsync_MetadataContainsRequiredFields()
    {
        // Arrange
        var request = CreateMinimalRequest() with
        {
            ImageRef = "ghcr.io/test/image:v1.0.0",
            ImageDigest = "sha256:abc123",
            SbomFormat = "spdx-2.3",
            ComponentCount = 10,
            PackageCount = 5,
            FileCount = 100,
            SignatureIssuer = "https://accounts.google.com",
            SignatureSubject = "test@example.com"
        };

        // Act
        var result = await _builder.BuildAsync(request);
        _streamsToDispose.Add(result.Stream);

        // Assert - Extract and parse metadata.json
        var metadataJson = await ExtractFileContentAsync(result.Stream, "metadata.json");
        Assert.NotNull(metadataJson);

        var metadata = JsonSerializer.Deserialize<JsonElement>(metadataJson);

        Assert.Equal("1.0.0", metadata.GetProperty("schemaVersion").GetString());
        Assert.True(metadata.TryGetProperty("stellaOps", out _));
        Assert.True(metadata.TryGetProperty("generation", out _));
        Assert.True(metadata.TryGetProperty("input", out _));
        Assert.True(metadata.TryGetProperty("sbom", out _));
        Assert.True(metadata.TryGetProperty("signature", out _));

        var input = metadata.GetProperty("input");
        Assert.Equal("ghcr.io/test/image:v1.0.0", input.GetProperty("imageRef").GetString());
        Assert.Equal("sha256:abc123", input.GetProperty("imageDigest").GetString());

        var sbom = metadata.GetProperty("sbom");
        Assert.Equal("spdx-2.3", sbom.GetProperty("format").GetString());
        Assert.Equal(10, sbom.GetProperty("componentCount").GetInt32());
    }

    #endregion
|
||||
|
||||
#region Manifest Tests
|
||||
|
||||
[Fact]
public async Task BuildAsync_ManifestListsAllFiles()
{
    // Arrange: include schemas so the manifest covers every optional member too.
    var request = CreateMinimalRequest() with { IncludeSchemas = true };

    // Act
    var archive = await _builder.BuildAsync(request);
    _streamsToDispose.Add(archive.Stream);

    // Assert: manifest.json must exist in the archive and parse as JSON.
    var json = await ExtractFileContentAsync(archive.Stream, "manifest.json");
    Assert.NotNull(json);

    var root = JsonSerializer.Deserialize<JsonElement>(json);

    // Envelope fields.
    Assert.Equal("1.0.0", root.GetProperty("schemaVersion").GetString());
    Assert.True(root.TryGetProperty("archiveId", out _));
    Assert.True(root.TryGetProperty("generatedAt", out _));
    Assert.True(root.TryGetProperty("files", out _));
    Assert.True(root.TryGetProperty("merkleRoot", out _));
    Assert.True(root.TryGetProperty("totalFiles", out _));
    Assert.True(root.TryGetProperty("totalSize", out _));

    // At least one file entry must be listed.
    var files = root.GetProperty("files");
    Assert.True(files.GetArrayLength() > 0);

    // Every entry carries the full path/hash/size/media-type tuple.
    foreach (var entry in files.EnumerateArray())
    {
        Assert.True(entry.TryGetProperty("path", out _));
        Assert.True(entry.TryGetProperty("sha256", out _));
        Assert.True(entry.TryGetProperty("size", out _));
        Assert.True(entry.TryGetProperty("mediaType", out _));
    }
}
|
||||
|
||||
[Fact]
public async Task BuildAsync_ManifestFileHashesAreValid()
{
    // Arrange: use a known SBOM payload so its hash can be computed independently.
    var sbomContent = "{\"test\": \"sbom\"}";
    var request = CreateMinimalRequest() with
    {
        SbomBytes = Encoding.UTF8.GetBytes(sbomContent),
        SbomFormat = "spdx-2.3"
    };

    // Act
    var result = await _builder.BuildAsync(request);
    _streamsToDispose.Add(result.Stream);

    // Assert: guard against a missing manifest before deserializing.
    var manifestJson = await ExtractFileContentAsync(result.Stream, "manifest.json");
    Assert.NotNull(manifestJson);
    var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);

    var files = manifest.GetProperty("files");
    var sbomEntry = files.EnumerateArray()
        .FirstOrDefault(f => f.GetProperty("path").GetString()?.EndsWith("sbom.spdx.json", StringComparison.Ordinal) == true);

    // FirstOrDefault over JsonElement (a struct) yields an Undefined element when
    // nothing matches; fail with a clear message instead of letting GetProperty
    // below throw an opaque InvalidOperationException.
    Assert.True(sbomEntry.ValueKind == JsonValueKind.Object, "manifest should contain an entry for sbom.spdx.json");

    Assert.NotNull(sbomEntry.GetProperty("sha256").GetString());

    // The recorded hash must match an independently computed SHA-256 of the SBOM bytes.
    var expectedHash = ComputeSha256Hex(Encoding.UTF8.GetBytes(sbomContent));
    Assert.Equal(expectedHash, sbomEntry.GetProperty("sha256").GetString());
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region VERIFY.md Tests
|
||||
|
||||
[Fact]
public async Task BuildAsync_VerifyMdContainsVerificationInstructions()
{
    // Arrange: SPDX 2.3 format with a Rekor log index set.
    var request = CreateMinimalRequest() with
    {
        SbomFormat = "spdx-2.3",
        RekorLogIndex = 12345678
    };

    // Act
    var archive = await _builder.BuildAsync(request);
    _streamsToDispose.Add(archive.Stream);

    // Assert: VERIFY.md is present and walks the reader through verification.
    var verifyMd = await ExtractFileContentAsync(archive.Stream, "VERIFY.md");
    Assert.NotNull(verifyMd);

    Assert.Contains("# SBOM Archive Verification", verifyMd);
    Assert.Contains("Quick Verification", verifyMd);
    Assert.Contains("Signature Verification", verifyMd);
    Assert.Contains("cosign verify-blob", verifyMd);
    Assert.Contains("sbom.spdx.json", verifyMd);
}
|
||||
|
||||
[Fact]
public async Task BuildAsync_VerifyMdIncludesRekorSectionWhenAvailable()
{
    // Arrange: request the Rekor proof with a known transparency-log index.
    var request = CreateMinimalRequest() with
    {
        IncludeRekorProof = true,
        RekorLogIndex = 12345678
    };

    // Act
    var archive = await _builder.BuildAsync(request);
    _streamsToDispose.Add(archive.Stream);

    // Assert: VERIFY.md documents the log entry and how to check it.
    var verifyMd = await ExtractFileContentAsync(archive.Stream, "VERIFY.md");

    Assert.Contains("Rekor Transparency Log", verifyMd);
    Assert.Contains("12345678", verifyMd);
    Assert.Contains("rekor-cli verify", verifyMd);
}
|
||||
|
||||
[Fact]
public async Task BuildAsync_VerifyMdIncludesFileHashTable()
{
    // Arrange
    var request = CreateMinimalRequest();

    // Act
    var archive = await _builder.BuildAsync(request);
    _streamsToDispose.Add(archive.Stream);

    // Assert: VERIFY.md lists every archive member with its hash plus the Merkle root.
    var verifyMd = await ExtractFileContentAsync(archive.Stream, "VERIFY.md");

    Assert.Contains("Archive Contents", verifyMd);
    Assert.Contains("| File | Size | SHA-256 |", verifyMd);
    Assert.Contains("Merkle Root", verifyMd);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Error Handling Tests
|
||||
|
||||
[Fact]
public async Task BuildAsync_WithNullRequest_ThrowsArgumentNullException()
{
    // A null request must be rejected up front rather than failing mid-build.
    Task Act() => _builder.BuildAsync(null!);

    await Assert.ThrowsAsync<ArgumentNullException>(Act);
}
|
||||
|
||||
[Fact]
public async Task BuildAsync_SupportsCancellation()
{
    // Arrange: a token that is already cancelled before the call starts.
    var request = CreateMinimalRequest();
    using var cts = new CancellationTokenSource();
    cts.Cancel();

    // Act & Assert: the builder must observe the token and abort.
    await Assert.ThrowsAsync<OperationCanceledException>(
        async () => await _builder.BuildAsync(request, cts.Token));
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Helpers
|
||||
|
||||
/// <summary>
/// Builds the smallest valid request the archive builder accepts: a stub SPDX 2.3
/// SBOM, a stub DSSE envelope, and a placeholder signing certificate, with the
/// optional Rekor proof and bundled schemas switched off.
/// </summary>
private static SignedSbomArchiveRequest CreateMinimalRequest()
{
    // Minimal-but-parseable SBOM payload.
    var sbomPayload = Encoding.UTF8.GetBytes("{\"spdxVersion\": \"SPDX-2.3\", \"packages\": []}");

    // Stub DSSE envelope; the builder treats these bytes as opaque.
    var dsseEnvelope = Encoding.UTF8.GetBytes("""
        {
        "payloadType": "application/vnd.in-toto+json",
        "payload": "base64-encoded-payload",
        "signatures": [{"sig": "test-signature"}]
        }
        """);

    // Placeholder PEM certificate (not a real key pair).
    var signingCert = """
        -----BEGIN CERTIFICATE-----
        MIIBkTCB+wIJAKHBfFmJ/r7CMA0GCSqGSIb3DQEBCwUAMBExDzANBgNVBAMMBnRl
        c3RjYTAeFw0yNjAxMTYwMDAwMDBaFw0yNzAxMTYwMDAwMDBaMBExDzANBgNVBAMM
        BnRlc3RjYTBcMA0GCSqGSIb3DQEBAQUAA0sAMEgCQQC5Q2QRqzFVcFm5AwQKDQCu
        xK5nMPVPu9F4Nz7Q3z5F5w5F5w5F5w5F5w5F5w5F5w5F5w5F5w5F5w5F5w5F5w5F
        AgMBAAGjUDBOMB0GA1UdDgQWBBQExample0MB8GA1UdIwQYMBaAFExample0MAwGA
        1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADQQExample
        -----END CERTIFICATE-----
        """;

    return new SignedSbomArchiveRequest
    {
        ScanId = ScanId.CreateNew(),
        SbomBytes = sbomPayload,
        SbomFormat = "spdx-2.3",
        DsseEnvelopeBytes = dsseEnvelope,
        SigningCertPem = signingCert,
        ImageRef = "ghcr.io/test/image:latest",
        ImageDigest = "sha256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
        ComponentCount = 5,
        PackageCount = 3,
        FileCount = 20,
        IncludeRekorProof = false,
        IncludeSchemas = false
    };
}
|
||||
|
||||
/// <summary>
/// Decompresses a gzip'd tar stream and returns the entry names found in its
/// 512-byte headers. This is a minimal parser for test assertions only: it reads
/// the name field (offset 0, 100 bytes) and size field (offset 124, octal) and
/// skips each entry's content. The input stream's position is restored to 0.
/// </summary>
private static async Task<List<string>> ExtractTarGzFileListAsync(Stream stream)
{
    var names = new List<string>();
    stream.Position = 0;

    // Inflate the whole archive into memory so entries can be skipped by seeking.
    await using var gunzip = new GZipStream(stream, CompressionMode.Decompress, leaveOpen: true);
    using var tar = new MemoryStream();
    await gunzip.CopyToAsync(tar);
    tar.Position = 0;

    var header = new byte[512];

    // Stop before the trailing 1024-byte end-of-archive marker.
    while (tar.Position < tar.Length - 1024)
    {
        var read = await tar.ReadAsync(header.AsMemory(0, 512));
        if (read < 512)
        {
            break;
        }

        // An all-zero block marks end-of-archive.
        if (header.All(b => b == 0))
        {
            break;
        }

        // Name: NUL-terminated ASCII, at most the first 100 header bytes.
        var terminator = Array.IndexOf(header, (byte)0);
        if (terminator < 0)
        {
            terminator = 100;
        }
        var name = Encoding.ASCII.GetString(header, 0, Math.Min(terminator, 100)).TrimEnd('\0');
        if (!string.IsNullOrEmpty(name))
        {
            names.Add(name);
        }

        // Size: octal ASCII at offset 124 (11 bytes).
        var octal = Encoding.ASCII.GetString(header, 124, 11).Trim('\0', ' ');
        var size = string.IsNullOrEmpty(octal) ? 0 : Convert.ToInt64(octal, 8);

        // Content occupies whole 512-byte blocks; jump past the padded span.
        tar.Position += ((size + 511) / 512) * 512;
    }

    stream.Position = 0;
    return names;
}
|
||||
|
||||
/// <summary>
/// Decompresses a gzip'd tar stream and returns the UTF-8 content of the first
/// entry whose name ends with <paramref name="fileNamePattern"/>, or null when
/// no entry matches. This is a minimal parser for test assertions only; the
/// input stream's position is restored to 0 before returning.
/// </summary>
private static async Task<string?> ExtractFileContentAsync(Stream stream, string fileNamePattern)
{
    stream.Position = 0;

    // Inflate the whole archive into memory so entries can be skipped by seeking.
    await using var gzipStream = new GZipStream(stream, CompressionMode.Decompress, leaveOpen: true);
    using var memoryStream = new MemoryStream();
    await gzipStream.CopyToAsync(memoryStream);

    memoryStream.Position = 0;
    var headerBuffer = new byte[512];

    // Stop before the trailing 1024-byte end-of-archive marker.
    while (memoryStream.Position < memoryStream.Length - 1024)
    {
        var bytesRead = await memoryStream.ReadAsync(headerBuffer.AsMemory(0, 512));
        if (bytesRead < 512) break;

        // An all-zero block marks end-of-archive.
        if (headerBuffer.All(b => b == 0)) break;

        // Name: NUL-terminated ASCII, at most the first 100 header bytes.
        var nameEnd = Array.IndexOf(headerBuffer, (byte)0);
        if (nameEnd < 0) nameEnd = 100;
        var fileName = Encoding.ASCII.GetString(headerBuffer, 0, Math.Min(nameEnd, 100)).TrimEnd('\0');

        // Size: octal ASCII at offset 124 (11 bytes).
        var sizeStr = Encoding.ASCII.GetString(headerBuffer, 124, 11).Trim('\0', ' ');
        var fileSize = string.IsNullOrEmpty(sizeStr) ? 0 : Convert.ToInt64(sizeStr, 8);

        // Ordinal comparison: entry names are machine identifiers, not text (CA1310).
        if (fileName.EndsWith(fileNamePattern, StringComparison.Ordinal))
        {
            var contentBuffer = new byte[fileSize];
            await memoryStream.ReadAsync(contentBuffer.AsMemory(0, (int)fileSize));
            stream.Position = 0;
            return Encoding.UTF8.GetString(contentBuffer);
        }

        // No content was read for a non-matching entry, so skip the entire
        // padded span in one move. (The original advanced the position in two
        // confusing steps — paddedSize - fileSize, then fileSize — with a
        // comment contradicting the code; the net effect is the same.)
        memoryStream.Position += ((fileSize + 511) / 512) * 512;
    }

    stream.Position = 0;
    return null;
}
|
||||
|
||||
/// <summary>Returns the lowercase hex SHA-256 digest of <paramref name="data"/>.</summary>
private static string ComputeSha256Hex(byte[] data) =>
    Convert.ToHexString(SHA256.HashData(data)).ToLowerInvariant();
|
||||
|
||||
#endregion
|
||||
}
|
||||
Reference in New Issue
Block a user