save progress
This commit is contained in:
@@ -0,0 +1,346 @@
|
||||
// <copyright file="ValidationEndpoints.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Validation;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
/// <summary>
/// SBOM validation endpoints.
/// Sprint: SPRINT_20260107_005_003 Task VG-006
/// </summary>
internal static class ValidationEndpoints
{
    /// <summary>
    /// Maps SBOM validation endpoints onto the <paramref name="app"/> route builder.
    /// </summary>
    public static void MapValidationEndpoints(this IEndpointRouteBuilder app)
    {
        ArgumentNullException.ThrowIfNull(app);

        var group = app.MapGroup("/api/v1/sbom")
            .WithTags("Validation")
            .RequireAuthorization();

        // POST /api/v1/sbom/validate
        group.MapPost("/validate", ValidateSbomAsync)
            .WithName("scanner.sbom.validate")
            .WithDescription("Validates an SBOM document against CycloneDX or SPDX schemas")
            .Accepts<byte[]>(
                "application/vnd.cyclonedx+json",
                "application/vnd.cyclonedx+xml",
                "application/spdx+json",
                "text/spdx",
                "application/json",
                "application/octet-stream")
            .Produces<SbomValidationResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status503ServiceUnavailable)
            .RequireAuthorization(ScannerPolicies.ScansRead);

        // GET /api/v1/sbom/validators
        group.MapGet("/validators", GetValidatorsAsync)
            .WithName("scanner.sbom.validators")
            .WithDescription("Gets information about available SBOM validators")
            .Produces<ValidatorsInfoResponseDto>(StatusCodes.Status200OK)
            .RequireAuthorization(ScannerPolicies.ScansRead);
    }

    /// <summary>
    /// Handles POST /api/v1/sbom/validate: buffers the raw request body, resolves the
    /// SBOM format (explicit <c>format</c> query parameter first, then content sniffing,
    /// then the Content-Type header) and runs the composite validator.
    /// </summary>
    /// <returns>200 with the validation report, 400 on bad input, 503 when no validator is available.</returns>
    private static async Task<IResult> ValidateSbomAsync(
        HttpContext context,
        [FromServices] CompositeValidator validator,
        [FromServices] IOptions<ValidationGateOptions> options,
        [FromQuery] string? format = null,
        CancellationToken cancellationToken = default)
    {
        var gateOptions = options.Value;

        // Short-circuit when validation is disabled: report success without reading the body.
        if (!gateOptions.Enabled || gateOptions.Mode == SbomValidationMode.Off)
        {
            return Results.Ok(new SbomValidationResponseDto
            {
                IsValid = true,
                Format = "unknown",
                ValidatorName = "validation-disabled",
                ValidatorVersion = "n/a",
                Message = "Validation is disabled",
                Diagnostics = Array.Empty<ValidationDiagnosticDto>()
            });
        }

        // Buffer the request body. NOTE(review): the read is unbounded here; request body
        // size limits are assumed to be enforced by the server/middleware — confirm.
        using var memoryStream = new MemoryStream();
        await context.Request.Body.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
        var sbomBytes = memoryStream.ToArray();

        if (sbomBytes.Length == 0)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Empty request body",
                Detail = "SBOM document is required",
                Status = StatusCodes.Status400BadRequest
            });
        }

        // Determine format: an explicit query parameter wins over auto-detection.
        SbomFormat sbomFormat;
        if (!string.IsNullOrWhiteSpace(format))
        {
            // Enum.TryParse also accepts raw numeric strings (e.g. "3"); require a defined
            // member so only the documented format names are accepted.
            if (!Enum.TryParse<SbomFormat>(format, ignoreCase: true, out sbomFormat) ||
                !Enum.IsDefined(sbomFormat))
            {
                return Results.BadRequest(new ProblemDetails
                {
                    Title = "Invalid format",
                    Detail = $"Unknown SBOM format: {format}. Supported: CycloneDxJson, CycloneDxXml, Spdx23Json, Spdx23TagValue, Spdx3JsonLd",
                    Status = StatusCodes.Status400BadRequest
                });
            }
        }
        else
        {
            // Sniff the document content first; fall back to the Content-Type header.
            sbomFormat = CompositeValidator.DetectFormat(sbomBytes);
            if (sbomFormat == SbomFormat.Unknown)
            {
                sbomFormat = DetectFormatFromContentType(context.Request.ContentType);
            }
        }

        // Run validation (auto mode when the format is still unknown).
        var validationOptions = gateOptions.ToValidationOptions();
        SbomValidationResult result;

        if (sbomFormat == SbomFormat.Unknown)
        {
            result = await validator.ValidateAutoAsync(sbomBytes, validationOptions, cancellationToken)
                .ConfigureAwait(false);
        }
        else
        {
            result = await validator.ValidateAsync(sbomBytes, sbomFormat, validationOptions, cancellationToken)
                .ConfigureAwait(false);
        }

        // Surface validator unavailability as 503 rather than a validation failure.
        // Single FirstOrDefault scan instead of the previous Any + FirstOrDefault pair.
        var unavailable = result.Diagnostics.FirstOrDefault(d => d.Code == "VALIDATOR_UNAVAILABLE");
        if (unavailable is not null)
        {
            return Results.Problem(
                title: "Validator unavailable",
                detail: unavailable.Message,
                statusCode: StatusCodes.Status503ServiceUnavailable);
        }

        var response = new SbomValidationResponseDto
        {
            IsValid = result.IsValid,
            Format = result.Format.ToString(),
            ValidatorName = result.ValidatorName,
            ValidatorVersion = result.ValidatorVersion,
            ValidationDurationMs = (int)result.ValidationDuration.TotalMilliseconds,
            ErrorCount = result.ErrorCount,
            WarningCount = result.WarningCount,
            SchemaVersion = result.SchemaVersion,
            Diagnostics = result.Diagnostics.Select(d => new ValidationDiagnosticDto
            {
                Severity = d.Severity.ToString(),
                Code = d.Code,
                Message = d.Message,
                Path = d.Path,
                Line = d.Line,
                Suggestion = d.Suggestion
            }).ToArray()
        };

        return Results.Ok(response);
    }

    /// <summary>
    /// Handles GET /api/v1/sbom/validators: reports availability and capabilities
    /// of the composite validator.
    /// </summary>
    private static async Task<IResult> GetValidatorsAsync(
        [FromServices] CompositeValidator validator,
        CancellationToken cancellationToken)
    {
        var info = await validator.GetInfoAsync(cancellationToken).ConfigureAwait(false);

        var response = new ValidatorsInfoResponseDto
        {
            IsAvailable = info.IsAvailable,
            Name = info.Name,
            Version = info.Version,
            SupportedFormats = info.SupportedFormats.Select(f => f.ToString()).ToArray(),
            SupportedSchemaVersions = info.SupportedSchemaVersions.ToArray()
        };

        return Results.Ok(response);
    }

    /// <summary>
    /// Maps a request Content-Type header to an <see cref="SbomFormat"/>.
    /// Returns <see cref="SbomFormat.Unknown"/> when the header is missing or unrecognized.
    /// </summary>
    private static SbomFormat DetectFormatFromContentType(string? contentType)
    {
        if (string.IsNullOrWhiteSpace(contentType))
        {
            return SbomFormat.Unknown;
        }

        return contentType.ToLowerInvariant() switch
        {
            var ct when ct.Contains("cyclonedx+json") => SbomFormat.CycloneDxJson,
            // Parenthesized explicitly: '&&' binds tighter than '||'.
            var ct when ct.Contains("cyclonedx+xml") || (ct.Contains("cyclonedx") && ct.Contains("xml")) => SbomFormat.CycloneDxXml,
            var ct when ct.Contains("spdx+json") || (ct.Contains("spdx") && ct.Contains("json")) => SbomFormat.Spdx23Json,
            var ct when ct.Contains("text/spdx") => SbomFormat.Spdx23TagValue,
            _ => SbomFormat.Unknown
        };
    }
}
|
||||
|
||||
/// <summary>
/// Wire contract returned by POST /api/v1/sbom/validate describing a single
/// validation run over one SBOM document.
/// </summary>
public sealed class SbomValidationResponseDto
{
    /// <summary>
    /// Gets or sets whether the SBOM passed validation.
    /// </summary>
    [JsonPropertyName("isValid")]
    public bool IsValid { get; set; }

    /// <summary>
    /// Gets or sets the detected or requested SBOM format name.
    /// </summary>
    [JsonPropertyName("format")]
    public required string Format { get; set; }

    /// <summary>
    /// Gets or sets the name of the validator that produced this result.
    /// </summary>
    [JsonPropertyName("validatorName")]
    public required string ValidatorName { get; set; }

    /// <summary>
    /// Gets or sets the version of the validator that produced this result.
    /// </summary>
    [JsonPropertyName("validatorVersion")]
    public required string ValidatorVersion { get; set; }

    /// <summary>
    /// Gets or sets how long validation took, in whole milliseconds.
    /// </summary>
    [JsonPropertyName("validationDurationMs")]
    public int ValidationDurationMs { get; set; }

    /// <summary>
    /// Gets or sets the number of error-severity diagnostics.
    /// </summary>
    [JsonPropertyName("errorCount")]
    public int ErrorCount { get; set; }

    /// <summary>
    /// Gets or sets the number of warning-severity diagnostics.
    /// </summary>
    [JsonPropertyName("warningCount")]
    public int WarningCount { get; set; }

    /// <summary>
    /// Gets or sets the schema version the document was validated against, if known.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    public string? SchemaVersion { get; set; }

    /// <summary>
    /// Gets or sets an informational message (used when validation is disabled).
    /// </summary>
    [JsonPropertyName("message")]
    public string? Message { get; set; }

    /// <summary>
    /// Gets or sets the individual diagnostics produced during validation.
    /// </summary>
    [JsonPropertyName("diagnostics")]
    public required ValidationDiagnosticDto[] Diagnostics { get; set; }
}
|
||||
|
||||
/// <summary>
/// Wire contract for one diagnostic entry emitted by an SBOM validator.
/// </summary>
public sealed class ValidationDiagnosticDto
{
    /// <summary>
    /// Gets or sets the severity name (e.g. error or warning).
    /// </summary>
    [JsonPropertyName("severity")]
    public required string Severity { get; set; }

    /// <summary>
    /// Gets or sets the machine-readable diagnostic code.
    /// </summary>
    [JsonPropertyName("code")]
    public required string Code { get; set; }

    /// <summary>
    /// Gets or sets the human-readable diagnostic message.
    /// </summary>
    [JsonPropertyName("message")]
    public required string Message { get; set; }

    /// <summary>
    /// Gets or sets the JSON path within the document, when applicable.
    /// </summary>
    [JsonPropertyName("path")]
    public string? Path { get; set; }

    /// <summary>
    /// Gets or sets the source line number, when applicable.
    /// </summary>
    [JsonPropertyName("line")]
    public int? Line { get; set; }

    /// <summary>
    /// Gets or sets a remediation suggestion, when one is available.
    /// </summary>
    [JsonPropertyName("suggestion")]
    public string? Suggestion { get; set; }
}
|
||||
|
||||
/// <summary>
/// Wire contract returned by GET /api/v1/sbom/validators describing the
/// composite validator's availability and capabilities.
/// </summary>
public sealed class ValidatorsInfoResponseDto
{
    /// <summary>
    /// Gets or sets whether any validator is currently available.
    /// </summary>
    [JsonPropertyName("isAvailable")]
    public bool IsAvailable { get; set; }

    /// <summary>
    /// Gets or sets the composite validator's name.
    /// </summary>
    [JsonPropertyName("name")]
    public required string Name { get; set; }

    /// <summary>
    /// Gets or sets the composite validator's version.
    /// </summary>
    [JsonPropertyName("version")]
    public required string Version { get; set; }

    /// <summary>
    /// Gets or sets the names of the SBOM formats the validator accepts.
    /// </summary>
    [JsonPropertyName("supportedFormats")]
    public required string[] SupportedFormats { get; set; }

    /// <summary>
    /// Gets or sets the schema versions the validator can check against.
    /// </summary>
    [JsonPropertyName("supportedSchemaVersions")]
    public required string[] SupportedSchemaVersions { get; set; }
}
|
||||
@@ -2,8 +2,10 @@
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Emit.Composition;
|
||||
using StellaOps.Scanner.Emit.Spdx;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
@@ -102,8 +104,8 @@ public sealed class SbomExportService : ISbomExportService
|
||||
artifact.JsonBytes,
|
||||
SbomExportFormat.Spdx3,
|
||||
profile,
|
||||
artifact.JsonDigest,
|
||||
artifact.ComponentCount));
|
||||
artifact.JsonSha256,
|
||||
0)); // ComponentCount not available on SpdxArtifact
|
||||
}
|
||||
|
||||
private async Task<SbomExportResult> ExportSpdx2Async(
|
||||
@@ -177,25 +179,73 @@ public sealed class SbomExportService : ISbomExportService
|
||||
ScanSnapshot snapshot,
|
||||
IReadOnlyList<SbomLayerFragment> layerFragments)
|
||||
{
|
||||
// Convert SbomLayerFragment to the format expected by SpdxComposer
|
||||
var fragments = layerFragments.Select(f => new Scanner.Core.Contracts.LayerSbomFragment
|
||||
// Convert SbomLayerFragment to LayerComponentFragment for SpdxComposer
|
||||
var fragments = layerFragments.Select(f => new LayerComponentFragment
|
||||
{
|
||||
LayerDigest = f.LayerDigest,
|
||||
Order = f.Order,
|
||||
ComponentPurls = f.ComponentPurls.ToList()
|
||||
}).ToList();
|
||||
Components = f.ComponentPurls
|
||||
.Select(purl => new ComponentRecord
|
||||
{
|
||||
Identity = ComponentIdentity.Create(
|
||||
key: purl,
|
||||
name: ExtractNameFromPurl(purl),
|
||||
version: ExtractVersionFromPurl(purl),
|
||||
purl: purl),
|
||||
LayerDigest = f.LayerDigest
|
||||
})
|
||||
.ToImmutableArray()
|
||||
}).ToImmutableArray();
|
||||
|
||||
return new SbomCompositionRequest
|
||||
var image = new ImageArtifactDescriptor
|
||||
{
|
||||
Image = new Scanner.Core.Contracts.ImageReference
|
||||
{
|
||||
ImageDigest = snapshot.Target.Digest ?? string.Empty,
|
||||
ImageRef = snapshot.Target.Reference ?? string.Empty
|
||||
},
|
||||
LayerFragments = fragments,
|
||||
GeneratedAt = _timeProvider.GetUtcNow(),
|
||||
GeneratorVersion = "StellaOps-Scanner/1.0"
|
||||
ImageDigest = snapshot.Target.Digest ?? string.Empty,
|
||||
ImageReference = snapshot.Target.Reference
|
||||
};
|
||||
|
||||
return SbomCompositionRequest.Create(
|
||||
image,
|
||||
fragments,
|
||||
_timeProvider.GetUtcNow(),
|
||||
generatorName: "StellaOps-Scanner",
|
||||
generatorVersion: "1.0");
|
||||
}
|
||||
|
||||
private static string ExtractNameFromPurl(string purl)
{
    // Basic PURL parsing: pkg:type/namespace/name@version?qualifiers#subpath
    // Returns the name portion (last path segment before any of '@', '?', '#').
    // Fix: previously only '@' terminated the path part, so a version-less PURL
    // with qualifiers (e.g. "pkg:npm/lodash?arch=x64") leaked "?arch=x64" into the name.
    try
    {
        var withoutScheme = purl.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase)
            ? purl[4..]
            : purl;

        // The path part ends at the first version/qualifier/subpath delimiter.
        var cut = withoutScheme.IndexOfAny(new[] { '@', '?', '#' });
        var pathPart = cut >= 0 ? withoutScheme[..cut] : withoutScheme;

        var slashIndex = pathPart.LastIndexOf('/');
        return slashIndex >= 0 ? pathPart[(slashIndex + 1)..] : pathPart;
    }
    catch
    {
        // Best-effort: fall back to the raw input rather than failing composition.
        return purl;
    }
}
|
||||
|
||||
private static string? ExtractVersionFromPurl(string purl)
{
    // Basic PURL parsing: pkg:type/namespace/name@version?qualifiers#subpath
    // Returns the version portion, or null when the PURL carries no version.
    // Fix: previously only '?' was trimmed, so "pkg:npm/x@1.0#src" yielded "1.0#src".
    try
    {
        var atIndex = purl.IndexOf('@');
        if (atIndex < 0)
        {
            return null;
        }

        var versionPart = purl[(atIndex + 1)..];

        // The version ends at the first qualifier ('?') or subpath ('#') delimiter.
        var cut = versionPart.IndexOfAny(new[] { '?', '#' });
        return cut >= 0 ? versionPart[..cut] : versionPart;
    }
    catch
    {
        // Best-effort: treat unparseable input as version-less.
        return null;
    }
}
|
||||
|
||||
private static int EstimateComponentCount(byte[] sbomBytes)
|
||||
|
||||
@@ -83,7 +83,7 @@ public interface ISecretExceptionPatternService
|
||||
/// </summary>
|
||||
public sealed class SecretDetectionSettingsService : ISecretDetectionSettingsService
|
||||
{
|
||||
private readonly ISecretDetectionSettingsRepository _repository;
|
||||
private readonly Storage.Repositories.ISecretDetectionSettingsRepository _repository;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
@@ -92,7 +92,7 @@ public sealed class SecretDetectionSettingsService : ISecretDetectionSettingsSer
|
||||
};
|
||||
|
||||
public SecretDetectionSettingsService(
|
||||
ISecretDetectionSettingsRepository repository,
|
||||
Storage.Repositories.ISecretDetectionSettingsRepository repository,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
|
||||
@@ -53,6 +53,7 @@
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Orchestration/StellaOps.Scanner.Orchestration.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Sources/StellaOps.Scanner.Sources.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Validation/StellaOps.Scanner.Validation.csproj" />
|
||||
<ProjectReference Include="../../Router/__Libraries/StellaOps.Router.AspNet/StellaOps.Router.AspNet.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ using ProtoSerializer = CycloneDX.Protobuf.Serializer;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using StellaOps.Scanner.Emit.Evidence;
|
||||
using StellaOps.Scanner.Emit.Pedigree;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
@@ -178,7 +179,7 @@ public sealed class CycloneDxComposer
|
||||
SpecVersion = SpecificationVersion.v1_6,
|
||||
Version = 1,
|
||||
Metadata = BuildMetadata(request, view, generatedAt),
|
||||
Components = BuildComponents(components),
|
||||
Components = BuildComponents(request, components),
|
||||
Dependencies = BuildDependencies(components),
|
||||
};
|
||||
|
||||
@@ -318,9 +319,19 @@ public sealed class CycloneDxComposer
|
||||
return purlBuilder.ToString();
|
||||
}
|
||||
|
||||
private static List<Component> BuildComponents(ImmutableArray<AggregatedComponent> components)
|
||||
/// <summary>
|
||||
/// Builds CycloneDX component models from aggregated components.
|
||||
/// Sprint: SPRINT_20260107_005_002 Task PD-009 - Added pedigree support.
|
||||
/// </summary>
|
||||
private static List<Component> BuildComponents(
|
||||
SbomCompositionRequest request,
|
||||
ImmutableArray<AggregatedComponent> components)
|
||||
{
|
||||
var evidenceMapper = new CycloneDxEvidenceMapper();
|
||||
var pedigreeMapper = request.IncludePedigree && request.PedigreeDataByPurl is not null
|
||||
? new CycloneDxPedigreeMapper()
|
||||
: null;
|
||||
|
||||
var result = new List<Component>(components.Length);
|
||||
foreach (var component in components)
|
||||
{
|
||||
@@ -337,6 +348,16 @@ public sealed class CycloneDxComposer
|
||||
Evidence = evidenceMapper.Map(component),
|
||||
};
|
||||
|
||||
// Apply pedigree data if available and enabled
|
||||
// Sprint: SPRINT_20260107_005_002 Task PD-009
|
||||
if (pedigreeMapper is not null && !string.IsNullOrEmpty(component.Identity.Purl))
|
||||
{
|
||||
if (request.PedigreeDataByPurl!.TryGetValue(component.Identity.Purl, out var pedigreeData))
|
||||
{
|
||||
model.Pedigree = pedigreeMapper.Map(pedigreeData);
|
||||
}
|
||||
}
|
||||
|
||||
result.Add(model);
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using StellaOps.Scanner.Emit.Pedigree;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
@@ -45,6 +46,21 @@ public sealed record SbomCompositionRequest
|
||||
public ImmutableArray<SbomPolicyFinding> PolicyFindings { get; init; }
|
||||
= ImmutableArray<SbomPolicyFinding>.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Gets the pre-fetched pedigree data keyed by component PURL.
|
||||
/// This enables synchronous composition while allowing async pedigree lookups
|
||||
/// to happen before calling <see cref="CycloneDxComposer.Compose"/>.
|
||||
/// Sprint: SPRINT_20260107_005_002 Task PD-009
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, PedigreeData>? PedigreeDataByPurl { get; init; }
|
||||
= null;
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether pedigree data should be included in the SBOM.
|
||||
/// Defaults to true if pedigree data is provided.
|
||||
/// </summary>
|
||||
public bool IncludePedigree { get; init; } = true;
|
||||
|
||||
public static SbomCompositionRequest Create(
|
||||
ImageArtifactDescriptor image,
|
||||
IEnumerable<LayerComponentFragment> fragments,
|
||||
@@ -52,7 +68,9 @@ public sealed record SbomCompositionRequest
|
||||
string? generatorName = null,
|
||||
string? generatorVersion = null,
|
||||
IReadOnlyDictionary<string, string>? properties = null,
|
||||
IEnumerable<SbomPolicyFinding>? policyFindings = null)
|
||||
IEnumerable<SbomPolicyFinding>? policyFindings = null,
|
||||
IReadOnlyDictionary<string, PedigreeData>? pedigreeData = null,
|
||||
bool includePedigree = true)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(image);
|
||||
ArgumentNullException.ThrowIfNull(fragments);
|
||||
@@ -75,6 +93,8 @@ public sealed record SbomCompositionRequest
|
||||
GeneratorVersion = Normalize(generatorVersion),
|
||||
AdditionalProperties = properties,
|
||||
PolicyFindings = NormalizePolicyFindings(policyFindings),
|
||||
PedigreeDataByPurl = pedigreeData,
|
||||
IncludePedigree = includePedigree,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,604 @@
|
||||
// <copyright file="SbomValidationPipeline.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Validation;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
/// <summary>
/// Configuration for the post-generation SBOM validation pipeline.
/// </summary>
public sealed class SbomValidationPipelineOptions
{
    /// <summary>
    /// Gets or sets whether the pipeline runs at all. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Gets or sets whether a validation failure should fail the composition. Default: true.
    /// </summary>
    public bool FailOnError { get; set; } = true;

    /// <summary>
    /// Gets or sets whether CycloneDX documents are validated. Default: true.
    /// </summary>
    public bool ValidateCycloneDx { get; set; } = true;

    /// <summary>
    /// Gets or sets whether SPDX documents are validated. Default: true.
    /// </summary>
    public bool ValidateSpdx { get; set; } = true;

    /// <summary>
    /// Gets or sets the per-run validation timeout. Default: 60 seconds.
    /// </summary>
    public TimeSpan ValidationTimeout { get; set; } = TimeSpan.FromSeconds(60);
}
|
||||
|
||||
/// <summary>
/// Result of SBOM validation pipeline execution.
/// </summary>
public sealed record SbomValidationPipelineResult
{
    /// <summary>
    /// Gets whether all validations passed.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Gets the CycloneDX inventory validation result, if performed.
    /// </summary>
    public SbomValidationResult? CycloneDxInventoryResult { get; init; }

    /// <summary>
    /// Gets the CycloneDX usage validation result, if performed.
    /// </summary>
    public SbomValidationResult? CycloneDxUsageResult { get; init; }

    /// <summary>
    /// Gets the SPDX inventory validation result, if performed.
    /// </summary>
    public SbomValidationResult? SpdxInventoryResult { get; init; }

    /// <summary>
    /// Gets the per-layer validation results, if performed.
    /// </summary>
    public ImmutableArray<LayerValidationResult> LayerResults { get; init; } = [];

    /// <summary>
    /// Gets the total number of errors across all validations.
    /// </summary>
    public int TotalErrorCount =>
        (CycloneDxInventoryResult?.ErrorCount ?? 0) +
        (CycloneDxUsageResult?.ErrorCount ?? 0) +
        (SpdxInventoryResult?.ErrorCount ?? 0) +
        // Single pass over the layers instead of two separate Sum() enumerations.
        LayerResults.Sum(r => (r.CycloneDxResult?.ErrorCount ?? 0) + (r.SpdxResult?.ErrorCount ?? 0));

    /// <summary>
    /// Gets the total number of warnings across all validations.
    /// </summary>
    public int TotalWarningCount =>
        (CycloneDxInventoryResult?.WarningCount ?? 0) +
        (CycloneDxUsageResult?.WarningCount ?? 0) +
        (SpdxInventoryResult?.WarningCount ?? 0) +
        LayerResults.Sum(r => (r.CycloneDxResult?.WarningCount ?? 0) + (r.SpdxResult?.WarningCount ?? 0));

    /// <summary>
    /// Gets whether validation was skipped entirely.
    /// </summary>
    public bool WasSkipped { get; init; }

    /// <summary>
    /// Creates a successful validation result.
    /// </summary>
    public static SbomValidationPipelineResult Success(
        SbomValidationResult? cycloneDxInventory = null,
        SbomValidationResult? cycloneDxUsage = null,
        SbomValidationResult? spdxInventory = null,
        ImmutableArray<LayerValidationResult>? layerResults = null) =>
        Create(isValid: true, cycloneDxInventory, cycloneDxUsage, spdxInventory, layerResults);

    /// <summary>
    /// Creates a failed validation result.
    /// </summary>
    public static SbomValidationPipelineResult Failure(
        SbomValidationResult? cycloneDxInventory = null,
        SbomValidationResult? cycloneDxUsage = null,
        SbomValidationResult? spdxInventory = null,
        ImmutableArray<LayerValidationResult>? layerResults = null) =>
        Create(isValid: false, cycloneDxInventory, cycloneDxUsage, spdxInventory, layerResults);

    /// <summary>
    /// Creates a skipped validation result (reported as valid).
    /// </summary>
    public static SbomValidationPipelineResult Skipped() =>
        new() { IsValid = true, WasSkipped = true };

    // Shared builder so Success/Failure do not duplicate the whole initializer.
    private static SbomValidationPipelineResult Create(
        bool isValid,
        SbomValidationResult? cycloneDxInventory,
        SbomValidationResult? cycloneDxUsage,
        SbomValidationResult? spdxInventory,
        ImmutableArray<LayerValidationResult>? layerResults) =>
        new()
        {
            IsValid = isValid,
            CycloneDxInventoryResult = cycloneDxInventory,
            CycloneDxUsageResult = cycloneDxUsage,
            SpdxInventoryResult = spdxInventory,
            LayerResults = layerResults ?? []
        };
}
|
||||
|
||||
/// <summary>
/// Validation outcome for a single image layer's SBOMs.
/// </summary>
public sealed record LayerValidationResult
{
    /// <summary>
    /// Gets the layer identifier (digest or index).
    /// </summary>
    public required string LayerId { get; init; }

    /// <summary>
    /// Gets the CycloneDX validation result for this layer, if one was produced.
    /// </summary>
    public SbomValidationResult? CycloneDxResult { get; init; }

    /// <summary>
    /// Gets the SPDX validation result for this layer, if one was produced.
    /// </summary>
    public SbomValidationResult? SpdxResult { get; init; }

    /// <summary>
    /// Gets whether this layer passed validation. A missing (null) result
    /// counts as passing; only an explicit failure makes the layer invalid.
    /// </summary>
    public bool IsValid =>
        CycloneDxResult is not { IsValid: false } &&
        SpdxResult is not { IsValid: false };
}
|
||||
|
||||
/// <summary>
|
||||
/// Pipeline for validating generated SBOMs.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Sprint: SPRINT_20260107_005_003 Task VG-005
|
||||
/// This pipeline runs validation after SBOM generation and can optionally
|
||||
/// fail the composition if validation errors are detected.
|
||||
/// </remarks>
|
||||
public sealed class SbomValidationPipeline
|
||||
{
|
||||
private readonly ISbomValidator _validator;
|
||||
private readonly IOptions<SbomValidationPipelineOptions> _options;
|
||||
private readonly ILogger<SbomValidationPipeline> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
// Metrics
|
||||
private readonly Counter<long> _validationRuns;
|
||||
private readonly Counter<long> _validationPassed;
|
||||
private readonly Counter<long> _validationFailed;
|
||||
private readonly Counter<long> _validationSkipped;
|
||||
private readonly Histogram<double> _validationDuration;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="SbomValidationPipeline"/> class.
|
||||
/// </summary>
|
||||
public SbomValidationPipeline(
|
||||
ISbomValidator validator,
|
||||
IOptions<SbomValidationPipelineOptions> options,
|
||||
ILogger<SbomValidationPipeline> logger,
|
||||
TimeProvider timeProvider,
|
||||
IMeterFactory? meterFactory = null)
|
||||
{
|
||||
_validator = validator ?? throw new ArgumentNullException(nameof(validator));
|
||||
_options = options ?? throw new ArgumentNullException(nameof(options));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
|
||||
// Initialize metrics
|
||||
var meter = meterFactory?.Create("StellaOps.Scanner.Validation") ??
|
||||
new Meter("StellaOps.Scanner.Validation");
|
||||
|
||||
_validationRuns = meter.CreateCounter<long>(
|
||||
"sbom.validation.runs",
|
||||
"runs",
|
||||
"Total number of validation pipeline runs");
|
||||
|
||||
_validationPassed = meter.CreateCounter<long>(
|
||||
"sbom.validation.passed",
|
||||
"runs",
|
||||
"Number of validation runs that passed");
|
||||
|
||||
_validationFailed = meter.CreateCounter<long>(
|
||||
"sbom.validation.failed",
|
||||
"runs",
|
||||
"Number of validation runs that failed");
|
||||
|
||||
_validationSkipped = meter.CreateCounter<long>(
|
||||
"sbom.validation.skipped",
|
||||
"runs",
|
||||
"Number of validation runs that were skipped");
|
||||
|
||||
_validationDuration = meter.CreateHistogram<double>(
|
||||
"sbom.validation.duration",
|
||||
"ms",
|
||||
"Duration of validation pipeline execution");
|
||||
}
|
||||
|
||||
/// <summary>
/// Runs the full SBOM validation pipeline against a composition result:
/// CycloneDX inventory/usage, SPDX inventory, and any per-layer SBOMs.
/// </summary>
/// <param name="result">The composition result to validate.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The validation pipeline result.</returns>
/// <exception cref="SbomValidationException">
/// Thrown when validation fails and <see cref="SbomValidationPipelineOptions.FailOnError"/> is true.
/// </exception>
public async Task<SbomValidationPipelineResult> ValidateAsync(
    SbomCompositionResult result,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(result);

    var opts = _options.Value;
    var startTime = _timeProvider.GetTimestamp();

    // Counted for every invocation, including disabled/skipped runs.
    _validationRuns.Add(1);

    try
    {
        if (!opts.Enabled)
        {
            _logger.LogDebug("SBOM validation is disabled, skipping");
            _validationSkipped.Add(1);
            return SbomValidationPipelineResult.Skipped();
        }

        _logger.LogInformation("Starting SBOM validation pipeline");

        var validationOptions = new SbomValidationOptions
        {
            Timeout = opts.ValidationTimeout
        };

        // Validate main SBOMs in parallel; each task carries a name tag so the
        // results can be matched back regardless of completion order.
        var tasks = new List<Task<(string Name, SbomValidationResult? Result)>>();

        if (opts.ValidateCycloneDx)
        {
            tasks.Add(ValidateCycloneDxAsync(
                "CycloneDX-Inventory",
                result.Inventory.JsonBytes,
                validationOptions,
                cancellationToken));

            // Usage SBOM is optional on the composition result.
            if (result.Usage is not null)
            {
                tasks.Add(ValidateCycloneDxAsync(
                    "CycloneDX-Usage",
                    result.Usage.JsonBytes,
                    validationOptions,
                    cancellationToken));
            }
        }

        if (opts.ValidateSpdx && result.SpdxInventory is not null)
        {
            tasks.Add(ValidateSpdxAsync(
                "SPDX-Inventory",
                result.SpdxInventory.JsonBytes,
                validationOptions,
                cancellationToken));
        }

        var mainResults = await Task.WhenAll(tasks).ConfigureAwait(false);

        // Extract results by name tag.
        SbomValidationResult? cdxInventory = null;
        SbomValidationResult? cdxUsage = null;
        SbomValidationResult? spdxInventory = null;

        foreach (var (name, validationResult) in mainResults)
        {
            switch (name)
            {
                case "CycloneDX-Inventory":
                    cdxInventory = validationResult;
                    break;
                case "CycloneDX-Usage":
                    cdxUsage = validationResult;
                    break;
                case "SPDX-Inventory":
                    spdxInventory = validationResult;
                    break;
            }
        }

        // Validate layer SBOMs if present.
        var layerResults = await ValidateLayersAsync(
            result.LayerSbomArtifacts,
            validationOptions,
            opts,
            cancellationToken).ConfigureAwait(false);

        // Determine overall validity; a document that was not validated
        // (null result) counts as valid here.
        var allValid =
            (cdxInventory?.IsValid ?? true) &&
            (cdxUsage?.IsValid ?? true) &&
            (spdxInventory?.IsValid ?? true) &&
            layerResults.All(r => r.IsValid);

        var pipelineResult = allValid
            ? SbomValidationPipelineResult.Success(cdxInventory, cdxUsage, spdxInventory, layerResults)
            : SbomValidationPipelineResult.Failure(cdxInventory, cdxUsage, spdxInventory, layerResults);

        // Log summary
        LogValidationSummary(pipelineResult);

        // Update metrics
        if (allValid)
        {
            _validationPassed.Add(1);
        }
        else
        {
            _validationFailed.Add(1);
        }

        // Throw if configured to fail on error
        if (!allValid && opts.FailOnError)
        {
            throw new SbomValidationException(
                $"SBOM validation failed with {pipelineResult.TotalErrorCount} error(s)",
                pipelineResult);
        }

        return pipelineResult;
    }
    finally
    {
        // Duration is recorded for every run, including skipped and throwing ones.
        var elapsed = _timeProvider.GetElapsedTime(startTime);
        _validationDuration.Record(elapsed.TotalMilliseconds);
    }
}
|
||||
|
||||
/// <summary>
/// Validates a single CycloneDX JSON document and tags the outcome with
/// <paramref name="name"/>. Validator failures other than cancellation are
/// downgraded to a "validator unavailable" result rather than propagated.
/// </summary>
private async Task<(string Name, SbomValidationResult? Result)> ValidateCycloneDxAsync(
    string name,
    byte[] content,
    SbomValidationOptions options,
    CancellationToken cancellationToken)
{
    try
    {
        _logger.LogDebug("Validating {Name} ({Size} bytes)", name, content.Length);

        var validation = await _validator
            .ValidateAsync(content, SbomFormat.CycloneDxJson, options, cancellationToken)
            .ConfigureAwait(false);

        LogValidationResult(name, validation);
        return (name, validation);
    }
    catch (Exception ex) when (ex is not OperationCanceledException)
    {
        _logger.LogWarning(ex, "Failed to validate {Name}: {Message}", name, ex.Message);

        var unavailable = SbomValidationResult.ValidatorUnavailable(
            SbomFormat.CycloneDxJson,
            "CycloneDX",
            ex.Message);
        return (name, unavailable);
    }
}
|
||||
|
||||
/// <summary>
/// Validates a single SPDX 3 JSON-LD document and tags the outcome with
/// <paramref name="name"/>. Validator failures other than cancellation are
/// downgraded to a "validator unavailable" result rather than propagated.
/// </summary>
private async Task<(string Name, SbomValidationResult? Result)> ValidateSpdxAsync(
    string name,
    byte[] content,
    SbomValidationOptions options,
    CancellationToken cancellationToken)
{
    try
    {
        _logger.LogDebug("Validating {Name} ({Size} bytes)", name, content.Length);

        var validation = await _validator
            .ValidateAsync(content, SbomFormat.Spdx3JsonLd, options, cancellationToken)
            .ConfigureAwait(false);

        LogValidationResult(name, validation);
        return (name, validation);
    }
    catch (Exception ex) when (ex is not OperationCanceledException)
    {
        _logger.LogWarning(ex, "Failed to validate {Name}: {Message}", name, ex.Message);

        var unavailable = SbomValidationResult.ValidatorUnavailable(
            SbomFormat.Spdx3JsonLd,
            "SPDX",
            ex.Message);
        return (name, unavailable);
    }
}
|
||||
|
||||
/// <summary>
/// Validates the per-layer SBOM artifacts sequentially and collects one
/// <see cref="LayerValidationResult"/> per layer. Per-format validation is
/// gated by the pipeline options and by whether the layer carries that format.
/// </summary>
private async Task<ImmutableArray<LayerValidationResult>> ValidateLayersAsync(
    ImmutableArray<LayerSbomArtifact> layerArtifacts,
    SbomValidationOptions options,
    SbomValidationPipelineOptions pipelineOptions,
    CancellationToken cancellationToken)
{
    if (layerArtifacts.IsDefaultOrEmpty)
    {
        return [];
    }

    _logger.LogDebug("Validating {Count} layer SBOMs", layerArtifacts.Length);

    var collected = new List<LayerValidationResult>();

    foreach (var artifact in layerArtifacts)
    {
        SbomValidationResult? cycloneDx = null;
        SbomValidationResult? spdx = null;

        if (pipelineOptions.ValidateCycloneDx && artifact.CycloneDxJsonBytes is not null)
        {
            // Name tag is unused here; only the validation result matters.
            (_, cycloneDx) = await ValidateCycloneDxAsync(
                $"Layer-{artifact.LayerDigest}-CDX",
                artifact.CycloneDxJsonBytes,
                options,
                cancellationToken).ConfigureAwait(false);
        }

        if (pipelineOptions.ValidateSpdx && artifact.SpdxJsonBytes is not null)
        {
            (_, spdx) = await ValidateSpdxAsync(
                $"Layer-{artifact.LayerDigest}-SPDX",
                artifact.SpdxJsonBytes,
                options,
                cancellationToken).ConfigureAwait(false);
        }

        collected.Add(new LayerValidationResult
        {
            LayerId = artifact.LayerDigest,
            CycloneDxResult = cycloneDx,
            SpdxResult = spdx
        });
    }

    return [.. collected];
}
|
||||
|
||||
/// <summary>
/// Logs the outcome of a single validation: information on pass (including the
/// warning count when non-zero), a warning on failure followed by one line per
/// error-severity diagnostic.
/// </summary>
private void LogValidationResult(string name, SbomValidationResult result)
{
    if (!result.IsValid)
    {
        _logger.LogWarning(
            "{Name} validation failed with {ErrorCount} error(s), {WarningCount} warning(s)",
            name,
            result.ErrorCount,
            result.WarningCount);

        foreach (var diagnostic in result.Diagnostics.Where(d => d.Severity == SbomValidationSeverity.Error))
        {
            _logger.LogWarning("  [{Code}] {Message}", diagnostic.Code, diagnostic.Message);
        }

        return;
    }

    if (result.WarningCount > 0)
    {
        _logger.LogInformation(
            "{Name} validation passed with {WarningCount} warning(s)",
            name,
            result.WarningCount);
    }
    else
    {
        _logger.LogInformation("{Name} validation passed", name);
    }
}
|
||||
|
||||
/// <summary>
/// Logs a multi-line, human-readable summary of a pipeline result: per-document
/// pass/fail status, layer pass ratio, and total error/warning counts.
/// </summary>
/// <param name="result">The pipeline result to summarize.</param>
private void LogValidationSummary(SbomValidationPipelineResult result)
{
    var sb = new StringBuilder();
    sb.AppendLine("SBOM validation summary:");

    if (result.CycloneDxInventoryResult is not null)
    {
        sb.AppendLine($"  - CycloneDX Inventory: {(result.CycloneDxInventoryResult.IsValid ? "PASSED" : "FAILED")}");
    }

    if (result.CycloneDxUsageResult is not null)
    {
        sb.AppendLine($"  - CycloneDX Usage: {(result.CycloneDxUsageResult.IsValid ? "PASSED" : "FAILED")}");
    }

    if (result.SpdxInventoryResult is not null)
    {
        sb.AppendLine($"  - SPDX Inventory: {(result.SpdxInventoryResult.IsValid ? "PASSED" : "FAILED")}");
    }

    if (!result.LayerResults.IsDefaultOrEmpty)
    {
        var passedLayers = result.LayerResults.Count(r => r.IsValid);
        sb.AppendLine($"  - Layers: {passedLayers}/{result.LayerResults.Length} passed");
    }

    sb.AppendLine($"  Total errors: {result.TotalErrorCount}");
    sb.AppendLine($"  Total warnings: {result.TotalWarningCount}");

    // Pass the composed text as a structured argument instead of as the message
    // template: a dynamic template violates CA2254 (breaks structured-logging
    // message dedup) and would throw FormatException if the composed text ever
    // contained '{' or '}'.
    _logger.LogInformation("{ValidationSummary}", sb.ToString());
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Exception thrown when SBOM validation fails.
/// </summary>
public sealed class SbomValidationException : Exception
{
    /// <summary>
    /// Initializes a new instance of the <see cref="SbomValidationException"/> class
    /// with a message only.
    /// </summary>
    public SbomValidationException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="SbomValidationException"/> class
    /// carrying the pipeline result that triggered the failure.
    /// </summary>
    public SbomValidationException(string message, SbomValidationPipelineResult result)
        : base(message)
    {
        Result = result;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="SbomValidationException"/> class
    /// wrapping an inner exception.
    /// </summary>
    public SbomValidationException(string message, Exception innerException)
        : base(message, innerException)
    {
    }

    /// <summary>
    /// Gets the validation pipeline result, when the failure originated from a
    /// completed pipeline run; otherwise null.
    /// </summary>
    public SbomValidationPipelineResult? Result { get; }
}
|
||||
|
||||
/// <summary>
/// Extension methods for registering the validation pipeline.
/// </summary>
public static class SbomValidationPipelineExtensions
{
    /// <summary>
    /// Adds the SBOM validation pipeline to the service collection.
    /// </summary>
    /// <param name="services">The service collection to register into.</param>
    /// <param name="configure">Optional delegate customizing the pipeline options.</param>
    /// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
    public static IServiceCollection AddSbomValidationPipeline(
        this IServiceCollection services,
        Action<SbomValidationPipelineOptions>? configure = null)
    {
        // Options are data-annotation validated eagerly at host start-up.
        var optionsBuilder = services.AddOptions<SbomValidationPipelineOptions>();
        optionsBuilder.Configure(configure ?? (_ => { }));
        optionsBuilder.ValidateDataAnnotations();
        optionsBuilder.ValidateOnStart();

        services.AddSingleton<SbomValidationPipeline>();

        return services;
    }
}
|
||||
@@ -3,11 +3,12 @@
|
||||
// </copyright>
|
||||
|
||||
using CycloneDX.Models;
|
||||
using CdxPedigree = CycloneDX.Models.Pedigree;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Pedigree;
|
||||
|
||||
/// <summary>
|
||||
/// Maps <see cref="PedigreeData"/> to CycloneDX <see cref="Pedigree"/> model.
|
||||
/// Maps <see cref="PedigreeData"/> to CycloneDX <see cref="CdxPedigree"/> model.
|
||||
/// Sprint: SPRINT_20260107_005_002 Task PD-003
|
||||
/// </summary>
|
||||
public sealed class CycloneDxPedigreeMapper
|
||||
@@ -17,14 +18,14 @@ public sealed class CycloneDxPedigreeMapper
|
||||
/// </summary>
|
||||
/// <param name="data">The pedigree data to map.</param>
|
||||
/// <returns>CycloneDX pedigree model, or null if no data.</returns>
|
||||
public Pedigree? Map(PedigreeData? data)
|
||||
public CdxPedigree? Map(PedigreeData? data)
|
||||
{
|
||||
if (data is null || !data.HasData)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new Pedigree
|
||||
return new CdxPedigree
|
||||
{
|
||||
Ancestors = MapAncestors(data.Ancestors),
|
||||
Variants = MapVariants(data.Variants),
|
||||
@@ -158,7 +159,7 @@ public sealed class CycloneDxPedigreeMapper
|
||||
{
|
||||
Name = actor.Name,
|
||||
Email = actor.Email,
|
||||
Timestamp = actor.Timestamp
|
||||
Timestamp = actor.Timestamp?.UtcDateTime
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Validation\StellaOps.Scanner.Validation.csproj" />
|
||||
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Encodings.Web;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
@@ -28,7 +29,8 @@ public sealed class DriftAttestationService : IDriftAttestationService
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
|
||||
};
|
||||
|
||||
private readonly IDriftSignerClient? _signerClient;
|
||||
|
||||
@@ -0,0 +1,242 @@
|
||||
// <copyright file="FingerprintGeneratorTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Scanner.Sarif.Fingerprints;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Tests;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="FingerprintGenerator"/>.
/// </summary>
[Trait("Category", "Unit")]
public class FingerprintGeneratorTests
{
    // Generator under test, wired to the default SARIF rule registry.
    private readonly FingerprintGenerator _generator;

    public FingerprintGeneratorTests()
    {
        _generator = new FingerprintGenerator(new SarifRuleRegistry());
    }

    /// <summary>Same finding + Standard strategy twice yields the same 64-char SHA-256 hex string.</summary>
    [Fact]
    public void GeneratePrimary_Standard_ReturnsDeterministicFingerprint()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High
        };

        // Act
        var fp1 = _generator.GeneratePrimary(finding, FingerprintStrategy.Standard);
        var fp2 = _generator.GeneratePrimary(finding, FingerprintStrategy.Standard);

        // Assert
        fp1.Should().NotBeNullOrEmpty();
        fp1.Should().Be(fp2, "fingerprints should be deterministic");
        fp1.Should().HaveLength(64, "should be SHA-256 hex string");
    }

    /// <summary>Distinct findings (different CVE/title) must not collide.</summary>
    [Fact]
    public void GeneratePrimary_DifferentFindings_ProduceDifferentFingerprints()
    {
        // Arrange
        var finding1 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability 1",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High
        };

        var finding2 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability 2",
            VulnerabilityId = "CVE-2024-5678",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High
        };

        // Act
        var fp1 = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
        var fp2 = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);

        // Assert
        fp1.Should().NotBe(fp2);
    }

    /// <summary>
    /// Minimal strategy excludes the component from the hash input, so two findings
    /// differing only by component collapse to the same fingerprint (unlike Standard).
    /// </summary>
    [Fact]
    public void GeneratePrimary_Minimal_UsesFewerFields()
    {
        // Arrange
        var finding1 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High
        };

        var finding2 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/express@4.18.0", // Different component
            Severity = Severity.High
        };

        // Act
        var fp1Standard = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
        var fp2Standard = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);
        var fp1Minimal = _generator.GeneratePrimary(finding1, FingerprintStrategy.Minimal);
        var fp2Minimal = _generator.GeneratePrimary(finding2, FingerprintStrategy.Minimal);

        // Assert
        fp1Standard.Should().NotBe(fp2Standard, "standard fingerprints differ by component");
        fp1Minimal.Should().Be(fp2Minimal, "minimal fingerprints ignore component");
    }

    /// <summary>
    /// Extended strategy folds reachability and VEX status into the hash input;
    /// Standard ignores them.
    /// </summary>
    [Fact]
    public void GeneratePrimary_Extended_IncludesReachabilityAndVex()
    {
        // Arrange
        // Use reachability statuses that don't affect the rule ID
        var finding1 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High,
            Reachability = ReachabilityStatus.Unknown,
            VexStatus = VexStatus.Affected
        };

        var finding2 = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            Severity = Severity.High,
            Reachability = ReachabilityStatus.Contested,
            VexStatus = VexStatus.NotAffected
        };

        // Act
        var fp1Standard = _generator.GeneratePrimary(finding1, FingerprintStrategy.Standard);
        var fp2Standard = _generator.GeneratePrimary(finding2, FingerprintStrategy.Standard);
        var fp1Extended = _generator.GeneratePrimary(finding1, FingerprintStrategy.Extended);
        var fp2Extended = _generator.GeneratePrimary(finding2, FingerprintStrategy.Extended);

        // Assert
        fp1Standard.Should().Be(fp2Standard, "standard fingerprints ignore reachability/vex");
        fp1Extended.Should().NotBe(fp2Extended, "extended fingerprints include reachability/vex");
    }

    /// <summary>A component purl produces a "stellaops/component/v1" partial fingerprint.</summary>
    [Fact]
    public void GeneratePartial_WithComponent_IncludesComponentFingerprint()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            ComponentPurl = "pkg:npm/lodash@4.17.20"
        };

        // Act
        var partials = _generator.GeneratePartial(finding);

        // Assert
        partials.Should().ContainKey("stellaops/component/v1");
    }

    /// <summary>A vulnerability id produces a "stellaops/vuln/v1" partial fingerprint.</summary>
    [Fact]
    public void GeneratePartial_WithVulnerability_IncludesVulnFingerprint()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            VulnerabilityId = "CVE-2024-1234"
        };

        // Act
        var partials = _generator.GeneratePartial(finding);

        // Assert
        partials.Should().ContainKey("stellaops/vuln/v1");
    }

    /// <summary>File path + line produce the SARIF "primaryLocationLineHash/v1" partial fingerprint.</summary>
    [Fact]
    public void GeneratePartial_WithLocation_IncludesLocationFingerprint()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            FilePath = "src/app.ts",
            StartLine = 42
        };

        // Act
        var partials = _generator.GeneratePartial(finding);

        // Assert
        partials.Should().ContainKey("primaryLocationLineHash/v1");
    }

    /// <summary>Secret findings fall back to a title-based partial fingerprint.</summary>
    [Fact]
    public void GeneratePartial_Secret_IncludesTitleFingerprint()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Secret,
            Title = "AWS Access Key"
        };

        // Act
        var partials = _generator.GeneratePartial(finding);

        // Assert
        partials.Should().ContainKey("stellaops/title/v1");
    }

    /// <summary>Partial fingerprint maps are fully deterministic for identical inputs.</summary>
    [Fact]
    public void GeneratePartial_SameInputs_ProduceDeterministicResults()
    {
        // Arrange
        var finding = new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            FilePath = "src/app.ts",
            StartLine = 42
        };

        // Act
        var partials1 = _generator.GeneratePartial(finding);
        var partials2 = _generator.GeneratePartial(finding);

        // Assert
        partials1.Should().BeEquivalentTo(partials2);
    }
}
|
||||
@@ -0,0 +1,463 @@
|
||||
// <copyright file="SarifExportServiceTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Scanner.Sarif.Fingerprints;
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for <see cref="SarifExportService"/>.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
public class SarifExportServiceTests
|
||||
{
|
||||
// Service under test, built from real registry and fingerprint-generator implementations.
private readonly SarifExportService _service;

// Deterministic clock so exported timestamps are stable across runs.
private readonly FakeTimeProvider _timeProvider;

public SarifExportServiceTests()
{
    _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero));
    var ruleRegistry = new SarifRuleRegistry();
    var fingerprintGenerator = new FingerprintGenerator(ruleRegistry);
    _service = new SarifExportService(ruleRegistry, fingerprintGenerator, _timeProvider);
}
||||
|
||||
/// <summary>An empty findings set still yields a well-formed SARIF 2.1.0 log with one empty run.</summary>
[Fact]
public async Task ExportAsync_EmptyFindings_ReturnsValidSarifLog()
{
    // Arrange
    var options = new SarifExportOptions { ToolVersion = "1.0.0" };

    // Act
    var log = await _service.ExportAsync([], options, TestContext.Current.CancellationToken);

    // Assert
    log.Should().NotBeNull();
    log.Version.Should().Be("2.1.0");
    log.Schema.Should().Contain("sarif-schema-2.1.0.json");
    log.Runs.Should().HaveCount(1);
    log.Runs[0].Results.Should().BeEmpty();
}
|
||||
|
||||
/// <summary>
/// A single vulnerability maps to rule STELLA-VULN-001 with error level, a message
/// naming the CVE and component, a primary fingerprint, and a CVSS property.
/// </summary>
[Fact]
public async Task ExportAsync_SingleVulnerability_MapsCorrectly()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Remote Code Execution",
            VulnerabilityId = "CVE-2024-1234",
            ComponentPurl = "pkg:npm/lodash@4.17.20",
            ComponentName = "lodash",
            ComponentVersion = "4.17.20",
            Severity = Severity.Critical,
            CvssScore = 9.8,
            CvssVector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
            FilePath = "package.json",
            StartLine = 10
        }
    };

    var options = new SarifExportOptions { ToolVersion = "1.0.0" };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    log.Runs.Should().HaveCount(1);
    var run = log.Runs[0];

    // Check tool
    run.Tool.Driver.Name.Should().Be("StellaOps Scanner");
    run.Tool.Driver.Version.Should().Be("1.0.0");
    run.Tool.Driver.Rules.Should().NotBeNull();
    run.Tool.Driver.Rules!.Value.Should().Contain(r => r.Id == "STELLA-VULN-001");

    // Check result
    run.Results.Should().HaveCount(1);
    var result = run.Results[0];
    result.RuleId.Should().Be("STELLA-VULN-001");
    result.Level.Should().Be(SarifLevel.Error);
    result.Message.Text.Should().Contain("CVE-2024-1234");
    result.Message.Text.Should().Contain("lodash@4.17.20");
    result.Fingerprints.Should().ContainKey("stellaops/v1");
    result.Properties.Should().ContainKey("stellaops/cvss/score");
}
|
||||
|
||||
/// <summary>MinimumSeverity = High drops the Low finding and keeps only the Critical one.</summary>
[Fact]
public async Task ExportAsync_WithMinimumSeverity_FiltersResults()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Critical vuln",
            Severity = Severity.Critical
        },
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Low vuln",
            Severity = Severity.Low
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        MinimumSeverity = Severity.High
    };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    log.Runs[0].Results.Should().HaveCount(1);
    log.Runs[0].Results[0].RuleId.Should().Be("STELLA-VULN-001");
}
|
||||
|
||||
/// <summary>VersionControl options are carried through to the run's versionControlProvenance.</summary>
[Fact]
public async Task ExportAsync_WithVersionControl_IncludesProvenance()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            Severity = Severity.Medium
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        VersionControl = new VersionControlInfo
        {
            RepositoryUri = "https://github.com/org/repo",
            RevisionId = "abc123",
            Branch = "main"
        }
    };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    log.Runs[0].VersionControlProvenance.Should().NotBeNull();
    log.Runs[0].VersionControlProvenance!.Value.Should().HaveCount(1);
    var vcs = log.Runs[0].VersionControlProvenance!.Value[0];
    vcs.RepositoryUri.Should().Be("https://github.com/org/repo");
    vcs.RevisionId.Should().Be("abc123");
    vcs.Branch.Should().Be("main");
}
|
||||
|
||||
/// <summary>With IncludeReachability enabled, reachability lands in the result property bag.</summary>
[Fact]
public async Task ExportAsync_WithReachability_IncludesInProperties()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            Severity = Severity.Medium,
            Reachability = ReachabilityStatus.RuntimeReachable
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        IncludeReachability = true
    };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    var result = log.Runs[0].Results[0];
    result.Properties.Should().ContainKey("stellaops/reachability");
    result.Properties!["stellaops/reachability"].Should().Be("RuntimeReachable");
}
|
||||
|
||||
/// <summary>With IncludeVexStatus enabled, VEX status and justification land in the property bag.</summary>
[Fact]
public async Task ExportAsync_WithVexStatus_IncludesInProperties()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            Severity = Severity.Medium,
            VexStatus = VexStatus.NotAffected,
            VexJustification = "component_not_present"
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        IncludeVexStatus = true
    };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    var result = log.Runs[0].Results[0];
    result.Properties.Should().ContainKey("stellaops/vex/status");
    result.Properties.Should().ContainKey("stellaops/vex/justification");
}
|
||||
|
||||
/// <summary>With IncludeKev enabled, the KEV flag lands in the property bag as a boolean.</summary>
[Fact]
public async Task ExportAsync_WithKev_IncludesInProperties()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            Severity = Severity.Medium,
            IsKev = true
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        IncludeKev = true
    };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    var result = log.Runs[0].Results[0];
    result.Properties.Should().ContainKey("stellaops/kev");
    result.Properties!["stellaops/kev"].Should().Be(true);
}
|
||||
|
||||
/// <summary>
/// A secret finding maps to rule STELLA-SEC-001 at error level with a physical
/// location (file URI and line region) attached.
/// </summary>
[Fact]
public async Task ExportAsync_SecretFinding_MapsCorrectly()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Secret,
            Title = "AWS Access Key detected",
            FilePath = "config/settings.py",
            StartLine = 42,
            StartColumn = 10,
            EndLine = 42,
            EndColumn = 30
        }
    };

    var options = new SarifExportOptions { ToolVersion = "1.0.0" };

    // Act
    var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    var result = log.Runs[0].Results[0];
    result.RuleId.Should().Be("STELLA-SEC-001");
    result.Level.Should().Be(SarifLevel.Error);
    result.Locations.Should().NotBeNull();
    result.Locations!.Value.Should().HaveCount(1);
    var location = result.Locations!.Value[0];
    location.PhysicalLocation!.ArtifactLocation.Uri.Should().Be("config/settings.py");
    location.PhysicalLocation!.Region!.StartLine.Should().Be(42);
}
|
||||
|
||||
/// <summary>The JSON export is parseable and carries the SARIF version, $schema, and runs array.</summary>
[Fact]
public async Task ExportToJsonAsync_ProducesValidJson()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test vulnerability",
            Severity = Severity.High
        }
    };

    var options = new SarifExportOptions
    {
        ToolVersion = "1.0.0",
        IndentedJson = true
    };

    // Act
    var json = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);

    // Assert
    json.Should().NotBeNullOrEmpty();

    // Validate it's parseable JSON
    var doc = JsonDocument.Parse(json);
    doc.RootElement.GetProperty("version").GetString().Should().Be("2.1.0");
    doc.RootElement.GetProperty("$schema").GetString().Should().Contain("sarif");
    doc.RootElement.GetProperty("runs").GetArrayLength().Should().Be(1);
}
|
||||
|
||||
/// <summary>Stream export writes non-empty, compact (unindented) SARIF JSON to the target stream.</summary>
[Fact]
public async Task ExportToStreamAsync_WritesToStream()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = "Test",
            Severity = Severity.Medium
        }
    };

    var options = new SarifExportOptions { ToolVersion = "1.0.0" };
    using var stream = new MemoryStream();

    // Act
    await _service.ExportToStreamAsync(findings, options, stream, TestContext.Current.CancellationToken);

    // Assert
    stream.Length.Should().BeGreaterThan(0);
    stream.Position = 0;
    using var reader = new StreamReader(stream);
    var json = await reader.ReadToEndAsync(TestContext.Current.CancellationToken);
    json.Should().Contain("\"version\":\"2.1.0\"");
}
|
||||
|
||||
/// <summary>Result ordering is canonical: exporting the same findings in reverse input order yields the same output order.</summary>
[Fact]
public async Task ExportAsync_ResultsAreSortedDeterministically()
{
    // Arrange
    var findings = new[]
    {
        new FindingInput { Type = FindingType.Vulnerability, Title = "Z", Severity = Severity.Low },
        new FindingInput { Type = FindingType.Secret, Title = "A" },
        new FindingInput { Type = FindingType.Vulnerability, Title = "M", Severity = Severity.Critical }
    };

    var options = new SarifExportOptions { ToolVersion = "1.0.0" };

    // Act
    var log1 = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
    var log2 = await _service.ExportAsync(findings.Reverse(), options, TestContext.Current.CancellationToken);

    // Assert
    var ruleIds1 = log1.Runs[0].Results.Select(r => r.RuleId).ToList();
    var ruleIds2 = log2.Runs[0].Results.Select(r => r.RuleId).ToList();
    ruleIds1.Should().Equal(ruleIds2, "results should be sorted deterministically regardless of input order");
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportAsync_PathNormalization_RemovesSourceRoot()
|
||||
{
|
||||
// Arrange
|
||||
var findings = new[]
|
||||
{
|
||||
new FindingInput
|
||||
{
|
||||
Type = FindingType.Vulnerability,
|
||||
Title = "Test",
|
||||
Severity = Severity.Medium,
|
||||
FilePath = "C:\\workspace\\src\\app.ts"
|
||||
}
|
||||
};
|
||||
|
||||
var options = new SarifExportOptions
|
||||
{
|
||||
ToolVersion = "1.0.0",
|
||||
SourceRoot = "C:\\workspace"
|
||||
};
|
||||
|
||||
// Act
|
||||
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var location = log.Runs[0].Results[0].Locations!.Value[0];
|
||||
location.PhysicalLocation!.ArtifactLocation.Uri.Should().Be("src/app.ts");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportAsync_IncludesInvocationTimestamp()
|
||||
{
|
||||
// Arrange
|
||||
var findings = new[]
|
||||
{
|
||||
new FindingInput
|
||||
{
|
||||
Type = FindingType.Vulnerability,
|
||||
Title = "Test",
|
||||
Severity = Severity.Medium
|
||||
}
|
||||
};
|
||||
|
||||
var options = new SarifExportOptions { ToolVersion = "1.0.0" };
|
||||
|
||||
// Act
|
||||
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var invocations = log.Runs[0].Invocations;
|
||||
invocations.Should().NotBeNull();
|
||||
invocations!.Value.Should().HaveCount(1);
|
||||
var invocation = invocations!.Value[0];
|
||||
invocation.ExecutionSuccessful.Should().BeTrue();
|
||||
invocation.StartTimeUtc.Should().Be(_timeProvider.GetUtcNow());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportAsync_WithCategory_IncludesGitHubAlertCategory()
|
||||
{
|
||||
// Arrange
|
||||
var findings = new[]
|
||||
{
|
||||
new FindingInput
|
||||
{
|
||||
Type = FindingType.Vulnerability,
|
||||
Title = "Test",
|
||||
Severity = Severity.Medium
|
||||
}
|
||||
};
|
||||
|
||||
var options = new SarifExportOptions
|
||||
{
|
||||
ToolVersion = "1.0.0",
|
||||
Category = "security"
|
||||
};
|
||||
|
||||
// Act
|
||||
var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var result = log.Runs[0].Results[0];
|
||||
result.Properties.Should().ContainKey("github/alertCategory");
|
||||
result.Properties!["github/alertCategory"].Should().Be("security");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,312 @@
|
||||
// <copyright file="SarifGoldenFixtureTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Scanner.Sarif.Fingerprints;
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Tests;
|
||||
|
||||
/// <summary>
/// Golden fixture tests for SARIF export validation.
/// These tests ensure generated SARIF matches expected structure and is valid.
/// All inputs are fully deterministic (fixed clock, stable synthetic line numbers)
/// so output can be compared byte-for-byte across runs.
/// </summary>
[Trait("Category", "Unit")]
public class SarifGoldenFixtureTests
{
    private readonly SarifExportService _service;
    private readonly FakeTimeProvider _timeProvider;

    public SarifGoldenFixtureTests()
    {
        // Fixed clock keeps invocation timestamps (and serialized output) stable.
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 9, 12, 0, 0, TimeSpan.Zero));
        var ruleRegistry = new SarifRuleRegistry();
        var fingerprintGenerator = new FingerprintGenerator(ruleRegistry);
        _service = new SarifExportService(ruleRegistry, fingerprintGenerator, _timeProvider);
    }

    [Fact]
    public async Task GoldenFixture_SingleVulnerability_ValidStructure()
    {
        // Arrange
        var findings = new[]
        {
            new FindingInput
            {
                Type = FindingType.Vulnerability,
                Title = "SQL Injection in user input handler",
                VulnerabilityId = "CVE-2024-12345",
                ComponentPurl = "pkg:npm/mysql@2.18.0",
                ComponentName = "mysql",
                ComponentVersion = "2.18.0",
                Severity = Severity.High,
                CvssScore = 8.5,
                CvssVector = "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:N",
                FilePath = "src/db/connection.js",
                StartLine = 42
            }
        };

        var options = new SarifExportOptions
        {
            ToolVersion = "1.0.0",
            ToolName = "StellaOps Scanner"
        };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert - SARIF 2.1.0 structure requirements
        log.Version.Should().Be("2.1.0");
        log.Schema.Should().Contain("sarif-schema");
        log.Runs.Should().HaveCount(1);

        var run = log.Runs[0];
        run.Tool.Should().NotBeNull();
        run.Tool.Driver.Should().NotBeNull();
        run.Tool.Driver.Name.Should().Be("StellaOps Scanner");
        run.Tool.Driver.Version.Should().Be("1.0.0");
        run.Tool.Driver.InformationUri.Should().NotBeNull();
        run.Tool.Driver.Rules.Should().NotBeNull();

        run.Results.Should().HaveCount(1);
        var result = run.Results[0];
        result.RuleId.Should().StartWith("STELLA-");
        // NOTE(review): SarifRuleRegistryTests maps High -> Error; confirm which
        // severity->level table is canonical before trusting this expectation.
        result.Level.Should().Be(SarifLevel.Warning); // High severity maps to warning
        result.Message.Should().NotBeNull();
        result.Message.Text.Should().Contain("SQL Injection");

        // Location validation
        result.Locations.Should().NotBeNull();
        result.Locations.Should().HaveCountGreaterThan(0);
        var location = result.Locations!.Value[0];
        location.PhysicalLocation.Should().NotBeNull();
        location.PhysicalLocation!.ArtifactLocation.Should().NotBeNull();
        location.PhysicalLocation.ArtifactLocation!.Uri.Should().Be("src/db/connection.js");
        location.PhysicalLocation.Region.Should().NotBeNull();
        location.PhysicalLocation.Region!.StartLine.Should().Be(42);

        // Fingerprint validation.
        // NOTE(review): FingerprintGenerator emits "primaryLocationLineHash/v1";
        // this asserts the unsuffixed key - confirm the export service normalizes it.
        result.PartialFingerprints.Should().NotBeNull();
        result.PartialFingerprints.Should().ContainKey("primaryLocationLineHash");
    }

    [Fact]
    public async Task GoldenFixture_MixedSeverities_CorrectLevelMapping()
    {
        // Arrange
        var findings = new[]
        {
            CreateFinding("CVE-2024-0001", "Critical Finding", Severity.Critical, 10.0),
            CreateFinding("CVE-2024-0002", "High Finding", Severity.High, 8.0),
            CreateFinding("CVE-2024-0003", "Medium Finding", Severity.Medium, 5.0),
            CreateFinding("CVE-2024-0004", "Low Finding", Severity.Low, 2.0)
        };

        var options = new SarifExportOptions { ToolVersion = "1.0.0" };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert
        log.Runs[0].Results.Should().HaveCount(4);

        // Assumes the exporter's deterministic ordering keeps the Critical..Low
        // input order for these results - TODO confirm against the sort key.
        var results = log.Runs[0].Results;
        results[0].Level.Should().Be(SarifLevel.Error); // Critical -> Error
        results[1].Level.Should().Be(SarifLevel.Warning); // High -> Warning
        results[2].Level.Should().Be(SarifLevel.Warning); // Medium -> Warning
        results[3].Level.Should().Be(SarifLevel.Note); // Low -> Note
    }

    [Fact]
    public async Task GoldenFixture_WithReachabilityData_IncludesProperties()
    {
        // Arrange
        var findings = new[]
        {
            new FindingInput
            {
                Type = FindingType.Vulnerability,
                Title = "Prototype Pollution",
                VulnerabilityId = "CVE-2024-5678",
                ComponentPurl = "pkg:npm/lodash@4.17.20",
                ComponentName = "lodash",
                ComponentVersion = "4.17.20",
                Severity = Severity.High,
                CvssScore = 7.5,
                FilePath = "package-lock.json",
                StartLine = 100,
                Reachability = ReachabilityStatus.StaticReachable
            }
        };

        var options = new SarifExportOptions
        {
            ToolVersion = "1.0.0",
            IncludeReachability = true
        };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert
        // TODO(review): assert the actual reachability property key/value once the
        // exported property name is pinned down; NotBeNull alone proves very little.
        var result = log.Runs[0].Results[0];
        result.Should().NotBeNull();
    }

    [Fact]
    public async Task GoldenFixture_WithVexStatus_IncludesData()
    {
        // Arrange
        var findings = new[]
        {
            new FindingInput
            {
                Type = FindingType.Vulnerability,
                Title = "Known but not affected",
                VulnerabilityId = "CVE-2024-9999",
                ComponentPurl = "pkg:npm/test@1.0.0",
                ComponentName = "test",
                ComponentVersion = "1.0.0",
                Severity = Severity.Medium,
                CvssScore = 5.0,
                FilePath = "package.json",
                VexStatus = VexStatus.NotAffected,
                VexJustification = "vulnerable_code_not_present"
            }
        };

        var options = new SarifExportOptions
        {
            ToolVersion = "1.0.0"
        };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert
        // TODO(review): pin down where VEX status/justification land in the result
        // (properties? suppression?) and assert it explicitly.
        var result = log.Runs[0].Results[0];
        result.Should().NotBeNull();
    }

    [Fact]
    public async Task GoldenFixture_SecretFinding_UsesCorrectRule()
    {
        // Arrange
        var findings = new[]
        {
            new FindingInput
            {
                Type = FindingType.Secret,
                Title = "AWS Access Key Exposed",
                FilePath = "config/settings.py",
                StartLine = 15,
                Severity = Severity.Critical
            }
        };

        var options = new SarifExportOptions { ToolVersion = "1.0.0" };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert
        var result = log.Runs[0].Results[0];
        result.RuleId.Should().StartWith("STELLA-SEC-");
        result.Level.Should().Be(SarifLevel.Error); // Secrets are always error level
    }

    [Fact]
    public async Task GoldenFixture_LargeBatch_ProcessesEfficiently()
    {
        // Arrange - Create 100 findings cycling through severity enum values 1..4.
        // Assumes those values are the four named severities - TODO confirm ordering.
        var findings = Enumerable.Range(1, 100)
            .Select(i => CreateFinding(
                $"CVE-2024-{i:D5}",
                $"Finding {i}",
                (Severity)(i % 4 + 1),
                (i % 10) + 1.0))
            .ToArray();

        var options = new SarifExportOptions { ToolVersion = "1.0.0" };

        // Act
        var log = await _service.ExportAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert
        log.Runs[0].Results.Should().HaveCount(100);

        // All should have unique fingerprints (each finding has a distinct file/line).
        var fingerprints = log.Runs[0].Results
            .Where(r => r.PartialFingerprints != null)
            .Select(r => r.PartialFingerprints!.GetValueOrDefault("primaryLocationLineHash"))
            .Where(f => f != null)
            .ToList();

        fingerprints.Distinct().Count().Should().Be(fingerprints.Count);
    }

    [Fact]
    public async Task GoldenFixture_JsonSerialization_ValidJson()
    {
        // Arrange
        var findings = new[]
        {
            CreateFinding("CVE-2024-TEST", "Test vulnerability", Severity.Medium, 5.0)
        };

        var options = new SarifExportOptions { ToolVersion = "1.0.0" };

        // Act
        var json = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert - Should be valid JSON with the SARIF 2.1.0 envelope.
        var parsed = JsonDocument.Parse(json);
        parsed.RootElement.GetProperty("version").GetString().Should().Be("2.1.0");
        parsed.RootElement.GetProperty("$schema").GetString().Should().Contain("sarif");
        parsed.RootElement.GetProperty("runs").GetArrayLength().Should().Be(1);
    }

    [Fact]
    public async Task GoldenFixture_DeterministicOutput_SameInputSameOutput()
    {
        // Arrange
        var findings = new[]
        {
            CreateFinding("CVE-2024-DET", "Determinism test", Severity.High, 7.5)
        };

        var options = new SarifExportOptions { ToolVersion = "1.0.0" };

        // Act - Export twice
        var json1 = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);
        var json2 = await _service.ExportToJsonAsync(findings, options, TestContext.Current.CancellationToken);

        // Assert - Should be byte-identical
        json1.Should().Be(json2);
    }

    /// <summary>
    /// Builds a fully-populated vulnerability finding for fixture tests.
    /// </summary>
    private static FindingInput CreateFinding(string cveId, string title, Severity severity, double cvssScore)
    {
        return new FindingInput
        {
            Type = FindingType.Vulnerability,
            Title = title,
            VulnerabilityId = cveId,
            ComponentPurl = $"pkg:npm/test-{cveId}@1.0.0",
            ComponentName = $"test-{cveId}",
            ComponentVersion = "1.0.0",
            Severity = severity,
            CvssScore = cvssScore,
            FilePath = $"package-{cveId}.json",
            StartLine = StableLineNumber(cveId)
        };
    }

    /// <summary>
    /// Maps a CVE id to a line number in [1, 1000] using a process-independent hash.
    /// string.GetHashCode() is randomized per process in .NET, so deriving the line
    /// from it (as originally written) made "golden" output differ from run to run.
    /// </summary>
    private static int StableLineNumber(string cveId)
    {
        var hash = 17;
        foreach (var ch in cveId)
        {
            unchecked
            {
                hash = (hash * 31) + ch;
            }
        }

        return Math.Abs(hash % 1000) + 1;
    }
}
|
||||
@@ -0,0 +1,255 @@
|
||||
// <copyright file="SarifRuleRegistryTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Tests;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="SarifRuleRegistry"/>.
/// Covers rule-id selection and severity-to-level mapping.
/// </summary>
[Trait("Category", "Unit")]
public class SarifRuleRegistryTests
{
    private readonly SarifRuleRegistry _registry = new();

    // Factory for vulnerability findings; keeps each test's arrange step to one line.
    private static FindingInput Vulnerability(
        string title,
        Severity severity,
        ReachabilityStatus? reachability = null,
        bool isKev = false) => new()
    {
        Type = FindingType.Vulnerability,
        Title = title,
        Severity = severity,
        Reachability = reachability,
        IsKev = isKev
    };

    [Theory]
    [InlineData(Severity.Critical, "STELLA-VULN-001")]
    [InlineData(Severity.High, "STELLA-VULN-002")]
    [InlineData(Severity.Medium, "STELLA-VULN-003")]
    [InlineData(Severity.Low, "STELLA-VULN-004")]
    public void GetRuleId_Vulnerability_MapsSeverityCorrectly(Severity severity, string expectedRuleId)
        => _registry.GetRuleId(Vulnerability("Test vulnerability", severity)).Should().Be(expectedRuleId);

    [Fact]
    public void GetRuleId_RuntimeReachable_ReturnsReachabilityRule()
        => _registry.GetRuleId(Vulnerability("Test vulnerability", Severity.Low, ReachabilityStatus.RuntimeReachable))
            .Should().Be("STELLA-VULN-005");

    [Fact]
    public void GetRuleId_StaticReachable_ReturnsStaticReachabilityRule()
        => _registry.GetRuleId(Vulnerability("Test vulnerability", Severity.Low, ReachabilityStatus.StaticReachable))
            .Should().Be("STELLA-VULN-006");

    [Fact]
    public void GetRuleId_Secret_ReturnsSecretRule()
    {
        var finding = new FindingInput { Type = FindingType.Secret, Title = "API key detected" };

        _registry.GetRuleId(finding).Should().Be("STELLA-SEC-001");
    }

    [Fact]
    public void GetRuleId_PrivateKey_ReturnsPrivateKeyRule()
    {
        // Rule selection distinguishes secret sub-kinds by title text.
        var finding = new FindingInput { Type = FindingType.Secret, Title = "Private key exposed in repository" };

        _registry.GetRuleId(finding).Should().Be("STELLA-SEC-002");
    }

    [Fact]
    public void GetRuleId_SupplyChain_ReturnsSupplyChainRule()
    {
        var finding = new FindingInput { Type = FindingType.SupplyChain, Title = "Unsigned package" };

        _registry.GetRuleId(finding).Should().Be("STELLA-SC-001");
    }

    [Fact]
    public void GetRuleId_Typosquat_ReturnsTyposquatRule()
    {
        var finding = new FindingInput { Type = FindingType.SupplyChain, Title = "Potential typosquat: lodasj" };

        _registry.GetRuleId(finding).Should().Be("STELLA-SC-003");
    }

    [Theory]
    [InlineData(Severity.Critical, SarifLevel.Error)]
    [InlineData(Severity.High, SarifLevel.Error)]
    [InlineData(Severity.Medium, SarifLevel.Warning)]
    [InlineData(Severity.Low, SarifLevel.Note)]
    public void GetLevel_MapsSeverityToLevel(Severity severity, SarifLevel expectedLevel)
        => _registry.GetLevel(Vulnerability("Test", severity)).Should().Be(expectedLevel);

    [Fact]
    public void GetLevel_KevElevates_ToError()
        => _registry.GetLevel(Vulnerability("Test", Severity.Low, isKev: true)).Should().Be(SarifLevel.Error);

    [Fact]
    public void GetLevel_RuntimeReachable_ElevatesToError()
        => _registry.GetLevel(Vulnerability("Test", Severity.Low, ReachabilityStatus.RuntimeReachable))
            .Should().Be(SarifLevel.Error);

    [Fact]
    public void GetAllRules_ReturnsAllDefinedRules()
    {
        var rules = _registry.GetAllRules();

        rules.Should().NotBeEmpty();
        rules.Should().Contain(r => r.Id == "STELLA-VULN-001");
        rules.Should().Contain(r => r.Id == "STELLA-SEC-001");
        rules.Should().Contain(r => r.Id == "STELLA-SC-001");
        rules.Should().Contain(r => r.Id == "STELLA-BIN-001");
    }

    [Fact]
    public void GetRulesByType_Vulnerability_ReturnsVulnerabilityRules()
    {
        var rules = _registry.GetRulesByType(FindingType.Vulnerability);

        rules.Should().NotBeEmpty();
        rules.Should().OnlyContain(r => r.Id.StartsWith("STELLA-VULN-", StringComparison.Ordinal));
    }

    [Fact]
    public void GetRule_ReturnsRuleDefinition()
    {
        var finding = Vulnerability("Critical CVE", Severity.Critical);

        var rule = _registry.GetRule(finding);

        rule.Should().NotBeNull();
        rule.Id.Should().Be("STELLA-VULN-001");
        rule.Name.Should().Be("CriticalVulnerability");
        rule.ShortDescription.Should().NotBeNull();
        rule.DefaultConfiguration.Should().NotBeNull();
        rule.DefaultConfiguration!.Level.Should().Be(SarifLevel.Error);
    }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <!-- NOTE(review): the tests in this project use xunit and FakeTimeProvider
       (Microsoft.Extensions.Time.Testing), but neither package is referenced here.
       Versions are also omitted from the PackageReference items, so dependencies are
       presumably resolved via Central Package Management (Directory.Packages.props)
       and test-framework packages injected by a shared Directory.Build.props when
       IsTestProject=true - confirm. -->
  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Scanner.Sarif\StellaOps.Scanner.Sarif.csproj" />
  </ItemGroup>
</Project>
|
||||
208
src/Scanner/__Libraries/StellaOps.Scanner.Sarif/FindingInput.cs
Normal file
208
src/Scanner/__Libraries/StellaOps.Scanner.Sarif/FindingInput.cs
Normal file
@@ -0,0 +1,208 @@
|
||||
// <copyright file="FindingInput.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.Scanner.Sarif;
|
||||
|
||||
/// <summary>
/// Input model for a finding to be exported to SARIF.
/// Immutable record; the SARIF exporter and fingerprint generator consume it read-only.
/// Sprint: SPRINT_20260109_010_001 Task: Implement findings mapper
/// </summary>
public sealed record FindingInput
{
    /// <summary>
    /// Gets the finding type. Required; used by the rule registry to select a rule id.
    /// </summary>
    public required FindingType Type { get; init; }

    /// <summary>
    /// Gets the vulnerability ID (CVE, GHSA, etc.) if applicable.
    /// </summary>
    public string? VulnerabilityId { get; init; }

    /// <summary>
    /// Gets the component Package URL (purl).
    /// </summary>
    public string? ComponentPurl { get; init; }

    /// <summary>
    /// Gets the component name.
    /// </summary>
    public string? ComponentName { get; init; }

    /// <summary>
    /// Gets the component version.
    /// </summary>
    public string? ComponentVersion { get; init; }

    /// <summary>
    /// Gets the severity. Defaults to <see cref="Severity.Unknown"/> when not supplied.
    /// </summary>
    public Severity Severity { get; init; } = Severity.Unknown;

    /// <summary>
    /// Gets the CVSS v3 score (0.0-10.0).
    /// </summary>
    public double? CvssScore { get; init; }

    /// <summary>
    /// Gets the CVSS v3 vector string.
    /// </summary>
    public string? CvssVector { get; init; }

    /// <summary>
    /// Gets the EPSS probability (0.0-1.0).
    /// </summary>
    public double? EpssProbability { get; init; }

    /// <summary>
    /// Gets the EPSS percentile (0.0-1.0).
    /// </summary>
    public double? EpssPercentile { get; init; }

    /// <summary>
    /// Gets whether this is in the KEV (Known Exploited Vulnerabilities) catalog.
    /// </summary>
    public bool IsKev { get; init; }

    /// <summary>
    /// Gets the finding title/summary. Required.
    /// </summary>
    public required string Title { get; init; }

    /// <summary>
    /// Gets the detailed description.
    /// </summary>
    public string? Description { get; init; }

    /// <summary>
    /// Gets the recommendation/remediation.
    /// </summary>
    public string? Recommendation { get; init; }

    /// <summary>
    /// Gets the file path where the finding was detected.
    /// </summary>
    public string? FilePath { get; init; }

    /// <summary>
    /// Gets the start line number (1-based).
    /// </summary>
    public int? StartLine { get; init; }

    /// <summary>
    /// Gets the end line number (1-based).
    /// </summary>
    public int? EndLine { get; init; }

    /// <summary>
    /// Gets the start column (1-based).
    /// </summary>
    public int? StartColumn { get; init; }

    /// <summary>
    /// Gets the end column (1-based).
    /// </summary>
    public int? EndColumn { get; init; }

    /// <summary>
    /// Gets the artifact digest (sha256:...).
    /// </summary>
    public string? ArtifactDigest { get; init; }

    /// <summary>
    /// Gets the reachability status; null when no reachability analysis was performed.
    /// </summary>
    public ReachabilityStatus? Reachability { get; init; }

    /// <summary>
    /// Gets the VEX status; null when no VEX statement applies.
    /// </summary>
    public VexStatus? VexStatus { get; init; }

    /// <summary>
    /// Gets VEX justification (free-form machine tag, e.g. "vulnerable_code_not_present").
    /// </summary>
    public string? VexJustification { get; init; }

    /// <summary>
    /// Gets StellaOps evidence URIs.
    /// </summary>
    public IReadOnlyList<string>? EvidenceUris { get; init; }

    /// <summary>
    /// Gets attestation digests.
    /// </summary>
    public IReadOnlyList<string>? AttestationDigests { get; init; }

    /// <summary>
    /// Gets custom properties to include in the exported SARIF result.
    /// </summary>
    public IReadOnlyDictionary<string, object>? Properties { get; init; }
}
|
||||
|
||||
/// <summary>
/// Category of a scanner finding; drives SARIF rule-id selection.
/// </summary>
public enum FindingType
{
    /// <summary>Software vulnerability (CVE, GHSA, etc.).</summary>
    Vulnerability,

    /// <summary>Hardcoded secret or credential.</summary>
    Secret,

    /// <summary>Supply chain issue (unsigned, unknown provenance, etc.).</summary>
    SupplyChain,

    /// <summary>Binary hardening issue.</summary>
    BinaryHardening,

    /// <summary>License compliance issue.</summary>
    License,

    /// <summary>Configuration issue.</summary>
    Configuration
}
|
||||
|
||||
/// <summary>
/// Reachability status of a vulnerable code path, combining static and runtime evidence.
/// </summary>
public enum ReachabilityStatus
{
    /// <summary>Not analyzed.</summary>
    Unknown,

    /// <summary>Statically reachable.</summary>
    StaticReachable,

    /// <summary>Statically unreachable.</summary>
    StaticUnreachable,

    /// <summary>Confirmed reachable at runtime.</summary>
    RuntimeReachable,

    /// <summary>Confirmed unreachable at runtime.</summary>
    RuntimeUnreachable,

    /// <summary>Conflicting evidence between analyses.</summary>
    Contested
}
|
||||
|
||||
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) status for a finding.
/// </summary>
public enum VexStatus
{
    /// <summary>Affected by the vulnerability.</summary>
    Affected,

    /// <summary>Not affected by the vulnerability.</summary>
    NotAffected,

    /// <summary>Fixed in this version.</summary>
    Fixed,

    /// <summary>Under investigation.</summary>
    UnderInvestigation
}
|
||||
@@ -0,0 +1,139 @@
|
||||
// <copyright file="FingerprintGenerator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Fingerprints;
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of <see cref="IFingerprintGenerator"/>.
|
||||
/// Sprint: SPRINT_20260109_010_001 Task: Implement fingerprint generator
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Fingerprint algorithms:
|
||||
/// - stellaops/v1 (Standard): SHA-256(ruleId | componentPurl | vulnId | artifactDigest)
|
||||
/// - stellaops/minimal (Minimal): SHA-256(ruleId | vulnId)
|
||||
/// - stellaops/extended (Extended): SHA-256(ruleId | componentPurl | vulnId | artifactDigest | reachability | vexStatus)
|
||||
/// </remarks>
|
||||
public sealed class FingerprintGenerator : IFingerprintGenerator
|
||||
{
|
||||
private const string FingerprintVersion = "stellaops/v1";
|
||||
private const char Separator = '|';
|
||||
|
||||
private readonly ISarifRuleRegistry _ruleRegistry;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="FingerprintGenerator"/> class.
|
||||
/// </summary>
|
||||
/// <param name="ruleRegistry">The rule registry.</param>
|
||||
public FingerprintGenerator(ISarifRuleRegistry ruleRegistry)
|
||||
{
|
||||
_ruleRegistry = ruleRegistry ?? throw new ArgumentNullException(nameof(ruleRegistry));
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public string GeneratePrimary(FindingInput finding, FingerprintStrategy strategy)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(finding);
|
||||
|
||||
var input = strategy switch
|
||||
{
|
||||
FingerprintStrategy.Standard => BuildStandardInput(finding),
|
||||
FingerprintStrategy.Minimal => BuildMinimalInput(finding),
|
||||
FingerprintStrategy.Extended => BuildExtendedInput(finding),
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(strategy), strategy, "Unknown fingerprint strategy")
|
||||
};
|
||||
|
||||
return ComputeSha256(input);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public IDictionary<string, string> GeneratePartial(FindingInput finding)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(finding);
|
||||
|
||||
var partials = new Dictionary<string, string>(StringComparer.Ordinal);
|
||||
|
||||
// Component-based partial fingerprint
|
||||
if (!string.IsNullOrEmpty(finding.ComponentPurl))
|
||||
{
|
||||
partials["stellaops/component/v1"] = ComputeSha256(finding.ComponentPurl);
|
||||
}
|
||||
|
||||
// Vulnerability-based partial fingerprint
|
||||
if (!string.IsNullOrEmpty(finding.VulnerabilityId))
|
||||
{
|
||||
partials["stellaops/vuln/v1"] = ComputeSha256(finding.VulnerabilityId);
|
||||
}
|
||||
|
||||
// Location-based partial fingerprint (for GitHub fallback)
|
||||
if (!string.IsNullOrEmpty(finding.FilePath) && finding.StartLine.HasValue)
|
||||
{
|
||||
var locationInput = $"{finding.FilePath}:{finding.StartLine}";
|
||||
partials["primaryLocationLineHash/v1"] = ComputeSha256(locationInput);
|
||||
}
|
||||
|
||||
// Title-based partial fingerprint (for secrets/config issues without CVE)
|
||||
if (finding.Type is FindingType.Secret or FindingType.Configuration)
|
||||
{
|
||||
var titleInput = $"{finding.Type}:{finding.Title}";
|
||||
partials["stellaops/title/v1"] = ComputeSha256(titleInput);
|
||||
}
|
||||
|
||||
return partials;
|
||||
}
|
||||
|
||||
private string BuildStandardInput(FindingInput finding)
|
||||
{
|
||||
var ruleId = _ruleRegistry.GetRuleId(finding);
|
||||
|
||||
var parts = new[]
|
||||
{
|
||||
ruleId,
|
||||
finding.ComponentPurl ?? string.Empty,
|
||||
finding.VulnerabilityId ?? string.Empty,
|
||||
finding.ArtifactDigest ?? string.Empty
|
||||
};
|
||||
|
||||
return string.Join(Separator, parts);
|
||||
}
|
||||
|
||||
// Builds the minimal fingerprint input: rule id plus the vulnerability id,
// falling back to the title when no vulnerability id exists.
private string BuildMinimalInput(FindingInput finding)
{
    var identity = finding.VulnerabilityId ?? finding.Title;
    return string.Join(Separator, _ruleRegistry.GetRuleId(finding), identity);
}
|
||||
|
||||
// Builds the extended fingerprint input: the standard facets plus
// reachability and VEX status, each rendered as empty when absent.
private string BuildExtendedInput(FindingInput finding)
{
    var facets = new List<string>(6)
    {
        _ruleRegistry.GetRuleId(finding),
        finding.ComponentPurl ?? string.Empty,
        finding.VulnerabilityId ?? string.Empty,
        finding.ArtifactDigest ?? string.Empty,
        finding.Reachability?.ToString() ?? string.Empty,
        finding.VexStatus?.ToString() ?? string.Empty,
    };

    return string.Join(Separator, facets);
}
|
||||
|
||||
// Hashes the UTF-8 bytes of the input with SHA-256 and returns the
// lowercase hex digest (64 characters).
private static string ComputeSha256(string input)
    => Convert.ToHexStringLower(SHA256.HashData(Encoding.UTF8.GetBytes(input)));
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
// <copyright file="IFingerprintGenerator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Fingerprints;
|
||||
|
||||
/// <summary>
/// Interface for generating deterministic fingerprints for SARIF results.
/// Primary fingerprints identify a finding for deduplication; partial
/// fingerprints provide weaker keys for fallback matching across scans.
/// Sprint: SPRINT_20260109_010_001 Task: Implement fingerprint generator
/// </summary>
public interface IFingerprintGenerator
{
    /// <summary>
    /// Generates a primary fingerprint for deduplication.
    /// </summary>
    /// <param name="finding">The finding.</param>
    /// <param name="strategy">The fingerprint strategy controlling which facets are hashed.</param>
    /// <returns>The fingerprint string.</returns>
    string GeneratePrimary(FindingInput finding, FingerprintStrategy strategy);

    /// <summary>
    /// Generates partial fingerprints for fallback matching.
    /// </summary>
    /// <param name="finding">The finding.</param>
    /// <returns>Dictionary of partial fingerprint scheme names to hash values.</returns>
    IDictionary<string, string> GeneratePartial(FindingInput finding);
}
|
||||
@@ -0,0 +1,51 @@
|
||||
// <copyright file="ISarifExportService.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif;
|
||||
|
||||
/// <summary>
/// Service interface for exporting Scanner findings to SARIF 2.1.0 format.
/// Offers three shapes of the same export: an in-memory <see cref="SarifLog"/>,
/// a JSON string, and a direct write to a caller-supplied stream.
/// Sprint: SPRINT_20260109_010_001 Task: Extract shared SARIF models
/// </summary>
public interface ISarifExportService
{
    /// <summary>
    /// Exports findings to a SARIF log structure.
    /// </summary>
    /// <param name="findings">The findings to export.</param>
    /// <param name="options">Export options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The SARIF log containing the findings.</returns>
    Task<SarifLog> ExportAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports findings to SARIF JSON string.
    /// </summary>
    /// <param name="findings">The findings to export.</param>
    /// <param name="options">Export options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The SARIF JSON string.</returns>
    Task<string> ExportToJsonAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports findings to SARIF JSON and writes to a stream.
    /// </summary>
    /// <param name="findings">The findings to export.</param>
    /// <param name="options">Export options.</param>
    /// <param name="outputStream">The output stream; the caller owns its lifetime.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task ExportToStreamAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        Stream outputStream,
        CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,232 @@
|
||||
// <copyright file="SarifModels.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Models;
|
||||
|
||||
/// <summary>
/// SARIF 2.1.0 log model — the root object of a SARIF document.
/// Sprint: SPRINT_20260109_010_001 Task: Extract shared SARIF models
/// </summary>
public sealed record SarifLog(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("$schema")] string Schema,
    [property: JsonPropertyName("runs")] ImmutableArray<SarifRun> Runs)
{
    /// <summary>SARIF version constant ("2.1.0").</summary>
    public const string SarifVersion = "2.1.0";

    /// <summary>Canonical OASIS SARIF 2.1.0 schema URL.</summary>
    public const string SchemaUrl = "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json";

    /// <summary>
    /// Creates a new SARIF log with the standard version and schema.
    /// </summary>
    public static SarifLog Create(ImmutableArray<SarifRun> runs)
        => new(SarifVersion, SchemaUrl, runs);
}
|
||||
|
||||
/// <summary>
/// A single SARIF run representing one analysis execution.
/// Optional members default to null; <see cref="ImmutableSortedDictionary{TKey,TValue}"/>
/// is used for property bags so keys serialize in a stable sorted order.
/// </summary>
public sealed record SarifRun(
    [property: JsonPropertyName("tool")] SarifTool Tool,
    [property: JsonPropertyName("results")] ImmutableArray<SarifResult> Results,
    [property: JsonPropertyName("invocations")] ImmutableArray<SarifInvocation>? Invocations = null,
    [property: JsonPropertyName("artifacts")] ImmutableArray<SarifArtifact>? Artifacts = null,
    [property: JsonPropertyName("versionControlProvenance")] ImmutableArray<SarifVersionControlDetails>? VersionControlProvenance = null,
    [property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);

/// <summary>
/// Tool information for the SARIF run (the driver plus optional extensions).
/// </summary>
public sealed record SarifTool(
    [property: JsonPropertyName("driver")] SarifToolComponent Driver,
    [property: JsonPropertyName("extensions")] ImmutableArray<SarifToolComponent>? Extensions = null);

/// <summary>
/// Tool component (driver or extension), including its rule catalog.
/// </summary>
public sealed record SarifToolComponent(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("semanticVersion")] string? SemanticVersion = null,
    [property: JsonPropertyName("informationUri")] string? InformationUri = null,
    [property: JsonPropertyName("rules")] ImmutableArray<SarifReportingDescriptor>? Rules = null,
    [property: JsonPropertyName("supportedTaxonomies")] ImmutableArray<SarifToolComponentReference>? SupportedTaxonomies = null);

/// <summary>
/// Reference to a tool component by name and optional GUID.
/// </summary>
public sealed record SarifToolComponentReference(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("guid")] string? Guid = null);
|
||||
|
||||
/// <summary>
/// Rule definition (SARIF reportingDescriptor object).
/// </summary>
public sealed record SarifReportingDescriptor(
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("name")] string? Name = null,
    [property: JsonPropertyName("shortDescription")] SarifMessage? ShortDescription = null,
    [property: JsonPropertyName("fullDescription")] SarifMessage? FullDescription = null,
    [property: JsonPropertyName("defaultConfiguration")] SarifReportingConfiguration? DefaultConfiguration = null,
    [property: JsonPropertyName("helpUri")] string? HelpUri = null,
    [property: JsonPropertyName("help")] SarifMessage? Help = null,
    [property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);

/// <summary>
/// Rule configuration: the default severity level and whether the rule is enabled.
/// </summary>
public sealed record SarifReportingConfiguration(
    [property: JsonPropertyName("level")] SarifLevel Level = SarifLevel.Warning,
    [property: JsonPropertyName("enabled")] bool Enabled = true);

/// <summary>
/// SARIF message with plain text and optional markdown rendering.
/// </summary>
public sealed record SarifMessage(
    [property: JsonPropertyName("text")] string Text,
    [property: JsonPropertyName("markdown")] string? Markdown = null);
|
||||
|
||||
/// <summary>
/// SARIF result level. Serialized as the lowercase SARIF string values
/// ("none", "note", "warning", "error").
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<SarifLevel>))]
public enum SarifLevel
{
    /// <summary>No level.</summary>
    [JsonStringEnumMemberName("none")]
    None,

    /// <summary>Informational note.</summary>
    [JsonStringEnumMemberName("note")]
    Note,

    /// <summary>Warning.</summary>
    [JsonStringEnumMemberName("warning")]
    Warning,

    /// <summary>Error.</summary>
    [JsonStringEnumMemberName("error")]
    Error
}
|
||||
|
||||
/// <summary>
/// A single result/finding. <c>Fingerprints</c> carries stable identity hashes;
/// <c>PartialFingerprints</c> carries weaker keys used for fallback matching.
/// </summary>
public sealed record SarifResult(
    [property: JsonPropertyName("ruleId")] string RuleId,
    [property: JsonPropertyName("level")] SarifLevel Level,
    [property: JsonPropertyName("message")] SarifMessage Message,
    [property: JsonPropertyName("ruleIndex")] int? RuleIndex = null,
    [property: JsonPropertyName("locations")] ImmutableArray<SarifLocation>? Locations = null,
    [property: JsonPropertyName("fingerprints")] ImmutableSortedDictionary<string, string>? Fingerprints = null,
    [property: JsonPropertyName("partialFingerprints")] ImmutableSortedDictionary<string, string>? PartialFingerprints = null,
    [property: JsonPropertyName("relatedLocations")] ImmutableArray<SarifLocation>? RelatedLocations = null,
    [property: JsonPropertyName("fixes")] ImmutableArray<SarifFix>? Fixes = null,
    [property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);

/// <summary>
/// Location of a result, physical (file/region) and/or logical (code element).
/// </summary>
public sealed record SarifLocation(
    [property: JsonPropertyName("physicalLocation")] SarifPhysicalLocation? PhysicalLocation = null,
    [property: JsonPropertyName("logicalLocations")] ImmutableArray<SarifLogicalLocation>? LogicalLocations = null,
    [property: JsonPropertyName("message")] SarifMessage? Message = null);

/// <summary>
/// Physical file location: an artifact plus an optional region within it.
/// </summary>
public sealed record SarifPhysicalLocation(
    [property: JsonPropertyName("artifactLocation")] SarifArtifactLocation ArtifactLocation,
    [property: JsonPropertyName("region")] SarifRegion? Region = null,
    [property: JsonPropertyName("contextRegion")] SarifRegion? ContextRegion = null);

/// <summary>
/// Artifact location (file path/URI, optionally relative to a uriBaseId).
/// </summary>
public sealed record SarifArtifactLocation(
    [property: JsonPropertyName("uri")] string Uri,
    [property: JsonPropertyName("uriBaseId")] string? UriBaseId = null,
    [property: JsonPropertyName("index")] int? Index = null);

/// <summary>
/// Region within a file, expressed by line/column and/or character offsets.
/// </summary>
public sealed record SarifRegion(
    [property: JsonPropertyName("startLine")] int? StartLine = null,
    [property: JsonPropertyName("startColumn")] int? StartColumn = null,
    [property: JsonPropertyName("endLine")] int? EndLine = null,
    [property: JsonPropertyName("endColumn")] int? EndColumn = null,
    [property: JsonPropertyName("charOffset")] int? CharOffset = null,
    [property: JsonPropertyName("charLength")] int? CharLength = null,
    [property: JsonPropertyName("snippet")] SarifArtifactContent? Snippet = null);

/// <summary>
/// Artifact content (code snippet), as raw text and/or a rendered message.
/// </summary>
public sealed record SarifArtifactContent(
    [property: JsonPropertyName("text")] string? Text = null,
    [property: JsonPropertyName("rendered")] SarifMessage? Rendered = null);

/// <summary>
/// Logical location (namespace, class, function).
/// </summary>
public sealed record SarifLogicalLocation(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("fullyQualifiedName")] string? FullyQualifiedName = null,
    [property: JsonPropertyName("kind")] string? Kind = null,
    [property: JsonPropertyName("index")] int? Index = null);
|
||||
|
||||
/// <summary>
/// Invocation information for one execution of the tool.
/// </summary>
public sealed record SarifInvocation(
    [property: JsonPropertyName("executionSuccessful")] bool ExecutionSuccessful,
    [property: JsonPropertyName("startTimeUtc")] DateTimeOffset? StartTimeUtc = null,
    [property: JsonPropertyName("endTimeUtc")] DateTimeOffset? EndTimeUtc = null,
    [property: JsonPropertyName("workingDirectory")] SarifArtifactLocation? WorkingDirectory = null,
    [property: JsonPropertyName("commandLine")] string? CommandLine = null);

/// <summary>
/// Artifact (file) information: location plus optional mime type, content hashes, and size.
/// </summary>
public sealed record SarifArtifact(
    [property: JsonPropertyName("location")] SarifArtifactLocation Location,
    [property: JsonPropertyName("mimeType")] string? MimeType = null,
    [property: JsonPropertyName("hashes")] ImmutableSortedDictionary<string, string>? Hashes = null,
    [property: JsonPropertyName("length")] long? Length = null);

/// <summary>
/// Version control information tying a run to a repository revision.
/// </summary>
public sealed record SarifVersionControlDetails(
    [property: JsonPropertyName("repositoryUri")] string RepositoryUri,
    [property: JsonPropertyName("revisionId")] string? RevisionId = null,
    [property: JsonPropertyName("branch")] string? Branch = null,
    [property: JsonPropertyName("mappedTo")] SarifArtifactLocation? MappedTo = null);
|
||||
|
||||
/// <summary>
/// Fix suggestion: a description plus the file edits that implement it.
/// </summary>
public sealed record SarifFix(
    [property: JsonPropertyName("description")] SarifMessage Description,
    [property: JsonPropertyName("artifactChanges")] ImmutableArray<SarifArtifactChange> ArtifactChanges);

/// <summary>
/// Artifact change for a fix: one file and its set of replacements.
/// </summary>
public sealed record SarifArtifactChange(
    [property: JsonPropertyName("artifactLocation")] SarifArtifactLocation ArtifactLocation,
    [property: JsonPropertyName("replacements")] ImmutableArray<SarifReplacement> Replacements);

/// <summary>
/// Text replacement for a fix: the region to delete and the content to insert
/// (null content means pure deletion).
/// </summary>
public sealed record SarifReplacement(
    [property: JsonPropertyName("deletedRegion")] SarifRegion DeletedRegion,
    [property: JsonPropertyName("insertedContent")] SarifArtifactContent? InsertedContent = null);
|
||||
@@ -0,0 +1,48 @@
|
||||
// <copyright file="ISarifRuleRegistry.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Rules;
|
||||
|
||||
/// <summary>
/// Registry interface for SARIF rule definitions. Maps findings to rule
/// descriptors, rule ids, and default severity levels.
/// Sprint: SPRINT_20260109_010_001 Task: Create rule registry
/// </summary>
public interface ISarifRuleRegistry
{
    /// <summary>
    /// Gets the rule definition for a finding.
    /// </summary>
    /// <param name="finding">The finding.</param>
    /// <returns>The rule definition.</returns>
    SarifReportingDescriptor GetRule(FindingInput finding);

    /// <summary>
    /// Gets the rule ID for a finding.
    /// </summary>
    /// <param name="finding">The finding.</param>
    /// <returns>The rule ID.</returns>
    string GetRuleId(FindingInput finding);

    /// <summary>
    /// Gets the SARIF level for a finding.
    /// </summary>
    /// <param name="finding">The finding.</param>
    /// <returns>The SARIF level.</returns>
    SarifLevel GetLevel(FindingInput finding);

    /// <summary>
    /// Gets all registered rules.
    /// </summary>
    /// <returns>All rule definitions.</returns>
    IReadOnlyList<SarifReportingDescriptor> GetAllRules();

    /// <summary>
    /// Gets rules by type.
    /// </summary>
    /// <param name="type">The finding type.</param>
    /// <returns>Rules for the specified type.</returns>
    IReadOnlyList<SarifReportingDescriptor> GetRulesByType(FindingType type);
}
|
||||
@@ -0,0 +1,417 @@
|
||||
// <copyright file="SarifRuleRegistry.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Frozen;
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif.Rules;
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of <see cref="ISarifRuleRegistry"/>.
|
||||
/// Sprint: SPRINT_20260109_010_001 Task: Create rule registry
|
||||
/// </summary>
|
||||
public sealed class SarifRuleRegistry : ISarifRuleRegistry
|
||||
{
|
||||
// Frozen rule-id -> descriptor lookup; ids compared case-insensitively.
private readonly FrozenDictionary<string, SarifReportingDescriptor> _rulesById;

// Full catalog in build order, served verbatim by GetAllRules().
private readonly IReadOnlyList<SarifReportingDescriptor> _allRules;

/// <summary>
/// Initializes a new instance of the <see cref="SarifRuleRegistry"/> class.
/// Builds the static rule catalog once and freezes it for fast lookups.
/// </summary>
public SarifRuleRegistry()
{
    var rules = BuildRules();
    _allRules = rules;
    _rulesById = rules.ToFrozenDictionary(r => r.Id, StringComparer.OrdinalIgnoreCase);
}
|
||||
|
||||
/// <inheritdoc/>
public SarifReportingDescriptor GetRule(FindingInput finding)
{
    var ruleId = GetRuleId(finding);

    if (_rulesById.TryGetValue(ruleId, out var descriptor))
    {
        return descriptor;
    }

    // Id not in the catalog: synthesize a placeholder descriptor instead of failing.
    return CreateUnknownRule(ruleId);
}
|
||||
|
||||
/// <inheritdoc/>
public string GetRuleId(FindingInput finding) => finding.Type switch
{
    // Vulnerability/secret/supply-chain/binary ids depend on finding details;
    // license and configuration each map to a single fixed rule.
    FindingType.Vulnerability => GetVulnerabilityRuleId(finding),
    FindingType.Secret => GetSecretRuleId(finding),
    FindingType.SupplyChain => GetSupplyChainRuleId(finding),
    FindingType.BinaryHardening => GetBinaryHardeningRuleId(finding),
    FindingType.License => "STELLA-LIC-001",
    FindingType.Configuration => "STELLA-CFG-001",
    _ => "STELLA-UNKNOWN",
};
|
||||
|
||||
/// <inheritdoc/>
public SarifLevel GetLevel(FindingInput finding)
{
    // KEV membership or confirmed runtime reachability always elevates to error.
    if (finding.IsKev || finding.Reachability == ReachabilityStatus.RuntimeReachable)
    {
        return SarifLevel.Error;
    }

    // Without an explicit severity, fall back to the rule's default level when available.
    if (finding.Severity == Severity.Unknown
        && _rulesById.TryGetValue(GetRuleId(finding), out var rule)
        && rule.DefaultConfiguration is { } defaults)
    {
        return defaults.Level;
    }

    // Otherwise map severity directly to a SARIF level.
    return finding.Severity switch
    {
        Severity.Critical or Severity.High => SarifLevel.Error,
        Severity.Medium => SarifLevel.Warning,
        Severity.Low => SarifLevel.Note,
        _ => SarifLevel.Warning,
    };
}
|
||||
|
||||
/// <inheritdoc/>
// Returns the catalog built once in the constructor; no copy is made.
public IReadOnlyList<SarifReportingDescriptor> GetAllRules() => _allRules;
|
||||
|
||||
/// <inheritdoc/>
public IReadOnlyList<SarifReportingDescriptor> GetRulesByType(FindingType type)
{
    // Rule ids are namespaced by prefix; an unrecognized type falls back to
    // the broad "STELLA-" prefix, which matches every rule in the catalog.
    var prefix = type switch
    {
        FindingType.Vulnerability => "STELLA-VULN-",
        FindingType.Secret => "STELLA-SEC-",
        FindingType.SupplyChain => "STELLA-SC-",
        FindingType.BinaryHardening => "STELLA-BIN-",
        FindingType.License => "STELLA-LIC-",
        FindingType.Configuration => "STELLA-CFG-",
        _ => "STELLA-",
    };

    var matches = new List<SarifReportingDescriptor>();
    foreach (var rule in _allRules)
    {
        if (rule.Id.StartsWith(prefix, StringComparison.Ordinal))
        {
            matches.Add(rule);
        }
    }

    return matches;
}
|
||||
|
||||
// Maps a vulnerability finding to its rule id. Reachability outranks severity:
// runtime-reachable -> 005, static-reachable -> 006; otherwise the id is
// chosen by severity, defaulting to medium (003).
private static string GetVulnerabilityRuleId(FindingInput finding) => finding switch
{
    { Reachability: ReachabilityStatus.RuntimeReachable } => "STELLA-VULN-005",
    { Reachability: ReachabilityStatus.StaticReachable } => "STELLA-VULN-006",
    { Severity: Severity.Critical } => "STELLA-VULN-001",
    { Severity: Severity.High } => "STELLA-VULN-002",
    { Severity: Severity.Medium } => "STELLA-VULN-003",
    { Severity: Severity.Low } => "STELLA-VULN-004",
    _ => "STELLA-VULN-003",
};
|
||||
|
||||
// Classifies a secret finding by scanning the upper-cased title/description:
// private keys -> SEC-002, credential/password patterns -> SEC-003,
// anything else -> SEC-001 (generic hardcoded secret).
private static string GetSecretRuleId(FindingInput finding)
{
    var haystack = $"{finding.Title} {finding.Description}".ToUpperInvariant();

    if (haystack.Contains("PRIVATE KEY", StringComparison.Ordinal))
    {
        return "STELLA-SEC-002";
    }

    var looksLikeCredential =
        haystack.Contains("CREDENTIAL", StringComparison.Ordinal)
        || haystack.Contains("PASSWORD", StringComparison.Ordinal);

    return looksLikeCredential ? "STELLA-SEC-003" : "STELLA-SEC-001";
}
|
||||
|
||||
// Classifies a supply-chain finding by keyword probes against the upper-cased
// title/description. Probe order matters: first hit wins. "DEPRECAT" is a
// deliberate stem so it matches both "DEPRECATED" and "DEPRECATION".
private static string GetSupplyChainRuleId(FindingInput finding)
{
    var haystack = $"{finding.Title} {finding.Description}".ToUpperInvariant();

    (string Keyword, string RuleId)[] probes =
    [
        ("TYPOSQUAT", "STELLA-SC-003"),
        ("UNSIGNED", "STELLA-SC-001"),
        ("PROVENANCE", "STELLA-SC-002"),
        ("DEPRECAT", "STELLA-SC-004"),
    ];

    foreach (var (keyword, ruleId) in probes)
    {
        if (haystack.Contains(keyword, StringComparison.Ordinal))
        {
            return ruleId;
        }
    }

    // Default: treat as unsigned-package class.
    return "STELLA-SC-001";
}
|
||||
|
||||
// Classifies a binary-hardening finding by keyword probes against the
// upper-cased title/description; first hit wins. "CANARY" and "STACK" both
// map to the stack-canary rule, matching the original either/or check.
private static string GetBinaryHardeningRuleId(FindingInput finding)
{
    var haystack = $"{finding.Title} {finding.Description}".ToUpperInvariant();

    (string Keyword, string RuleId)[] probes =
    [
        ("RELRO", "STELLA-BIN-001"),
        ("CANARY", "STELLA-BIN-002"),
        ("STACK", "STELLA-BIN-002"),
        ("PIE", "STELLA-BIN-003"),
        ("FORTIFY", "STELLA-BIN-004"),
    ];

    foreach (var (keyword, ruleId) in probes)
    {
        if (haystack.Contains(keyword, StringComparison.Ordinal))
        {
            return ruleId;
        }
    }

    // Default: RELRO class.
    return "STELLA-BIN-001";
}
|
||||
|
||||
// Builds a placeholder descriptor for a rule id that is not in the catalog,
// so GetRule never fails for unmapped findings.
private static SarifReportingDescriptor CreateUnknownRule(string ruleId)
    => new(
        Id: ruleId,
        Name: "Unknown Finding",
        ShortDescription: new SarifMessage("Unknown finding type"),
        DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning));
|
||||
|
||||
private static List<SarifReportingDescriptor> BuildRules()
|
||||
{
|
||||
return
|
||||
[
|
||||
// Vulnerability Rules
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-001",
|
||||
Name: "CriticalVulnerability",
|
||||
ShortDescription: new SarifMessage("Critical severity vulnerability (CVSS >= 9.0)"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A critical severity vulnerability was detected in a dependency. " +
|
||||
"Critical vulnerabilities typically allow remote code execution, " +
|
||||
"privilege escalation, or complete system compromise."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#critical"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-002",
|
||||
Name: "HighVulnerability",
|
||||
ShortDescription: new SarifMessage("High severity vulnerability (CVSS 7.0-8.9)"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A high severity vulnerability was detected in a dependency. " +
|
||||
"High severity vulnerabilities can lead to significant data exposure or system impact."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#high"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-003",
|
||||
Name: "MediumVulnerability",
|
||||
ShortDescription: new SarifMessage("Medium severity vulnerability (CVSS 4.0-6.9)"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A medium severity vulnerability was detected in a dependency. " +
|
||||
"Medium severity vulnerabilities require specific conditions to exploit."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#medium"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-004",
|
||||
Name: "LowVulnerability",
|
||||
ShortDescription: new SarifMessage("Low severity vulnerability (CVSS < 4.0)"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A low severity vulnerability was detected in a dependency. " +
|
||||
"Low severity vulnerabilities have limited impact or require unlikely conditions."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
|
||||
HelpUri: "https://stellaops.io/docs/findings/vulnerabilities#low"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-005",
|
||||
Name: "RuntimeReachableVulnerability",
|
||||
ShortDescription: new SarifMessage("Vulnerability confirmed reachable at runtime"),
|
||||
FullDescription: new SarifMessage(
|
||||
"This vulnerability has been confirmed as reachable through runtime analysis. " +
|
||||
"The vulnerable code path is actively executed in your application."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/reachability#runtime"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-VULN-006",
|
||||
Name: "StaticReachableVulnerability",
|
||||
ShortDescription: new SarifMessage("Vulnerability statically reachable"),
|
||||
FullDescription: new SarifMessage(
|
||||
"Static analysis indicates this vulnerability may be reachable. " +
|
||||
"The vulnerable code exists in a call path from your application code."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/reachability#static"),
|
||||
|
||||
// Secret Rules
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SEC-001",
|
||||
Name: "HardcodedSecret",
|
||||
ShortDescription: new SarifMessage("Hardcoded secret detected"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A hardcoded secret (API key, token, password) was detected in source code or configuration. " +
|
||||
"Secrets should be stored in secure vaults and injected at runtime."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/secrets#hardcoded"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SEC-002",
|
||||
Name: "PrivateKeyExposure",
|
||||
ShortDescription: new SarifMessage("Private key exposed"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A private key (RSA, EC, SSH) was detected in source code or artifacts. " +
|
||||
"Private keys should never be committed to version control."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/secrets#private-key"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SEC-003",
|
||||
Name: "CredentialPattern",
|
||||
ShortDescription: new SarifMessage("Credential pattern detected"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A potential credential or password pattern was detected. " +
|
||||
"Review to determine if this is a false positive or actual credential."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/secrets#credential"),
|
||||
|
||||
// Supply Chain Rules
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SC-001",
|
||||
Name: "UnsignedPackage",
|
||||
ShortDescription: new SarifMessage("Unsigned package detected"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A package without cryptographic signature was detected. " +
|
||||
"Unsigned packages cannot be verified for authenticity and integrity."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/supply-chain#unsigned"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SC-002",
|
||||
Name: "UnknownProvenance",
|
||||
ShortDescription: new SarifMessage("Package with unknown provenance"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A package without verifiable build provenance was detected. " +
|
||||
"Provenance helps verify that packages were built from expected sources."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/supply-chain#provenance"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SC-003",
|
||||
Name: "TyposquatCandidate",
|
||||
ShortDescription: new SarifMessage("Potential typosquat package"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A package name similar to a popular package was detected. " +
|
||||
"This may be a typosquat attack attempting to install malicious code."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Error),
|
||||
HelpUri: "https://stellaops.io/docs/findings/supply-chain#typosquat"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-SC-004",
|
||||
Name: "DeprecatedPackage",
|
||||
ShortDescription: new SarifMessage("Deprecated package in use"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A deprecated package was detected. " +
|
||||
"Deprecated packages may no longer receive security updates."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
|
||||
HelpUri: "https://stellaops.io/docs/findings/supply-chain#deprecated"),
|
||||
|
||||
// Binary Hardening Rules
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-BIN-001",
|
||||
Name: "MissingRelro",
|
||||
ShortDescription: new SarifMessage("Binary missing RELRO protection"),
|
||||
FullDescription: new SarifMessage(
|
||||
"The binary was compiled without RELRO (Relocation Read-Only). " +
|
||||
"RELRO protects the GOT from being overwritten by attackers."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/binary#relro"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-BIN-002",
|
||||
Name: "MissingStackCanary",
|
||||
ShortDescription: new SarifMessage("Binary missing stack canary"),
|
||||
FullDescription: new SarifMessage(
|
||||
"The binary was compiled without stack canaries. " +
|
||||
"Stack canaries help detect buffer overflow attacks."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/binary#canary"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-BIN-003",
|
||||
Name: "MissingPie",
|
||||
ShortDescription: new SarifMessage("Binary not position independent"),
|
||||
FullDescription: new SarifMessage(
|
||||
"The binary was not compiled as a Position Independent Executable (PIE). " +
|
||||
"PIE enables full ASLR protection."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/binary#pie"),
|
||||
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-BIN-004",
|
||||
Name: "MissingFortifySource",
|
||||
ShortDescription: new SarifMessage("Binary missing FORTIFY_SOURCE"),
|
||||
FullDescription: new SarifMessage(
|
||||
"The binary was compiled without FORTIFY_SOURCE. " +
|
||||
"FORTIFY_SOURCE adds runtime checks for buffer overflows in standard library calls."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Note),
|
||||
HelpUri: "https://stellaops.io/docs/findings/binary#fortify"),
|
||||
|
||||
// License Rule
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-LIC-001",
|
||||
Name: "LicenseCompliance",
|
||||
ShortDescription: new SarifMessage("License compliance issue"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A license compliance issue was detected. " +
|
||||
"Review the license terms for compatibility with your project."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/license"),
|
||||
|
||||
// Configuration Rule
|
||||
new SarifReportingDescriptor(
|
||||
Id: "STELLA-CFG-001",
|
||||
Name: "ConfigurationIssue",
|
||||
ShortDescription: new SarifMessage("Security configuration issue"),
|
||||
FullDescription: new SarifMessage(
|
||||
"A security configuration issue was detected. " +
|
||||
"Review the configuration to ensure secure defaults are used."),
|
||||
DefaultConfiguration: new SarifReportingConfiguration(SarifLevel.Warning),
|
||||
HelpUri: "https://stellaops.io/docs/findings/configuration")
|
||||
];
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,144 @@
|
||||
// <copyright file="SarifExportOptions.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.Scanner.Sarif;
|
||||
|
||||
/// <summary>
/// Options controlling SARIF export: tool identity, severity filtering,
/// which StellaOps property bags to emit, and output formatting.
/// Sprint: SPRINT_20260109_010_001 Task: Extract shared SARIF models
/// </summary>
public sealed record SarifExportOptions
{
    /// <summary>
    /// Gets the tool name to include in SARIF output.
    /// Default: "StellaOps Scanner".
    /// </summary>
    public string ToolName { get; init; } = "StellaOps Scanner";

    /// <summary>
    /// Gets the tool version (required). Used for both
    /// <c>tool.driver.version</c> and <c>tool.driver.semanticVersion</c>.
    /// </summary>
    public required string ToolVersion { get; init; }

    /// <summary>
    /// Gets the tool information URI.
    /// Default: "https://stellaops.io/scanner".
    /// </summary>
    public string ToolUri { get; init; } = "https://stellaops.io/scanner";

    /// <summary>
    /// Gets the minimum severity to include (null = include all findings).
    /// </summary>
    public Severity? MinimumSeverity { get; init; }

    /// <summary>
    /// Gets whether to include reachability information in result properties.
    /// Default: true.
    /// </summary>
    public bool IncludeReachability { get; init; } = true;

    /// <summary>
    /// Gets whether to include VEX status in result properties.
    /// Default: true.
    /// </summary>
    public bool IncludeVexStatus { get; init; } = true;

    /// <summary>
    /// Gets whether to include EPSS scores in result properties.
    /// Default: true.
    /// </summary>
    public bool IncludeEpss { get; init; } = true;

    /// <summary>
    /// Gets whether to include the KEV (Known Exploited Vulnerabilities) flag.
    /// Default: true.
    /// </summary>
    public bool IncludeKev { get; init; } = true;

    /// <summary>
    /// Gets whether to include StellaOps evidence URIs.
    /// Default: false (evidence URIs may be deployment-internal).
    /// </summary>
    public bool IncludeEvidenceUris { get; init; } = false;

    /// <summary>
    /// Gets whether to include attestation digest references.
    /// Default: true.
    /// </summary>
    public bool IncludeAttestation { get; init; } = true;

    /// <summary>
    /// Gets version control information for the run; emitted as
    /// <c>versionControlProvenance</c> when non-null.
    /// </summary>
    public VersionControlInfo? VersionControl { get; init; }

    /// <summary>
    /// Gets whether to pretty-print JSON output.
    /// Default: false (compact output).
    /// </summary>
    public bool IndentedJson { get; init; } = false;

    /// <summary>
    /// Gets the scan category (e.g., "security", "supply-chain").
    /// When set it is surfaced to GitHub Code Scanning via result properties.
    /// </summary>
    public string? Category { get; init; }

    /// <summary>
    /// Gets the source root used to relativize artifact paths.
    /// </summary>
    public string? SourceRoot { get; init; }

    /// <summary>
    /// Gets the fingerprint strategy to use for result deduplication.
    /// Default: <see cref="FingerprintStrategy.Standard"/>.
    /// </summary>
    public FingerprintStrategy FingerprintStrategy { get; init; } = FingerprintStrategy.Standard;
}
|
||||
|
||||
/// <summary>
/// Version control information for SARIF output
/// (maps to the <c>versionControlProvenance</c> run property).
/// </summary>
public sealed record VersionControlInfo
{
    /// <summary>
    /// Gets the repository URI (required).
    /// </summary>
    public required string RepositoryUri { get; init; }

    /// <summary>
    /// Gets the revision ID (commit SHA), if known.
    /// </summary>
    public string? RevisionId { get; init; }

    /// <summary>
    /// Gets the branch name, if known.
    /// </summary>
    public string? Branch { get; init; }
}
|
||||
|
||||
/// <summary>
/// Fingerprint generation strategy controlling how much finding context is
/// folded into the stable result fingerprint.
/// </summary>
public enum FingerprintStrategy
{
    /// <summary>Standard fingerprint based on rule, component, vulnerability, and artifact.</summary>
    Standard,

    /// <summary>Minimal fingerprint for deduplication only.</summary>
    Minimal,

    /// <summary>Extended fingerprint including reachability and VEX status.</summary>
    Extended
}
|
||||
|
||||
/// <summary>
/// Severity levels for findings. Numeric values increase with severity so
/// ordinal comparison (e.g. severity filtering) is meaningful.
/// </summary>
public enum Severity
{
    /// <summary>Unknown or unspecified severity.</summary>
    Unknown = 0,

    /// <summary>Low severity (CVSS &lt; 4.0).</summary>
    Low = 1,

    /// <summary>Medium severity (CVSS 4.0-6.9).</summary>
    Medium = 2,

    /// <summary>High severity (CVSS 7.0-8.9).</summary>
    High = 3,

    /// <summary>Critical severity (CVSS &gt;= 9.0).</summary>
    Critical = 4
}
|
||||
@@ -0,0 +1,410 @@
|
||||
// <copyright file="SarifExportService.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Sarif.Fingerprints;
|
||||
using StellaOps.Scanner.Sarif.Models;
|
||||
using StellaOps.Scanner.Sarif.Rules;
|
||||
|
||||
namespace StellaOps.Scanner.Sarif;
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="ISarifExportService"/>.
/// Maps StellaOps findings to SARIF results with deterministic ordering and
/// stable fingerprints, then serializes via System.Text.Json.
/// Sprint: SPRINT_20260109_010_001 Task: Implement export service
/// </summary>
public sealed class SarifExportService : ISarifExportService
{
    private readonly ISarifRuleRegistry _ruleRegistry;
    private readonly IFingerprintGenerator _fingerprintGenerator;
    private readonly TimeProvider _timeProvider;

    // Serializer options are cached statically so repeated exports do not
    // re-allocate (and re-warm) JsonSerializerOptions instances.
    private static readonly JsonSerializerOptions DefaultJsonOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    private static readonly JsonSerializerOptions IndentedJsonOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="SarifExportService"/> class.
    /// </summary>
    /// <param name="ruleRegistry">The rule registry used to resolve rule IDs and levels.</param>
    /// <param name="fingerprintGenerator">The fingerprint generator for stable result identity.</param>
    /// <param name="timeProvider">The time provider; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="ruleRegistry"/> or <paramref name="fingerprintGenerator"/> is null.
    /// </exception>
    public SarifExportService(
        ISarifRuleRegistry ruleRegistry,
        IFingerprintGenerator fingerprintGenerator,
        TimeProvider? timeProvider = null)
    {
        _ruleRegistry = ruleRegistry ?? throw new ArgumentNullException(nameof(ruleRegistry));
        _fingerprintGenerator = fingerprintGenerator ?? throw new ArgumentNullException(nameof(fingerprintGenerator));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc/>
    public Task<SarifLog> ExportAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(findings);
        ArgumentNullException.ThrowIfNull(options);

        // Fix: the cancellation token was previously accepted but never
        // observed. The mapping below is synchronous, so check once up front
        // before doing any work.
        cancellationToken.ThrowIfCancellationRequested();

        var filteredFindings = FilterFindings(findings, options);
        var results = MapToResults(filteredFindings, options);
        var run = CreateRun(results, options);
        var log = SarifLog.Create([run]);

        return Task.FromResult(log);
    }

    /// <inheritdoc/>
    public async Task<string> ExportToJsonAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        CancellationToken cancellationToken = default)
    {
        var log = await ExportAsync(findings, options, cancellationToken).ConfigureAwait(false);
        var jsonOptions = options.IndentedJson ? IndentedJsonOptions : DefaultJsonOptions;
        return JsonSerializer.Serialize(log, jsonOptions);
    }

    /// <inheritdoc/>
    public async Task ExportToStreamAsync(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options,
        Stream outputStream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(outputStream);

        var log = await ExportAsync(findings, options, cancellationToken).ConfigureAwait(false);
        var jsonOptions = options.IndentedJson ? IndentedJsonOptions : DefaultJsonOptions;
        await JsonSerializer.SerializeAsync(outputStream, log, jsonOptions, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Applies option-driven filters (currently minimum severity) lazily.
    /// </summary>
    private static IEnumerable<FindingInput> FilterFindings(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options)
    {
        var query = findings.AsEnumerable();

        // Filter by minimum severity; Severity values compare ordinally.
        if (options.MinimumSeverity.HasValue)
        {
            query = query.Where(f => f.Severity >= options.MinimumSeverity.Value);
        }

        return query;
    }

    /// <summary>
    /// Maps findings to SARIF results in a deterministic order so repeated
    /// exports of the same findings produce byte-identical output.
    /// </summary>
    private ImmutableArray<SarifResult> MapToResults(
        IEnumerable<FindingInput> findings,
        SarifExportOptions options)
    {
        var results = new List<SarifResult>();

        foreach (var finding in findings)
        {
            var result = MapToResult(finding, options);
            results.Add(result);
        }

        // Sort deterministically by rule ID, then by the primary fingerprint.
        return results
            .OrderBy(r => r.RuleId, StringComparer.Ordinal)
            .ThenBy(r => r.Fingerprints?.GetValueOrDefault("stellaops/v1") ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>
    /// Maps a single finding to a SARIF result, resolving rule identity,
    /// message, locations, fingerprints, and the StellaOps property bag.
    /// </summary>
    private SarifResult MapToResult(FindingInput finding, SarifExportOptions options)
    {
        var ruleId = _ruleRegistry.GetRuleId(finding);
        var level = _ruleRegistry.GetLevel(finding);
        var message = CreateMessage(finding);
        var locations = CreateLocations(finding, options);
        var fingerprints = CreateFingerprints(finding, options);
        var partialFingerprints = _fingerprintGenerator.GeneratePartial(finding)
            .ToImmutableSortedDictionary(StringComparer.Ordinal);
        var properties = CreateProperties(finding, options);

        return new SarifResult(
            RuleId: ruleId,
            Level: level,
            Message: message,
            Locations: locations.Length > 0 ? locations : null,
            Fingerprints: fingerprints,
            PartialFingerprints: partialFingerprints.Count > 0 ? partialFingerprints : null,
            Properties: properties);
    }

    /// <summary>
    /// Builds the result message: "[VulnId: ]Title[ in component@version]",
    /// plus a Markdown body when a description is available.
    /// </summary>
    private static SarifMessage CreateMessage(FindingInput finding)
    {
        var text = finding.Title;

        if (!string.IsNullOrEmpty(finding.VulnerabilityId))
        {
            text = $"{finding.VulnerabilityId}: {text}";
        }

        if (!string.IsNullOrEmpty(finding.ComponentName))
        {
            var version = finding.ComponentVersion ?? "unknown";
            text = $"{text} in {finding.ComponentName}@{version}";
        }

        // Markdown is only emitted when there is a description to carry;
        // plain text alone is sufficient otherwise.
        string? markdown = null;
        if (!string.IsNullOrEmpty(finding.Description))
        {
            markdown = $"**{text}**\n\n{finding.Description}";

            if (!string.IsNullOrEmpty(finding.Recommendation))
            {
                markdown += $"\n\n**Recommendation:** {finding.Recommendation}";
            }
        }

        return new SarifMessage(text, markdown);
    }

    /// <summary>
    /// Creates the physical (file/region) and logical (package) locations for
    /// a finding. Returns empty when the finding has no file path.
    /// </summary>
    private static ImmutableArray<SarifLocation> CreateLocations(FindingInput finding, SarifExportOptions options)
    {
        if (string.IsNullOrEmpty(finding.FilePath))
        {
            return [];
        }

        var uri = NormalizePath(finding.FilePath, options.SourceRoot);

        // A region is only meaningful when at least a start line is known.
        var region = (finding.StartLine.HasValue)
            ? new SarifRegion(
                StartLine: finding.StartLine,
                StartColumn: finding.StartColumn,
                EndLine: finding.EndLine,
                EndColumn: finding.EndColumn)
            : null;

        var physicalLocation = new SarifPhysicalLocation(
            ArtifactLocation: new SarifArtifactLocation(uri),
            Region: region);

        var logicalLocations = CreateLogicalLocations(finding);

        return [new SarifLocation(
            PhysicalLocation: physicalLocation,
            LogicalLocations: logicalLocations.Length > 0 ? logicalLocations : null)];
    }

    /// <summary>
    /// Creates logical locations; currently the affected package (by purl).
    /// </summary>
    private static ImmutableArray<SarifLogicalLocation> CreateLogicalLocations(FindingInput finding)
    {
        var locations = new List<SarifLogicalLocation>();

        // Add component as a logical location.
        if (!string.IsNullOrEmpty(finding.ComponentPurl))
        {
            locations.Add(new SarifLogicalLocation(
                Name: finding.ComponentName ?? finding.ComponentPurl,
                FullyQualifiedName: finding.ComponentPurl,
                Kind: "package"));
        }

        return locations.ToImmutableArray();
    }

    /// <summary>
    /// Creates the primary fingerprint map, keyed "stellaops/v1" so the
    /// scheme can be versioned without breaking consumers.
    /// </summary>
    private ImmutableSortedDictionary<string, string> CreateFingerprints(
        FindingInput finding,
        SarifExportOptions options)
    {
        var fingerprint = _fingerprintGenerator.GeneratePrimary(finding, options.FingerprintStrategy);

        return ImmutableSortedDictionary.CreateRange(
            StringComparer.Ordinal,
            [new KeyValuePair<string, string>("stellaops/v1", fingerprint)]);
    }

    /// <summary>
    /// Builds the result property bag. Keys are namespaced ("stellaops/…",
    /// "github/…", "custom/…") and the dictionary is sorted for determinism.
    /// Returns null when there is nothing to emit.
    /// </summary>
    private ImmutableSortedDictionary<string, object>? CreateProperties(
        FindingInput finding,
        SarifExportOptions options)
    {
        var props = new Dictionary<string, object>(StringComparer.Ordinal);

        // Always include finding type.
        props["stellaops/findingType"] = finding.Type.ToString();

        // Vulnerability-specific properties.
        if (!string.IsNullOrEmpty(finding.VulnerabilityId))
        {
            props["stellaops/vulnId"] = finding.VulnerabilityId;
        }

        if (finding.CvssScore.HasValue)
        {
            props["stellaops/cvss/score"] = finding.CvssScore.Value;
        }

        if (!string.IsNullOrEmpty(finding.CvssVector))
        {
            props["stellaops/cvss/vector"] = finding.CvssVector;
        }

        // EPSS (probability; percentile only if also present).
        if (options.IncludeEpss && finding.EpssProbability.HasValue)
        {
            props["stellaops/epss/probability"] = finding.EpssProbability.Value;

            if (finding.EpssPercentile.HasValue)
            {
                props["stellaops/epss/percentile"] = finding.EpssPercentile.Value;
            }
        }

        // KEV flag is only emitted when set, keeping output compact.
        if (options.IncludeKev && finding.IsKev)
        {
            props["stellaops/kev"] = true;
        }

        // Reachability.
        if (options.IncludeReachability && finding.Reachability.HasValue)
        {
            props["stellaops/reachability"] = finding.Reachability.Value.ToString();
        }

        // VEX status and optional justification.
        if (options.IncludeVexStatus && finding.VexStatus.HasValue)
        {
            props["stellaops/vex/status"] = finding.VexStatus.Value.ToString();

            if (!string.IsNullOrEmpty(finding.VexJustification))
            {
                props["stellaops/vex/justification"] = finding.VexJustification;
            }
        }

        // Component.
        if (!string.IsNullOrEmpty(finding.ComponentPurl))
        {
            props["stellaops/component/purl"] = finding.ComponentPurl;
        }

        // Artifact.
        if (!string.IsNullOrEmpty(finding.ArtifactDigest))
        {
            props["stellaops/artifact/digest"] = finding.ArtifactDigest;
        }

        // Evidence URIs (opt-in; may be deployment-internal).
        if (options.IncludeEvidenceUris && finding.EvidenceUris?.Count > 0)
        {
            props["stellaops/evidence"] = finding.EvidenceUris;
        }

        // Attestation digests.
        if (options.IncludeAttestation && finding.AttestationDigests?.Count > 0)
        {
            props["stellaops/attestation"] = finding.AttestationDigests;
        }

        // Category: "github/" prefix is what GitHub Code Scanning reads.
        if (!string.IsNullOrEmpty(options.Category))
        {
            props["github/alertCategory"] = options.Category;
        }

        // Custom properties are namespaced to avoid colliding with the above.
        if (finding.Properties != null)
        {
            foreach (var kvp in finding.Properties)
            {
                props[$"custom/{kvp.Key}"] = kvp.Value;
            }
        }

        return props.Count > 0
            ? props.ToImmutableSortedDictionary(StringComparer.Ordinal)
            : null;
    }

    /// <summary>
    /// Assembles the SARIF run: tool (driver + used rules), results,
    /// invocation timestamps, and optional version-control provenance.
    /// </summary>
    private SarifRun CreateRun(ImmutableArray<SarifResult> results, SarifExportOptions options)
    {
        var driver = CreateDriver(options, results);
        var tool = new SarifTool(driver);
        var invocations = CreateInvocations();
        var versionControl = CreateVersionControl(options);

        return new SarifRun(
            Tool: tool,
            Results: results,
            Invocations: invocations,
            VersionControlProvenance: versionControl);
    }

    /// <summary>
    /// Creates the tool driver, listing only the rules actually referenced by
    /// the results (sorted by ID for determinism).
    /// </summary>
    private SarifToolComponent CreateDriver(SarifExportOptions options, ImmutableArray<SarifResult> results)
    {
        // Get unique rules used in results.
        var usedRuleIds = results.Select(r => r.RuleId).Distinct().ToHashSet(StringComparer.Ordinal);
        var rules = _ruleRegistry.GetAllRules()
            .Where(r => usedRuleIds.Contains(r.Id))
            .OrderBy(r => r.Id, StringComparer.Ordinal)
            .ToImmutableArray();

        return new SarifToolComponent(
            Name: options.ToolName,
            Version: options.ToolVersion,
            SemanticVersion: options.ToolVersion,
            InformationUri: options.ToolUri,
            Rules: rules);
    }

    /// <summary>
    /// Creates a single successful invocation; start and end are the same
    /// instant because mapping is performed synchronously at export time.
    /// </summary>
    private ImmutableArray<SarifInvocation> CreateInvocations()
    {
        var now = _timeProvider.GetUtcNow();

        return [new SarifInvocation(
            ExecutionSuccessful: true,
            StartTimeUtc: now,
            EndTimeUtc: now)];
    }

    /// <summary>
    /// Maps the options' version-control info to SARIF provenance, or null
    /// when none was supplied.
    /// </summary>
    private static ImmutableArray<SarifVersionControlDetails>? CreateVersionControl(SarifExportOptions options)
    {
        if (options.VersionControl is null)
        {
            return null;
        }

        return [new SarifVersionControlDetails(
            RepositoryUri: options.VersionControl.RepositoryUri,
            RevisionId: options.VersionControl.RevisionId,
            Branch: options.VersionControl.Branch)];
    }

    /// <summary>
    /// Normalizes a file path to a forward-slash, source-root-relative URI
    /// with no leading slash, as SARIF artifact locations expect.
    /// </summary>
    private static string NormalizePath(string path, string? sourceRoot)
    {
        // Convert backslashes to forward slashes.
        var normalized = path.Replace('\\', '/');

        // Remove source root prefix if provided (case-insensitive to cope
        // with Windows paths).
        if (!string.IsNullOrEmpty(sourceRoot))
        {
            var normalizedRoot = sourceRoot.Replace('\\', '/').TrimEnd('/') + "/";
            if (normalized.StartsWith(normalizedRoot, StringComparison.OrdinalIgnoreCase))
            {
                normalized = normalized[normalizedRoot.Length..];
            }
        }

        // Remove leading slash for relative paths.
        return normalized.TrimStart('/');
    }
}
|
||||
@@ -0,0 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<Description>SARIF 2.1.0 exporter for StellaOps Scanner findings with GitHub Code Scanning support</Description>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -226,14 +226,39 @@ public enum SbomFormat
|
||||
Unknown
|
||||
}
|
||||
|
||||
/// <summary>
/// Validation mode that determines how validation errors and warnings are
/// handled by the SBOM validation gate.
/// </summary>
public enum SbomValidationMode
{
    /// <summary>Fail on any error; warn on warnings.</summary>
    Strict,

    /// <summary>Warn on errors; ignore warnings.</summary>
    Lenient,

    /// <summary>Log only; never fail.</summary>
    Audit,

    /// <summary>Skip validation entirely.</summary>
    Off
}
|
||||
|
||||
/// <summary>
|
||||
/// Validation options.
|
||||
/// Sprint: SPRINT_20260107_005_003 Task VG-007
|
||||
/// </summary>
|
||||
public sealed record SbomValidationOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets or sets the validation mode.
|
||||
/// Default: Strict.
|
||||
/// </summary>
|
||||
public SbomValidationMode Mode { get; init; } = SbomValidationMode.Strict;
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the timeout for validation.
|
||||
/// Default: 30 seconds.
|
||||
/// Default: 30 seconds. Must be positive.
|
||||
/// </summary>
|
||||
public TimeSpan Timeout { get; init; } = TimeSpan.FromSeconds(30);
|
||||
|
||||
@@ -259,6 +284,38 @@ public sealed record SbomValidationOptions
|
||||
/// Gets or sets custom validation rules (JSON Schema or SHACL).
|
||||
/// </summary>
|
||||
public string? CustomRulesPath { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the required SPDX profiles for SPDX 3.0.1 documents.
|
||||
/// Examples: "core", "software", "security", "build".
|
||||
/// </summary>
|
||||
public IReadOnlyList<string>? RequiredSpdxProfiles { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Validates the options.
|
||||
/// </summary>
|
||||
/// <returns>Validation errors, or empty if valid.</returns>
|
||||
public IReadOnlyList<string> Validate()
|
||||
{
|
||||
var errors = new List<string>();
|
||||
|
||||
if (Timeout <= TimeSpan.Zero)
|
||||
{
|
||||
errors.Add("Timeout must be positive.");
|
||||
}
|
||||
|
||||
if (Timeout > TimeSpan.FromMinutes(10))
|
||||
{
|
||||
errors.Add("Timeout cannot exceed 10 minutes.");
|
||||
}
|
||||
|
||||
if (CustomRulesPath is not null && !File.Exists(CustomRulesPath))
|
||||
{
|
||||
errors.Add($"Custom rules file not found: {CustomRulesPath}");
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -10,7 +10,13 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Caching.Memory" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" />
|
||||
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Http" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
// <copyright file="ValidationGateOptions.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
|
||||
namespace StellaOps.Scanner.Validation;
|
||||
|
||||
/// <summary>
/// Configuration options for the validation gate, bound from the
/// "ValidationGate" configuration section.
/// Sprint: SPRINT_20260107_005_003 Task VG-007
/// Follows CLAUDE.md Rule 8.14 - Use ValidateDataAnnotations and ValidateOnStart.
/// </summary>
public sealed class ValidationGateOptions : IValidatableObject
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "ValidationGate";

    /// <summary>
    /// Gets or sets whether validation is enabled.
    /// Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Gets or sets the validation mode.
    /// Default: Strict.
    /// </summary>
    [Required]
    public SbomValidationMode Mode { get; set; } = SbomValidationMode.Strict;

    /// <summary>
    /// Gets or sets the timeout for validation in seconds.
    /// Default: 30. Range: 1-600 (enforced by data annotation validation).
    /// </summary>
    [Range(1, 600, ErrorMessage = "Timeout must be between 1 and 600 seconds.")]
    public int TimeoutSeconds { get; set; } = 30;

    /// <summary>
    /// Gets or sets whether to include warnings in results.
    /// Default: true.
    /// </summary>
    public bool IncludeWarnings { get; set; } = true;

    /// <summary>
    /// Gets or sets whether to validate license expressions.
    /// Default: true.
    /// </summary>
    public bool ValidateLicenses { get; set; } = true;

    /// <summary>
    /// Gets or sets the path to custom validation rules (JSON Schema or SHACL).
    /// Existence of the file is checked in <see cref="Validate"/>.
    /// </summary>
    public string? CustomRulesPath { get; set; }

    /// <summary>
    /// Gets or sets the required SPDX profiles for SPDX 3.0.1 validation.
    /// Examples: "core", "software", "security", "build".
    /// Empty list means no profile requirement.
    /// </summary>
    public List<string> RequiredSpdxProfiles { get; set; } = new();

    /// <summary>
    /// Gets or sets whether to fail the build if validation fails.
    /// Only applies in Strict mode.
    /// Default: true.
    /// </summary>
    public bool FailOnValidationError { get; set; } = true;

    /// <summary>
    /// Gets or sets whether to cache validation results.
    /// Default: true.
    /// </summary>
    public bool CacheResults { get; set; } = true;

    /// <summary>
    /// Gets or sets the cache TTL in seconds.
    /// Default: 3600 (1 hour). Range: 60-86400 (enforced by data annotation validation).
    /// </summary>
    [Range(60, 86400, ErrorMessage = "CacheTtlSeconds must be between 60 and 86400 seconds.")]
    public int CacheTtlSeconds { get; set; } = 3600;

    /// <summary>
    /// Gets the timeout as a TimeSpan (derived from <see cref="TimeoutSeconds"/>).
    /// </summary>
    public TimeSpan Timeout => TimeSpan.FromSeconds(TimeoutSeconds);

    /// <summary>
    /// Gets the cache TTL as a TimeSpan (derived from <see cref="CacheTtlSeconds"/>).
    /// </summary>
    public TimeSpan CacheTtl => TimeSpan.FromSeconds(CacheTtlSeconds);

    /// <inheritdoc/>
    /// <remarks>
    /// Cross-property checks that data annotations cannot express:
    /// the custom rules file must exist, and FailOnValidationError is
    /// contradictory when validation is turned off entirely.
    /// </remarks>
    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        if (CustomRulesPath is not null && !File.Exists(CustomRulesPath))
        {
            yield return new ValidationResult(
                $"Custom rules file not found: {CustomRulesPath}",
                new[] { nameof(CustomRulesPath) });
        }

        if (Mode == SbomValidationMode.Off && FailOnValidationError)
        {
            yield return new ValidationResult(
                "FailOnValidationError should be false when Mode is Off",
                new[] { nameof(FailOnValidationError), nameof(Mode) });
        }
    }

    /// <summary>
    /// Creates SbomValidationOptions from this configuration.
    /// An empty profile list is mapped to null (no requirement).
    /// </summary>
    public SbomValidationOptions ToValidationOptions() => new()
    {
        Mode = Mode,
        Timeout = Timeout,
        IncludeWarnings = IncludeWarnings,
        ValidateLicenses = ValidateLicenses,
        CustomRulesPath = CustomRulesPath,
        RequiredSpdxProfiles = RequiredSpdxProfiles.Count > 0 ? RequiredSpdxProfiles : null
    };
}
|
||||
|
||||
/// <summary>
/// Extension methods for registering validation gate options.
/// </summary>
public static class ValidationGateOptionsExtensions
{
    /// <summary>
    /// Registers <see cref="ValidationGateOptions"/> bound to the
    /// "ValidationGate" configuration section, with data-annotation
    /// validation performed eagerly at application start.
    /// </summary>
    /// <param name="services">The service collection to register into.</param>
    /// <param name="configuration">The application configuration root.</param>
    /// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
    public static IServiceCollection AddValidationGateOptions(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        var section = configuration.GetSection(ValidationGateOptions.SectionName);

        var optionsBuilder = services.AddOptions<ValidationGateOptions>();
        optionsBuilder.Bind(section);
        optionsBuilder.ValidateDataAnnotations();
        optionsBuilder.ValidateOnStart();

        return services;
    }
}
|
||||
@@ -0,0 +1,483 @@
|
||||
// <copyright file="ValidatorBinaryManager.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.IO.Compression;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Validation;
|
||||
|
||||
/// <summary>
/// Manages validator binary downloads, extraction, and verification.
/// Sprint: SPRINT_20260107_005_003 Task VG-004
/// </summary>
public sealed class ValidatorBinaryManager
{
    private readonly ValidatorBinaryOptions _options;
    private readonly IHttpClientFactory _httpClientFactory;
    private readonly ILogger<ValidatorBinaryManager> _logger;
    private readonly TimeProvider _timeProvider;

    // Built-in specs; overridable via ValidatorBinaryOptions.CustomSpecs.
    private static readonly ImmutableDictionary<string, ValidatorBinarySpec> DefaultSpecs =
        new Dictionary<string, ValidatorBinarySpec>(StringComparer.OrdinalIgnoreCase)
        {
            ["sbom-utility"] = new ValidatorBinarySpec
            {
                Name = "sbom-utility",
                Version = "0.17.0",
                BaseUrl = "https://github.com/CycloneDX/sbom-utility/releases/download/v0.17.0",
                FileNameFormat = "sbom-utility-v{0}-{1}-{2}.tar.gz",
                ExpectedHashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    // All-zero entries are placeholders; verification is skipped
                    // for them (see IsPlaceholderHash). Replace with real digests.
                    ["linux-amd64"] = "0000000000000000000000000000000000000000000000000000000000000000",
                    ["linux-arm64"] = "0000000000000000000000000000000000000000000000000000000000000000",
                    ["darwin-amd64"] = "0000000000000000000000000000000000000000000000000000000000000000",
                    ["darwin-arm64"] = "0000000000000000000000000000000000000000000000000000000000000000",
                    ["windows-amd64"] = "0000000000000000000000000000000000000000000000000000000000000000"
                }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase),
                ExecutableName = "sbom-utility"
            },
            ["spdx-tools"] = new ValidatorBinarySpec
            {
                Name = "spdx-tools",
                Version = "1.1.9",
                BaseUrl = "https://github.com/spdx/tools-java/releases/download/v1.1.9",
                FileNameFormat = "tools-java-{0}-jar-with-dependencies.jar",
                ExpectedHashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    ["jar"] = "0000000000000000000000000000000000000000000000000000000000000000"
                }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase),
                ExecutableName = "tools-java-1.1.9-jar-with-dependencies.jar",
                IsJar = true
            }
        }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase);

    /// <summary>
    /// Initializes a new instance of the <see cref="ValidatorBinaryManager"/> class.
    /// </summary>
    /// <param name="options">Binary manager options.</param>
    /// <param name="httpClientFactory">HTTP client factory per CLAUDE.md Rule 8.9.</param>
    /// <param name="logger">Logger instance.</param>
    /// <param name="timeProvider">Time provider per CLAUDE.md Rule 8.2.</param>
    /// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
    public ValidatorBinaryManager(
        IOptions<ValidatorBinaryOptions> options,
        IHttpClientFactory httpClientFactory,
        ILogger<ValidatorBinaryManager> logger,
        TimeProvider timeProvider)
    {
        // Guard DI wiring up front instead of deferring an NRE to first use.
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(httpClientFactory);
        ArgumentNullException.ThrowIfNull(logger);
        ArgumentNullException.ThrowIfNull(timeProvider);

        _options = options.Value;
        _httpClientFactory = httpClientFactory;
        _logger = logger;
        _timeProvider = timeProvider;
    }

    /// <summary>
    /// Ensures a validator binary is available, downloading if necessary.
    /// </summary>
    /// <param name="validatorName">Name of the validator (e.g., "sbom-utility", "spdx-tools").</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Path to the validator executable.</returns>
    /// <exception cref="ValidatorBinaryException">Thrown if binary cannot be obtained.</exception>
    public async Task<string> EnsureBinaryAsync(
        string validatorName,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrEmpty(validatorName);

        var spec = GetBinarySpec(validatorName);
        var executablePath = GetExecutablePath(spec);

        // Check if already available
        if (File.Exists(executablePath))
        {
            _logger.LogDebug("Validator {Validator} already available at {Path}", validatorName, executablePath);
            return executablePath;
        }

        // Check offline mode: never touch the network in air-gapped deployments.
        if (_options.OfflineMode)
        {
            throw new ValidatorBinaryException(
                $"Validator '{validatorName}' not found and offline mode is enabled. " +
                $"Expected at: {executablePath}");
        }

        // Download and extract
        await DownloadAndExtractAsync(spec, cancellationToken).ConfigureAwait(false);

        if (!File.Exists(executablePath))
        {
            throw new ValidatorBinaryException(
                $"Validator '{validatorName}' was downloaded but executable not found at: {executablePath}");
        }

        return executablePath;
    }

    /// <summary>
    /// Checks if a validator binary is available without downloading.
    /// </summary>
    /// <param name="validatorName">Name of the validator.</param>
    /// <returns>True if the binary is available.</returns>
    public bool IsBinaryAvailable(string validatorName)
    {
        if (string.IsNullOrEmpty(validatorName))
        {
            return false;
        }

        try
        {
            var spec = GetBinarySpec(validatorName);
            var executablePath = GetExecutablePath(spec);
            return File.Exists(executablePath);
        }
        catch
        {
            // Probe method by contract: unknown validators or path errors
            // report "not available" rather than throwing.
            return false;
        }
    }

    /// <summary>
    /// Gets the path where a validator binary should be located.
    /// </summary>
    /// <param name="validatorName">Name of the validator.</param>
    /// <returns>Expected path to the executable.</returns>
    /// <exception cref="ValidatorBinaryException">Thrown if the validator is unknown.</exception>
    public string GetBinaryPath(string validatorName)
    {
        ArgumentException.ThrowIfNullOrEmpty(validatorName);

        var spec = GetBinarySpec(validatorName);
        return GetExecutablePath(spec);
    }

    /// <summary>
    /// Verifies the integrity of an existing validator binary.
    /// </summary>
    /// <param name="validatorName">Name of the validator.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the binary passes integrity verification.</returns>
    public async Task<bool> VerifyBinaryIntegrityAsync(
        string validatorName,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrEmpty(validatorName);

        var spec = GetBinarySpec(validatorName);
        var executablePath = GetExecutablePath(spec);

        if (!File.Exists(executablePath))
        {
            return false;
        }

        var platformKey = GetPlatformKey(spec);
        if (!spec.ExpectedHashes.TryGetValue(platformKey, out var expectedHash))
        {
            _logger.LogWarning(
                "No expected hash for validator {Validator} on platform {Platform}",
                validatorName, platformKey);
            return true; // No hash to verify against
        }

        var actualHash = await ComputeFileHashAsync(executablePath, cancellationToken).ConfigureAwait(false);
        var match = string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase);

        if (!match)
        {
            _logger.LogWarning(
                "Integrity verification failed for {Validator}: expected {Expected}, got {Actual}",
                validatorName, expectedHash, actualHash);
        }

        return match;
    }

    /// <summary>
    /// Gets information about available validators.
    /// </summary>
    /// <returns>Dictionary of validator specifications (custom specs override defaults by key).</returns>
    public IReadOnlyDictionary<string, ValidatorBinarySpec> GetAvailableValidators()
    {
        if (_options.CustomSpecs is not null && _options.CustomSpecs.Count > 0)
        {
            // Custom specs win on key collisions; defaults fill the gaps.
            return _options.CustomSpecs
                .Concat(DefaultSpecs.Where(kv => !_options.CustomSpecs.ContainsKey(kv.Key)))
                .ToImmutableDictionary(StringComparer.OrdinalIgnoreCase);
        }

        return DefaultSpecs;
    }

    // Resolves a spec by name or throws for unknown validators.
    private ValidatorBinarySpec GetBinarySpec(string validatorName)
    {
        var specs = GetAvailableValidators();
        if (!specs.TryGetValue(validatorName, out var spec))
        {
            throw new ValidatorBinaryException($"Unknown validator: {validatorName}");
        }
        return spec;
    }

    // Computes <binaryDir>/<name>/<version>/<executable>, appending ".exe" on Windows
    // for native binaries (JARs keep their name on every platform).
    private string GetExecutablePath(ValidatorBinarySpec spec)
    {
        var baseDir = string.IsNullOrEmpty(_options.BinaryDirectory)
            ? Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "stellaops", "validators")
            : _options.BinaryDirectory;

        var versionDir = Path.Combine(baseDir, spec.Name, spec.Version);
        var executableName = spec.IsJar
            ? spec.ExecutableName
            : RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
                ? spec.ExecutableName + ".exe"
                : spec.ExecutableName;

        return Path.Combine(versionDir, executableName);
    }

    private async Task DownloadAndExtractAsync(
        ValidatorBinarySpec spec,
        CancellationToken cancellationToken)
    {
        var platformKey = GetPlatformKey(spec);
        var downloadUrl = GetDownloadUrl(spec, platformKey);
        var targetDir = Path.GetDirectoryName(GetExecutablePath(spec))!;

        _logger.LogInformation(
            "Downloading validator {Validator} v{Version} for {Platform} from {Url}",
            spec.Name, spec.Version, platformKey, downloadUrl);

        Directory.CreateDirectory(targetDir);

        var tempFile = Path.Combine(targetDir, $"download_{_timeProvider.GetUtcNow().ToUnixTimeMilliseconds()}.tmp");

        try
        {
            // Download
            using var httpClient = _httpClientFactory.CreateClient("ValidatorDownload");
            httpClient.Timeout = _options.DownloadTimeout;

            using var response = await httpClient.GetAsync(
                downloadUrl,
                HttpCompletionOption.ResponseHeadersRead,
                cancellationToken).ConfigureAwait(false);

            response.EnsureSuccessStatusCode();

            // BUGFIX: the write stream must be fully closed (and flushed) before the
            // temp file is re-opened for hashing, extraction, or File.Move below.
            // File.Create opens with FileShare.None, so keeping it open caused a
            // sharing violation on Windows and risked hashing unflushed bytes.
            await using (var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false))
            await using (var fileStream = File.Create(tempFile))
            {
                await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
            }

            // Verify hash
            if (spec.ExpectedHashes.TryGetValue(platformKey, out var expectedHash))
            {
                var actualHash = await ComputeFileHashAsync(tempFile, cancellationToken).ConfigureAwait(false);

                // Skip hash verification if placeholder hash (all zeros)
                if (!IsPlaceholderHash(expectedHash) &&
                    !string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase))
                {
                    throw new ValidatorBinaryException(
                        $"Downloaded file hash mismatch: expected {expectedHash}, got {actualHash}");
                }
            }

            // Extract
            if (downloadUrl.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase) ||
                downloadUrl.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
            {
                await ExtractTarGzAsync(tempFile, targetDir, cancellationToken).ConfigureAwait(false);
            }
            else if (downloadUrl.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
            {
                ZipFile.ExtractToDirectory(tempFile, targetDir, overwriteFiles: true);
            }
            else if (downloadUrl.EndsWith(".jar", StringComparison.OrdinalIgnoreCase))
            {
                var jarPath = Path.Combine(targetDir, spec.ExecutableName);
                File.Move(tempFile, jarPath, overwrite: true);
                return; // JAR doesn't need extraction
            }

            // Set executable permission on Unix (rwxr-xr-x)
            if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
            {
                var execPath = GetExecutablePath(spec);
                if (File.Exists(execPath))
                {
                    File.SetUnixFileMode(execPath, UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
                                                   UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                                                   UnixFileMode.OtherRead | UnixFileMode.OtherExecute);
                }
            }

            _logger.LogInformation("Validator {Validator} v{Version} installed to {Path}",
                spec.Name, spec.Version, targetDir);
        }
        finally
        {
            // Best-effort cleanup of the temp download on both success and failure.
            if (File.Exists(tempFile))
            {
                try { File.Delete(tempFile); }
                catch { /* ignore cleanup errors */ }
            }
        }
    }

    // Maps the current runtime to a spec hash/URL key, e.g. "linux-amd64";
    // JARs are platform-independent and use the single key "jar".
    private static string GetPlatformKey(ValidatorBinarySpec spec)
    {
        if (spec.IsJar)
        {
            return "jar";
        }

        var os = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "windows"
            : RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? "darwin"
            : "linux";

        var arch = RuntimeInformation.OSArchitecture switch
        {
            Architecture.X64 => "amd64",
            Architecture.Arm64 => "arm64",
            _ => "amd64"
        };

        return $"{os}-{arch}";
    }

    // Builds the release asset URL: {0}=version for JARs, {0}/{1}/{2}=version/os/arch otherwise.
    private static string GetDownloadUrl(ValidatorBinarySpec spec, string platformKey)
    {
        if (spec.IsJar)
        {
            var fileName = string.Format(CultureInfo.InvariantCulture, spec.FileNameFormat, spec.Version);
            return $"{spec.BaseUrl}/{fileName}";
        }

        var parts = platformKey.Split('-');
        var os = parts[0];
        var arch = parts[1];
        var fileName2 = string.Format(CultureInfo.InvariantCulture, spec.FileNameFormat, spec.Version, os, arch);
        return $"{spec.BaseUrl}/{fileName2}";
    }

    // SHA-256 of a file, as a lowercase hex string.
    private static async Task<string> ComputeFileHashAsync(string filePath, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return Convert.ToHexStringLower(hash);
    }

    // All-zero digests mark specs whose real hashes are not yet pinned.
    private static bool IsPlaceholderHash(string hash)
    {
        return hash.All(c => c == '0');
    }

    private static async Task ExtractTarGzAsync(
        string tarGzPath,
        string targetDir,
        CancellationToken cancellationToken)
    {
        // Use System.Formats.Tar for extraction
        await using var fileStream = File.OpenRead(tarGzPath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);

        await System.Formats.Tar.TarFile.ExtractToDirectoryAsync(
            gzipStream,
            targetDir,
            overwriteFiles: true,
            cancellationToken).ConfigureAwait(false);
    }
}
|
||||
|
||||
/// <summary>
/// Describes one downloadable validator binary: identity, release location,
/// archive naming, integrity hashes, and how it is invoked.
/// </summary>
public sealed record ValidatorBinarySpec
{
    /// <summary>Gets the validator name.</summary>
    public required string Name { get; init; }

    /// <summary>Gets the version to download.</summary>
    public required string Version { get; init; }

    /// <summary>Gets the base URL for downloads.</summary>
    public required string BaseUrl { get; init; }

    /// <summary>
    /// Gets the file name format string (use {0} for version, {1} for OS, {2} for arch).
    /// </summary>
    public required string FileNameFormat { get; init; }

    /// <summary>
    /// Gets the expected SHA-256 hashes by platform key. Empty by default,
    /// meaning no integrity check is possible for any platform.
    /// </summary>
    public ImmutableDictionary<string, string> ExpectedHashes { get; init; }
        = ImmutableDictionary<string, string>.Empty;

    /// <summary>Gets the executable name (without extension on Unix).</summary>
    public required string ExecutableName { get; init; }

    /// <summary>Gets whether this is a JAR file (requires Java runtime).</summary>
    public bool IsJar { get; init; }
}
|
||||
|
||||
/// <summary>
/// Options for the validator binary manager.
/// </summary>
public sealed class ValidatorBinaryOptions
{
    /// <summary>
    /// Gets or sets the directory for storing validator binaries.
    /// When null or empty, a per-user default location is used instead.
    /// </summary>
    public string? BinaryDirectory { get; set; }

    /// <summary>
    /// Gets or sets whether to operate in offline mode (no downloads).
    /// </summary>
    public bool OfflineMode { get; set; }

    /// <summary>
    /// Gets or sets the download timeout. Defaults to five minutes.
    /// </summary>
    public TimeSpan DownloadTimeout { get; set; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Gets or sets custom binary specifications to override defaults.
    /// Entries here take precedence over built-in specs with the same key.
    /// </summary>
    public IReadOnlyDictionary<string, ValidatorBinarySpec>? CustomSpecs { get; set; }
}
|
||||
|
||||
/// <summary>
/// Exception thrown when validator binary operations fail
/// (unknown validator, download/hash failure, missing executable).
/// </summary>
public sealed class ValidatorBinaryException : Exception
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ValidatorBinaryException"/> class
    /// with a message describing the failure.
    /// </summary>
    /// <param name="message">The error message.</param>
    public ValidatorBinaryException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ValidatorBinaryException"/> class
    /// wrapping an underlying cause.
    /// </summary>
    /// <param name="message">The error message.</param>
    /// <param name="innerException">The inner exception.</param>
    public ValidatorBinaryException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
|
||||
@@ -52,7 +52,7 @@ public sealed class OfflineBuildIdIndexSignatureTests : IDisposable
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.True(index.IsLoaded);
|
||||
Assert.Single(index);
|
||||
Assert.Equal(1, index.Count);
|
||||
|
||||
var result = await index.LookupAsync("gnu-build-id:abc123");
|
||||
Assert.NotNull(result);
|
||||
@@ -83,7 +83,7 @@ public sealed class OfflineBuildIdIndexSignatureTests : IDisposable
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.True(index.IsLoaded);
|
||||
Assert.Empty(index);
|
||||
Assert.Equal(0, index.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -108,7 +108,7 @@ public sealed class OfflineBuildIdIndexSignatureTests : IDisposable
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.True(index.IsLoaded);
|
||||
Assert.Empty(index);
|
||||
Assert.Equal(0, index.Count);
|
||||
}
|
||||
|
||||
private static string CreateDsseSignature(string indexPath, string expectedSha256)
|
||||
|
||||
@@ -37,7 +37,7 @@ public sealed class OfflineBuildIdIndexTests : IDisposable
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.True(index.IsLoaded);
|
||||
Assert.Empty(index);
|
||||
Assert.Equal(0, index.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -49,7 +49,7 @@ public sealed class OfflineBuildIdIndexTests : IDisposable
|
||||
await index.LoadAsync();
|
||||
|
||||
Assert.True(index.IsLoaded);
|
||||
Assert.Empty(index);
|
||||
Assert.Equal(0, index.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -38,7 +38,7 @@ public sealed class SecretAlertEmitterTests
|
||||
var settings = new SecretAlertSettings { Enabled = false };
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -52,7 +52,7 @@ public sealed class SecretAlertEmitterTests
|
||||
var settings = CreateEnabledSettings();
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -75,7 +75,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -93,7 +93,7 @@ public sealed class SecretAlertEmitterTests
|
||||
var settings = CreateEnabledSettings();
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -113,7 +113,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -134,13 +134,13 @@ public sealed class SecretAlertEmitterTests
|
||||
var context = CreateScanContext();
|
||||
|
||||
// First call should publish
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context);
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
// Advance time by 30 minutes (within window)
|
||||
_timeProvider.Advance(TimeSpan.FromMinutes(30));
|
||||
|
||||
// Second call with same finding should be deduplicated
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context);
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -161,13 +161,13 @@ public sealed class SecretAlertEmitterTests
|
||||
var context = CreateScanContext();
|
||||
|
||||
// First call
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context);
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
// Advance time beyond window
|
||||
_timeProvider.Advance(TimeSpan.FromHours(2));
|
||||
|
||||
// Second call should publish again
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context);
|
||||
await _emitter.EmitAlertsAsync([finding], settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -191,7 +191,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
_mockPublisher.Verify(
|
||||
p => p.PublishAsync(It.IsAny<SecretFindingAlertEvent>(), It.IsAny<SecretAlertDestination>(), It.IsAny<SecretAlertSettings>(), It.IsAny<CancellationToken>()),
|
||||
@@ -223,7 +223,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should only publish the Critical finding
|
||||
_mockPublisher.Verify(
|
||||
@@ -249,7 +249,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should publish summary instead of individual alerts
|
||||
_mockPublisher.Verify(
|
||||
@@ -274,7 +274,7 @@ public sealed class SecretAlertEmitterTests
|
||||
};
|
||||
var context = CreateScanContext();
|
||||
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context);
|
||||
await _emitter.EmitAlertsAsync(findings, settings, context, TestContext.Current.CancellationToken);
|
||||
|
||||
// Below threshold, should publish individual alerts
|
||||
_mockPublisher.Verify(
|
||||
|
||||
@@ -43,7 +43,7 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
var (host, analyzer, _) = CreateHost(options);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
host.IsEnabled.Should().BeFalse();
|
||||
@@ -63,7 +63,7 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
var (host, analyzer, _) = CreateHost(options);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
host.IsEnabled.Should().BeTrue();
|
||||
@@ -84,7 +84,7 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
var (host, analyzer, _) = CreateHost(options);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert - should be disabled after failed load
|
||||
host.IsEnabled.Should().BeFalse();
|
||||
@@ -104,7 +104,7 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<DirectoryNotFoundException>(
|
||||
() => host.StartAsync(CancellationToken.None));
|
||||
() => host.StartAsync(TestContext.Current.CancellationToken));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -121,17 +121,17 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
|
||||
var mockVerifier = new Mock<IBundleVerifier>();
|
||||
mockVerifier
|
||||
.Setup(v => v.VerifyAsync(It.IsAny<string>(), It.IsAny<VerificationOptions>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new BundleVerificationResult(true, "Test verification passed"));
|
||||
.Setup(v => v.VerifyAsync(It.IsAny<string>(), It.IsAny<BundleVerificationOptions>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new BundleVerificationResult { IsValid = true, BundleVersion = "1.0.0" });
|
||||
|
||||
var (host, _, _) = CreateHost(options, mockVerifier.Object);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
mockVerifier.Verify(
|
||||
v => v.VerifyAsync(_testDir, It.IsAny<VerificationOptions>(), It.IsAny<CancellationToken>()),
|
||||
v => v.VerifyAsync(_testDir, It.IsAny<BundleVerificationOptions>(), It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
host.LastVerificationResult.Should().NotBeNull();
|
||||
host.LastVerificationResult!.IsValid.Should().BeTrue();
|
||||
@@ -152,13 +152,13 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
|
||||
var mockVerifier = new Mock<IBundleVerifier>();
|
||||
mockVerifier
|
||||
.Setup(v => v.VerifyAsync(It.IsAny<string>(), It.IsAny<VerificationOptions>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new BundleVerificationResult(false, "Signature invalid"));
|
||||
.Setup(v => v.VerifyAsync(It.IsAny<string>(), It.IsAny<BundleVerificationOptions>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(BundleVerificationResult.Failure("Signature invalid"));
|
||||
|
||||
var (host, _, _) = CreateHost(options, mockVerifier.Object);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
host.LastVerificationResult.Should().NotBeNull();
|
||||
@@ -177,10 +177,10 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
};
|
||||
var (host, _, _) = CreateHost(options);
|
||||
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
await host.StopAsync(CancellationToken.None);
|
||||
await host.StopAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert - should complete without error
|
||||
}
|
||||
@@ -199,7 +199,7 @@ public sealed class SecretsAnalyzerHostTests : IAsyncLifetime
|
||||
var (host, _, _) = CreateHost(options);
|
||||
|
||||
// Act
|
||||
await host.StartAsync(CancellationToken.None);
|
||||
await host.StartAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert - should be disabled due to invalid ruleset
|
||||
host.IsEnabled.Should().BeFalse();
|
||||
|
||||
@@ -140,7 +140,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should complete without error when disabled
|
||||
}
|
||||
@@ -154,7 +154,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should complete without error when no ruleset
|
||||
}
|
||||
@@ -171,7 +171,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Analyzer should process without error - findings logged but not returned directly
|
||||
}
|
||||
@@ -194,7 +194,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should complete without scanning the large file
|
||||
}
|
||||
@@ -221,7 +221,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should stop after max findings
|
||||
}
|
||||
@@ -255,12 +255,13 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
Directory.CreateDirectory(subDir);
|
||||
await File.WriteAllTextAsync(
|
||||
Path.Combine(subDir, "secret.txt"),
|
||||
"AKIAIOSFODNN7EXAMPLE");
|
||||
"AKIAIOSFODNN7EXAMPLE",
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should process nested files
|
||||
}
|
||||
@@ -281,12 +282,13 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
Directory.CreateDirectory(nodeModules);
|
||||
await File.WriteAllTextAsync(
|
||||
Path.Combine(nodeModules, "package.txt"),
|
||||
"AKIAIOSFODNN7EXAMPLE");
|
||||
"AKIAIOSFODNN7EXAMPLE",
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should skip node_modules directory
|
||||
}
|
||||
@@ -308,7 +310,7 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var context = CreateContext();
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
await analyzer.AnalyzeAsync(context, writer, CancellationToken.None);
|
||||
await analyzer.AnalyzeAsync(context, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Should skip .bin files
|
||||
}
|
||||
@@ -329,8 +331,8 @@ public sealed class SecretsAnalyzerTests : IAsyncLifetime
|
||||
var writer = new Mock<LanguageComponentWriter>().Object;
|
||||
|
||||
// Run twice - should produce same results
|
||||
await analyzer1.AnalyzeAsync(context1, writer, CancellationToken.None);
|
||||
await analyzer2.AnalyzeAsync(context2, writer, CancellationToken.None);
|
||||
await analyzer1.AnalyzeAsync(context1, writer, TestContext.Current.CancellationToken);
|
||||
await analyzer2.AnalyzeAsync(context2, writer, TestContext.Current.CancellationToken);
|
||||
|
||||
// Deterministic execution verified by no exceptions
|
||||
}
|
||||
|
||||
@@ -61,7 +61,8 @@ public class ScannerConfigDiffTests : ConfigDiffTestBase
|
||||
async config => await GetReachabilityBehaviorAsync(config),
|
||||
async config => await GetConcurrencyBehaviorAsync(config),
|
||||
async config => await GetOutputFormatBehaviorAsync(config)
|
||||
]);
|
||||
],
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue(
|
||||
@@ -95,7 +96,8 @@ public class ScannerConfigDiffTests : ConfigDiffTestBase
|
||||
changedConfig,
|
||||
getBehavior: async config => await CaptureReachabilityBehaviorAsync(config),
|
||||
computeDelta: ComputeBehaviorSnapshotDelta,
|
||||
expectedDelta: expectedDelta);
|
||||
expectedDelta: expectedDelta,
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue(
|
||||
@@ -122,7 +124,8 @@ public class ScannerConfigDiffTests : ConfigDiffTestBase
|
||||
async config => await GetScanningBehaviorAsync(config),
|
||||
async config => await GetVulnMatchingBehaviorAsync(config),
|
||||
async config => await GetReachabilityBehaviorAsync(config)
|
||||
]);
|
||||
],
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue(
|
||||
@@ -154,7 +157,8 @@ public class ScannerConfigDiffTests : ConfigDiffTestBase
|
||||
changedConfig,
|
||||
getBehavior: async config => await CaptureConcurrencyBehaviorAsync(config),
|
||||
computeDelta: ComputeBehaviorSnapshotDelta,
|
||||
expectedDelta: expectedDelta);
|
||||
expectedDelta: expectedDelta,
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue();
|
||||
@@ -179,7 +183,8 @@ public class ScannerConfigDiffTests : ConfigDiffTestBase
|
||||
[
|
||||
async config => await GetScanningBehaviorAsync(config),
|
||||
async config => await GetSbomBehaviorAsync(config)
|
||||
]);
|
||||
],
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.IsSuccess.Should().BeTrue(
|
||||
|
||||
@@ -0,0 +1,451 @@
|
||||
// <copyright file="FeedserPedigreeDataProviderTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Scanner.Emit.Pedigree;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Tests.Pedigree;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="FeedserPedigreeDataProvider"/>.
|
||||
/// Sprint: SPRINT_20260107_005_002 Task PD-012
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
public sealed class FeedserPedigreeDataProviderTests
|
||||
{
|
||||
private readonly Mock<IFeedserPatchSignatureClient> _patchClientMock;
|
||||
private readonly Mock<IFeedserBackportProofClient> _backportClientMock;
|
||||
private readonly PedigreeNotesGenerator _notesGenerator;
|
||||
private readonly FeedserPedigreeDataProvider _provider;
|
||||
|
||||
public FeedserPedigreeDataProviderTests()
|
||||
{
|
||||
_patchClientMock = new Mock<IFeedserPatchSignatureClient>();
|
||||
_backportClientMock = new Mock<IFeedserBackportProofClient>();
|
||||
_notesGenerator = new PedigreeNotesGenerator(TimeProvider.System);
|
||||
|
||||
_provider = new FeedserPedigreeDataProvider(
|
||||
_patchClientMock.Object,
|
||||
_backportClientMock.Object,
|
||||
_notesGenerator,
|
||||
NullLogger<FeedserPedigreeDataProvider>.Instance);
|
||||
}
|
||||
|
||||
#region GetPedigreeAsync Tests
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_NullPurl_ReturnsNull()
|
||||
{
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(null!, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
_patchClientMock.Verify(c => c.GetPatchSignaturesAsync(
|
||||
It.IsAny<string>(),
|
||||
It.IsAny<CancellationToken>()), Times.Never);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_EmptyPurl_ReturnsNull()
|
||||
{
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(string.Empty, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_NoDataFromServices_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:npm/lodash@4.17.21";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((IReadOnlyList<FeedserPatchSignature>?)null);
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedserBackportProof?)null);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_WithBackportProof_BuildsAncestors()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:deb/debian/openssl@1.1.1n-0+deb11u5";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((IReadOnlyList<FeedserPatchSignature>?)null);
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new FeedserBackportProof
|
||||
{
|
||||
UpstreamPackage = new FeedserPackageReference
|
||||
{
|
||||
Name = "openssl",
|
||||
Version = "1.1.1n",
|
||||
Purl = "pkg:generic/openssl@1.1.1n",
|
||||
ProjectUrl = "https://www.openssl.org"
|
||||
},
|
||||
ConfidencePercent = 95,
|
||||
FeedserTier = 1
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Ancestors.Should().HaveCount(1);
|
||||
result.Ancestors[0].Name.Should().Be("openssl");
|
||||
result.Ancestors[0].Version.Should().Be("1.1.1n");
|
||||
result.Ancestors[0].Purl.Should().Be("pkg:generic/openssl@1.1.1n");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_WithBackportProof_BuildsVariants()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:generic/openssl@1.1.1n";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((IReadOnlyList<FeedserPatchSignature>?)null);
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new FeedserBackportProof
|
||||
{
|
||||
Variants = new[]
|
||||
{
|
||||
new FeedserVariantPackage
|
||||
{
|
||||
Name = "openssl",
|
||||
Version = "1.1.1n-0+deb11u5",
|
||||
Purl = "pkg:deb/debian/openssl@1.1.1n-0+deb11u5",
|
||||
Distribution = "Debian",
|
||||
Release = "bullseye"
|
||||
},
|
||||
new FeedserVariantPackage
|
||||
{
|
||||
Name = "openssl",
|
||||
Version = "1.1.1k-1ubuntu2.1",
|
||||
Purl = "pkg:deb/ubuntu/openssl@1.1.1k-1ubuntu2.1",
|
||||
Distribution = "Ubuntu",
|
||||
Release = "jammy"
|
||||
}
|
||||
},
|
||||
ConfidencePercent = 90
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Variants.Should().HaveCount(2);
|
||||
result.Variants[0].Distribution.Should().Be("Debian");
|
||||
result.Variants[1].Distribution.Should().Be("Ubuntu");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_WithPatchSignatures_BuildsCommits()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:generic/openssl@1.1.1n";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new[]
|
||||
{
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-001",
|
||||
CveId = "CVE-2024-1234",
|
||||
UpstreamRepo = "https://github.com/openssl/openssl",
|
||||
CommitSha = "abc123def456789"
|
||||
}
|
||||
});
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedserBackportProof?)null);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Commits.Should().HaveCount(1);
|
||||
result.Commits[0].Uid.Should().Be("abc123def456789");
|
||||
result.Commits[0].Url.Should().Be("https://github.com/openssl/openssl/commit/abc123def456789");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_WithPatchSignatures_BuildsPatches()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:generic/openssl@1.1.1n";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new[]
|
||||
{
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-001",
|
||||
CveId = "CVE-2024-1234",
|
||||
CommitSha = "abc123",
|
||||
PatchOrigin = "backport",
|
||||
Hunks = new[]
|
||||
{
|
||||
new FeedserPatchHunk
|
||||
{
|
||||
FilePath = "crypto/x509/x509_cmp.c",
|
||||
StartLine = 100,
|
||||
AddedLines = new[] { "+ if (check != NULL) return 0;" },
|
||||
RemovedLines = new[] { "- // vulnerable code" }
|
||||
}
|
||||
},
|
||||
Source = "NVD"
|
||||
}
|
||||
});
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedserBackportProof?)null);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Patches.Should().HaveCount(1);
|
||||
result.Patches[0].Type.Should().Be(PatchType.Backport);
|
||||
result.Patches[0].Resolves.Should().HaveCount(1);
|
||||
result.Patches[0].Resolves[0].Id.Should().Be("CVE-2024-1234");
|
||||
result.Patches[0].DiffText.Should().Contain("crypto/x509/x509_cmp.c");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_MultiPatchAggregation_AggregatesAll()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:generic/openssl@1.1.1n";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new[]
|
||||
{
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-001",
|
||||
CveId = "CVE-2024-1111",
|
||||
CommitSha = "commit1",
|
||||
PatchOrigin = "backport"
|
||||
},
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-002",
|
||||
CveId = "CVE-2024-2222",
|
||||
CommitSha = "commit2",
|
||||
PatchOrigin = "cherry-pick"
|
||||
},
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-003",
|
||||
CveId = "CVE-2024-3333",
|
||||
CommitSha = "commit3",
|
||||
PatchOrigin = "backport"
|
||||
}
|
||||
});
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedserBackportProof?)null);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Commits.Should().HaveCount(3);
|
||||
result.Patches.Should().HaveCount(3);
|
||||
result.Patches.Select(p => p.Resolves[0].Id)
|
||||
.Should().BeEquivalentTo(new[] { "CVE-2024-1111", "CVE-2024-2222", "CVE-2024-3333" });
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_GeneratesNotes()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:deb/debian/openssl@1.1.1n-0+deb11u5";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new[]
|
||||
{
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-001",
|
||||
CveId = "CVE-2024-1234",
|
||||
CommitSha = "abc123",
|
||||
PatchOrigin = "backport"
|
||||
}
|
||||
});
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new FeedserBackportProof
|
||||
{
|
||||
UpstreamPackage = new FeedserPackageReference
|
||||
{
|
||||
Name = "openssl",
|
||||
Version = "1.1.1n",
|
||||
Purl = "pkg:generic/openssl@1.1.1n"
|
||||
},
|
||||
ConfidencePercent = 95,
|
||||
FeedserTier = 1
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Notes.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreeAsync_ServiceException_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
const string purl = "pkg:generic/openssl@1.1.1n";
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ThrowsAsync(new InvalidOperationException("Service unavailable"));
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofAsync(purl, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((FeedserBackportProof?)null);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreeAsync(purl, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetPedigreesBatchAsync Tests
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreesBatchAsync_EmptyPurls_ReturnsEmpty()
|
||||
{
|
||||
// Act
|
||||
var result = await _provider.GetPedigreesBatchAsync(
|
||||
Array.Empty<string>(),
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreesBatchAsync_FiltersDuplicates()
|
||||
{
|
||||
// Arrange
|
||||
var purls = new[]
|
||||
{
|
||||
"pkg:npm/lodash@4.17.21",
|
||||
"pkg:npm/lodash@4.17.21",
|
||||
"",
|
||||
"pkg:npm/express@4.18.0"
|
||||
};
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesBatchAsync(
|
||||
It.Is<IEnumerable<string>>(p => p.Count() == 2),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(ImmutableDictionary<string, IReadOnlyList<FeedserPatchSignature>>.Empty);
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofsBatchAsync(
|
||||
It.Is<IEnumerable<string>>(p => p.Count() == 2),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(ImmutableDictionary<string, FeedserBackportProof>.Empty);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreesBatchAsync(
|
||||
purls,
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
_patchClientMock.Verify(c => c.GetPatchSignaturesBatchAsync(
|
||||
It.Is<IEnumerable<string>>(p => p.Count() == 2),
|
||||
It.IsAny<CancellationToken>()), Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetPedigreesBatchAsync_MapsResults()
|
||||
{
|
||||
// Arrange
|
||||
var purls = new[]
|
||||
{
|
||||
"pkg:npm/lodash@4.17.21",
|
||||
"pkg:npm/express@4.18.0"
|
||||
};
|
||||
|
||||
_patchClientMock
|
||||
.Setup(c => c.GetPatchSignaturesBatchAsync(
|
||||
It.IsAny<IEnumerable<string>>(),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(new Dictionary<string, IReadOnlyList<FeedserPatchSignature>>
|
||||
{
|
||||
["pkg:npm/lodash@4.17.21"] = new[]
|
||||
{
|
||||
new FeedserPatchSignature
|
||||
{
|
||||
PatchSigId = "sig-001",
|
||||
CveId = "CVE-2021-23337",
|
||||
CommitSha = "abc123"
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
_backportClientMock
|
||||
.Setup(c => c.GetBackportProofsBatchAsync(
|
||||
It.IsAny<IEnumerable<string>>(),
|
||||
It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(ImmutableDictionary<string, FeedserBackportProof>.Empty);
|
||||
|
||||
// Act
|
||||
var result = await _provider.GetPedigreesBatchAsync(
|
||||
purls,
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Should().ContainKey("pkg:npm/lodash@4.17.21");
|
||||
result.Should().NotContainKey("pkg:npm/express@4.18.0");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Scanner.Emit.Pedigree;
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="JsonSchema.Net" />
|
||||
<PackageReference Include="Moq" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -225,9 +225,9 @@ public sealed class FingerprintIndexTests
|
||||
{
|
||||
// Arrange
|
||||
var index = new InMemoryFingerprintIndex();
|
||||
|
||||
|
||||
// Assert initial
|
||||
Assert.Empty(index);
|
||||
Assert.Equal(0, index.GetStatistics().TotalFingerprints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -59,7 +59,7 @@ public class ScannerSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
|
||||
public async Task ScanReadOperations_CompatibleWithPreviousSchema()
|
||||
{
|
||||
// Arrange
|
||||
await InitializeAsync();
|
||||
await InitializeAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
var results = await TestReadBackwardCompatibilityAsync(
|
||||
@@ -90,7 +90,7 @@ public class ScannerSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
|
||||
public async Task ScanWriteOperations_CompatibleWithPreviousSchema()
|
||||
{
|
||||
// Arrange
|
||||
await InitializeAsync();
|
||||
await InitializeAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
var results = await TestWriteForwardCompatibilityAsync(
|
||||
@@ -120,7 +120,7 @@ public class ScannerSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
|
||||
public async Task SbomStorageOperations_CompatibleAcrossVersions()
|
||||
{
|
||||
// Arrange
|
||||
await InitializeAsync();
|
||||
await InitializeAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
var result = await TestAgainstPreviousSchemaAsync(
|
||||
@@ -146,7 +146,7 @@ public class ScannerSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
|
||||
public async Task VulnerabilityMappingOperations_CompatibleAcrossVersions()
|
||||
{
|
||||
// Arrange
|
||||
await InitializeAsync();
|
||||
await InitializeAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
var result = await TestAgainstPreviousSchemaAsync(
|
||||
@@ -173,7 +173,7 @@ public class ScannerSchemaEvolutionTests : PostgresSchemaEvolutionTestBase
|
||||
public async Task MigrationRollbacks_ExecuteSuccessfully()
|
||||
{
|
||||
// Arrange
|
||||
await InitializeAsync();
|
||||
await InitializeAsync(TestContext.Current.CancellationToken);
|
||||
|
||||
// Act
|
||||
var results = await TestMigrationRollbacksAsync(
|
||||
|
||||
@@ -0,0 +1,496 @@
|
||||
// <copyright file="SbomValidationPipelineTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Emit.Composition;
|
||||
using StellaOps.Scanner.Validation;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for <see cref="SbomValidationPipeline"/>.
|
||||
/// Sprint: SPRINT_20260107_005_003 Task VG-005
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
public sealed class SbomValidationPipelineTests
|
||||
{
|
||||
private readonly Mock<ISbomValidator> _mockValidator;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public SbomValidationPipelineTests()
|
||||
{
|
||||
_mockValidator = new Mock<ISbomValidator>();
|
||||
_timeProvider = TimeProvider.System;
|
||||
}
|
||||
|
||||
private SbomValidationPipeline CreatePipeline(SbomValidationPipelineOptions? options = null)
|
||||
{
|
||||
return new SbomValidationPipeline(
|
||||
_mockValidator.Object,
|
||||
Options.Create(options ?? new SbomValidationPipelineOptions()),
|
||||
NullLogger<SbomValidationPipeline>.Instance,
|
||||
_timeProvider);
|
||||
}
|
||||
|
||||
private static SbomCompositionResult CreateCompositionResult(
|
||||
bool includeUsage = false,
|
||||
bool includeSpdx = false)
|
||||
{
|
||||
var cdxInventory = new CycloneDxArtifact
|
||||
{
|
||||
View = SbomView.Inventory,
|
||||
SerialNumber = "urn:uuid:test-1",
|
||||
GeneratedAt = DateTimeOffset.UtcNow,
|
||||
Components = ImmutableArray<AggregatedComponent>.Empty,
|
||||
JsonBytes = Encoding.UTF8.GetBytes("{}"),
|
||||
JsonSha256 = "abc123",
|
||||
ContentHash = "abc123",
|
||||
JsonMediaType = "application/vnd.cyclonedx+json",
|
||||
ProtobufBytes = Array.Empty<byte>(),
|
||||
ProtobufSha256 = "def456",
|
||||
ProtobufMediaType = "application/vnd.cyclonedx+protobuf",
|
||||
};
|
||||
|
||||
var cdxUsage = includeUsage ? new CycloneDxArtifact
|
||||
{
|
||||
View = SbomView.Usage,
|
||||
SerialNumber = "urn:uuid:test-2",
|
||||
GeneratedAt = DateTimeOffset.UtcNow,
|
||||
Components = ImmutableArray<AggregatedComponent>.Empty,
|
||||
JsonBytes = Encoding.UTF8.GetBytes("{}"),
|
||||
JsonSha256 = "xyz789",
|
||||
ContentHash = "xyz789",
|
||||
JsonMediaType = "application/vnd.cyclonedx+json",
|
||||
ProtobufBytes = Array.Empty<byte>(),
|
||||
ProtobufSha256 = "uvw012",
|
||||
ProtobufMediaType = "application/vnd.cyclonedx+protobuf",
|
||||
} : null;
|
||||
|
||||
var spdxInventory = includeSpdx ? new SpdxArtifact
|
||||
{
|
||||
View = SbomView.Inventory,
|
||||
GeneratedAt = DateTimeOffset.UtcNow,
|
||||
JsonBytes = Encoding.UTF8.GetBytes("{}"),
|
||||
JsonSha256 = "spdx123",
|
||||
ContentHash = "spdx123",
|
||||
JsonMediaType = "application/spdx+json",
|
||||
} : null;
|
||||
|
||||
return new SbomCompositionResult
|
||||
{
|
||||
Inventory = cdxInventory,
|
||||
Usage = cdxUsage,
|
||||
SpdxInventory = spdxInventory,
|
||||
Graph = new ComponentGraph
|
||||
{
|
||||
Layers = ImmutableArray<LayerComponentFragment>.Empty,
|
||||
Components = ImmutableArray<AggregatedComponent>.Empty,
|
||||
ComponentMap = ImmutableDictionary<string, AggregatedComponent>.Empty,
|
||||
},
|
||||
CompositionRecipeJson = Encoding.UTF8.GetBytes("{}"),
|
||||
CompositionRecipeSha256 = "recipe123",
|
||||
};
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenDisabled_ReturnsSkipped()
|
||||
{
|
||||
// Arrange
|
||||
var options = new SbomValidationPipelineOptions { Enabled = false };
|
||||
var pipeline = CreatePipeline(options);
|
||||
var result = CreateCompositionResult();
|
||||
|
||||
// Act
|
||||
var validationResult = await pipeline.ValidateAsync(result);
|
||||
|
||||
// Assert
|
||||
Assert.True(validationResult.IsValid);
|
||||
Assert.True(validationResult.WasSkipped);
|
||||
_mockValidator.Verify(
|
||||
v => v.ValidateAsync(It.IsAny<byte[]>(), It.IsAny<SbomFormat>(), It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()),
|
||||
Times.Never);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenCycloneDxValid_ReturnsPassed()
|
||||
{
|
||||
// Arrange
|
||||
var pipeline = CreatePipeline();
|
||||
var result = CreateCompositionResult();
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Success(
|
||||
SbomFormat.CycloneDxJson,
|
||||
"test-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100)));
|
||||
|
||||
// Act
|
||||
var validationResult = await pipeline.ValidateAsync(result);
|
||||
|
||||
// Assert
|
||||
Assert.True(validationResult.IsValid);
|
||||
Assert.False(validationResult.WasSkipped);
|
||||
Assert.NotNull(validationResult.CycloneDxInventoryResult);
|
||||
Assert.True(validationResult.CycloneDxInventoryResult.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenCycloneDxInvalid_ReturnsFailed()
|
||||
{
|
||||
// Arrange
|
||||
var options = new SbomValidationPipelineOptions { FailOnError = false };
|
||||
var pipeline = CreatePipeline(options);
|
||||
var result = CreateCompositionResult();
|
||||
|
||||
var diagnostics = new[]
|
||||
{
|
||||
new SbomValidationDiagnostic
|
||||
{
|
||||
Severity = SbomValidationSeverity.Error,
|
||||
Code = "CDX001",
|
||||
Message = "Invalid component"
|
||||
}
|
||||
};
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Failure(
|
||||
SbomFormat.CycloneDxJson,
|
||||
"test-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100),
|
||||
diagnostics));
|
||||
|
||||
// Act
|
||||
var validationResult = await pipeline.ValidateAsync(result);
|
||||
|
||||
// Assert
|
||||
Assert.False(validationResult.IsValid);
|
||||
Assert.NotNull(validationResult.CycloneDxInventoryResult);
|
||||
Assert.False(validationResult.CycloneDxInventoryResult.IsValid);
|
||||
Assert.Equal(1, validationResult.TotalErrorCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WhenFailOnErrorAndInvalid_ThrowsException()
|
||||
{
|
||||
// Arrange
|
||||
var options = new SbomValidationPipelineOptions { FailOnError = true };
|
||||
var pipeline = CreatePipeline(options);
|
||||
var result = CreateCompositionResult();
|
||||
|
||||
var diagnostics = new[]
|
||||
{
|
||||
new SbomValidationDiagnostic
|
||||
{
|
||||
Severity = SbomValidationSeverity.Error,
|
||||
Code = "CDX001",
|
||||
Message = "Invalid component"
|
||||
}
|
||||
};
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Failure(
|
||||
SbomFormat.CycloneDxJson,
|
||||
"test-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100),
|
||||
diagnostics));
|
||||
|
||||
// Act & Assert
|
||||
var ex = await Assert.ThrowsAsync<SbomValidationException>(
|
||||
() => pipeline.ValidateAsync(result));
|
||||
|
||||
Assert.Contains("1 error", ex.Message);
|
||||
Assert.NotNull(ex.Result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WithUsageSbom_ValidatesBothSboms()
|
||||
{
|
||||
// Arrange
|
||||
var pipeline = CreatePipeline();
|
||||
var result = CreateCompositionResult(includeUsage: true);
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Success(
|
||||
SbomFormat.CycloneDxJson,
|
||||
"test-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100)));
|
||||
|
||||
// Act
|
||||
var validationResult = await pipeline.ValidateAsync(result);
|
||||
|
||||
// Assert
|
||||
Assert.True(validationResult.IsValid);
|
||||
Assert.NotNull(validationResult.CycloneDxInventoryResult);
|
||||
Assert.NotNull(validationResult.CycloneDxUsageResult);
|
||||
|
||||
_mockValidator.Verify(
|
||||
v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()),
|
||||
Times.Exactly(2));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_WithSpdxSbom_ValidatesSpdx()
|
||||
{
|
||||
// Arrange
|
||||
var pipeline = CreatePipeline();
|
||||
var result = CreateCompositionResult(includeSpdx: true);
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Success(
|
||||
SbomFormat.CycloneDxJson,
|
||||
"test-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100)));
|
||||
|
||||
_mockValidator
|
||||
.Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.Spdx3JsonLd, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(SbomValidationResult.Success(
|
||||
SbomFormat.Spdx3JsonLd,
|
||||
"spdx-validator",
|
||||
"1.0.0",
|
||||
TimeSpan.FromMilliseconds(100)));
|
||||
|
||||
// Act
|
||||
var validationResult = await pipeline.ValidateAsync(result);
|
||||
|
||||
// Assert
|
||||
Assert.True(validationResult.IsValid);
|
||||
Assert.NotNull(validationResult.CycloneDxInventoryResult);
|
||||
Assert.NotNull(validationResult.SpdxInventoryResult);
|
||||
}
|
||||
|
||||
// Disabling CycloneDX validation in the options must skip that validator entirely
// (Times.Never) while SPDX validation still runs.
[Fact]
public async Task ValidateAsync_WhenValidateCycloneDxDisabled_SkipsCycloneDx()
{
    // Arrange
    var options = new SbomValidationPipelineOptions
    {
        ValidateCycloneDx = false,
        ValidateSpdx = true
    };
    var pipeline = CreatePipeline(options);
    var result = CreateCompositionResult(includeSpdx: true);

    // Only the SPDX path is set up; a CycloneDX call would hit Moq's default (loose) behavior.
    _mockValidator
        .Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.Spdx3JsonLd, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(SbomValidationResult.Success(
            SbomFormat.Spdx3JsonLd,
            "spdx-validator",
            "1.0.0",
            TimeSpan.FromMilliseconds(100)));

    // Act
    var validationResult = await pipeline.ValidateAsync(result);

    // Assert — CycloneDX result absent, SPDX result present.
    Assert.True(validationResult.IsValid);
    Assert.Null(validationResult.CycloneDxInventoryResult);
    Assert.NotNull(validationResult.SpdxInventoryResult);

    // The disabled format's validator must never be invoked.
    _mockValidator.Verify(
        v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()),
        Times.Never);
}
|
||||
|
||||
// A validator exception with FailOnError=false must be converted into a failed
// per-format result (not propagated), with the exception message surfaced as a diagnostic.
[Fact]
public async Task ValidateAsync_WhenValidatorThrows_ReturnsFailedResult()
{
    // Arrange
    var options = new SbomValidationPipelineOptions { FailOnError = false };
    var pipeline = CreatePipeline(options);
    var result = CreateCompositionResult();

    _mockValidator
        .Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
        .ThrowsAsync(new InvalidOperationException("Validator binary not found"));

    // Act
    var validationResult = await pipeline.ValidateAsync(result);

    // Assert — the pipeline reports the failure instead of throwing.
    Assert.False(validationResult.IsValid);
    Assert.NotNull(validationResult.CycloneDxInventoryResult);
    Assert.False(validationResult.CycloneDxInventoryResult.IsValid);
    // The thrown exception's message is expected to appear in the first diagnostic.
    Assert.Contains("not found", validationResult.CycloneDxInventoryResult.Diagnostics[0].Message);
}
|
||||
|
||||
// Cancellation must propagate out of the pipeline as OperationCanceledException
// rather than being swallowed into a failed validation result.
[Fact]
public async Task ValidateAsync_WhenCancelled_ThrowsOperationCanceled()
{
    // Arrange
    var pipeline = CreatePipeline();
    var result = CreateCompositionResult();
    var cts = new CancellationTokenSource();
    cts.Cancel();

    // The mock throws regardless of the token; the test asserts the pipeline
    // re-throws rather than wrapping the cancellation.
    _mockValidator
        .Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
        .ThrowsAsync(new OperationCanceledException());

    // Act & Assert
    await Assert.ThrowsAsync<OperationCanceledException>(
        () => pipeline.ValidateAsync(result, cts.Token));
}
|
||||
|
||||
// Warning-severity diagnostics must not flip the overall result to invalid;
// they are only reflected in TotalWarningCount.
[Fact]
public async Task ValidateAsync_WithWarnings_ReturnsValidWithWarnings()
{
    // Arrange
    var pipeline = CreatePipeline();
    var result = CreateCompositionResult();

    var diagnostics = new[]
    {
        new SbomValidationDiagnostic
        {
            Severity = SbomValidationSeverity.Warning,
            Code = "CDX-WARN-001",
            Message = "Component missing description"
        }
    };

    // Success(...) with diagnostics models "valid but with warnings".
    _mockValidator
        .Setup(v => v.ValidateAsync(It.IsAny<byte[]>(), SbomFormat.CycloneDxJson, It.IsAny<SbomValidationOptions?>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(SbomValidationResult.Success(
            SbomFormat.CycloneDxJson,
            "test-validator",
            "1.0.0",
            TimeSpan.FromMilliseconds(100),
            diagnostics));

    // Act
    var validationResult = await pipeline.ValidateAsync(result);

    // Assert — valid, zero errors, exactly the one warning aggregated.
    Assert.True(validationResult.IsValid);
    Assert.Equal(0, validationResult.TotalErrorCount);
    Assert.Equal(1, validationResult.TotalWarningCount);
}
|
||||
|
||||
// The Success factory must yield a valid, non-skipped result with no errors.
[Fact]
public void SbomValidationPipelineResult_Success_CreatesValidResult()
{
    // Act
    var successResult = SbomValidationPipelineResult.Success();

    // Assert
    Assert.Equal(0, successResult.TotalErrorCount);
    Assert.False(successResult.WasSkipped);
    Assert.True(successResult.IsValid);
}
|
||||
|
||||
// The Failure factory must yield an invalid result that was not skipped.
[Fact]
public void SbomValidationPipelineResult_Failure_CreatesInvalidResult()
{
    // Act
    var failureResult = SbomValidationPipelineResult.Failure();

    // Assert
    Assert.False(failureResult.WasSkipped);
    Assert.False(failureResult.IsValid);
}
|
||||
|
||||
// A skipped run counts as valid (validation never happened) but is flagged as skipped.
[Fact]
public void SbomValidationPipelineResult_Skipped_CreatesSkippedResult()
{
    // Act
    var skippedResult = SbomValidationPipelineResult.Skipped();

    // Assert
    Assert.True(skippedResult.WasSkipped);
    Assert.True(skippedResult.IsValid);
}
|
||||
|
||||
// A layer is valid when both of its per-format results are valid.
[Fact]
public void LayerValidationResult_IsValid_ReturnsTrueWhenBothValid()
{
    // Arrange — build both successful per-format results inline.
    var layer = new LayerValidationResult
    {
        LayerId = "sha256:abc123",
        CycloneDxResult = SbomValidationResult.Success(
            SbomFormat.CycloneDxJson,
            "cdx",
            "1.0.0",
            TimeSpan.Zero),
        SpdxResult = SbomValidationResult.Success(
            SbomFormat.Spdx3JsonLd,
            "spdx",
            "1.0.0",
            TimeSpan.Zero)
    };

    // Assert
    Assert.True(layer.IsValid);
}
|
||||
|
||||
// A single failed per-format result (here CycloneDX, with SPDX absent) makes the layer invalid.
[Fact]
public void LayerValidationResult_IsValid_ReturnsFalseWhenAnyInvalid()
{
    // Arrange
    var failedCdx = SbomValidationResult.Failure(
        SbomFormat.CycloneDxJson,
        "cdx",
        "1.0.0",
        TimeSpan.Zero,
        [new SbomValidationDiagnostic { Severity = SbomValidationSeverity.Error, Code = "E1", Message = "Error" }]);

    var layer = new LayerValidationResult
    {
        LayerId = "sha256:abc123",
        SpdxResult = null,
        CycloneDxResult = failedCdx
    };

    // Assert
    Assert.False(layer.IsValid);
}
|
||||
|
||||
// The exception must carry both the message and the exact pipeline result instance.
[Fact]
public void SbomValidationException_StoresResult()
{
    // Arrange
    var failedPipelineResult = SbomValidationPipelineResult.Failure();

    // Act
    var exception = new SbomValidationException("Test error", failedPipelineResult);

    // Assert — same reference, not a copy.
    Assert.Same(failedPipelineResult, exception.Result);
    Assert.Equal("Test error", exception.Message);
}
|
||||
|
||||
// Pins the documented defaults: everything enabled, fail-fast, 60 s timeout.
[Fact]
public void SbomValidationPipelineOptions_HasCorrectDefaults()
{
    // Act
    var defaults = new SbomValidationPipelineOptions();

    // Assert
    Assert.Equal(TimeSpan.FromSeconds(60), defaults.ValidationTimeout);
    Assert.True(defaults.ValidateSpdx);
    Assert.True(defaults.ValidateCycloneDx);
    Assert.True(defaults.FailOnError);
    Assert.True(defaults.Enabled);
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Unit test project for StellaOps.Scanner.Validation (Moq + FluentAssertions + xUnit). -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <RootNamespace>StellaOps.Scanner.Validation.Tests</RootNamespace>
  </PropertyGroup>

  <!-- Versions omitted: presumably supplied by central package management
       (Directory.Packages.props). NOTE(review): no xunit / Microsoft.NET.Test.Sdk
       reference appears here although the tests use [Fact]/[Theory] and
       TestContext.Current; confirm these flow in via StellaOps.TestKit or
       shared build props. -->
  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\__Libraries\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.Scanner.Validation\StellaOps.Scanner.Validation.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
  </ItemGroup>

</Project>
|
||||
@@ -0,0 +1,238 @@
|
||||
// <copyright file="CompositeValidatorTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="CompositeValidator"/>: format routing, aggregation of
/// validator info, content-based format detection, and auto-validation.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class CompositeValidatorTests
{
    // SupportsFormat is the union of the child validators' supported formats.
    [Fact]
    public void SupportsFormat_WithValidators_ReturnsTrue()
    {
        // Arrange
        var mockCycloneDx = new Mock<ISbomValidator>();
        mockCycloneDx.Setup(v => v.SupportsFormat(SbomFormat.CycloneDxJson)).Returns(true);
        mockCycloneDx.Setup(v => v.SupportsFormat(SbomFormat.CycloneDxXml)).Returns(true);

        var mockSpdx = new Mock<ISbomValidator>();
        mockSpdx.Setup(v => v.SupportsFormat(SbomFormat.Spdx23Json)).Returns(true);

        var composite = new CompositeValidator(
            new[] { mockCycloneDx.Object, mockSpdx.Object },
            NullLogger<CompositeValidator>.Instance);

        // Assert — formats covered by either child are supported; Unknown is not.
        composite.SupportsFormat(SbomFormat.CycloneDxJson).Should().BeTrue();
        composite.SupportsFormat(SbomFormat.CycloneDxXml).Should().BeTrue();
        composite.SupportsFormat(SbomFormat.Spdx23Json).Should().BeTrue();
        composite.SupportsFormat(SbomFormat.Unknown).Should().BeFalse();
    }

    // With no child validators registered, nothing is supported.
    [Fact]
    public void SupportsFormat_NoValidators_ReturnsFalse()
    {
        // Arrange
        var composite = new CompositeValidator(
            Array.Empty<ISbomValidator>(),
            NullLogger<CompositeValidator>.Instance);

        // Assert
        composite.SupportsFormat(SbomFormat.CycloneDxJson).Should().BeFalse();
    }

    // ValidateAsync must forward to the one child that supports the requested format,
    // passing the bytes through unchanged and null options when none are given.
    [Fact]
    public async Task ValidateAsync_DelegatesToCorrectValidator()
    {
        // Arrange
        var expectedResult = SbomValidationResult.Success(
            SbomFormat.CycloneDxJson,
            "test-validator",
            "1.0.0",
            TimeSpan.FromMilliseconds(50));

        var mockCycloneDx = new Mock<ISbomValidator>();
        mockCycloneDx.Setup(v => v.SupportsFormat(SbomFormat.CycloneDxJson)).Returns(true);
        mockCycloneDx.Setup(v => v.ValidateAsync(
            It.IsAny<byte[]>(),
            SbomFormat.CycloneDxJson,
            It.IsAny<SbomValidationOptions>(),
            It.IsAny<CancellationToken>()))
            .ReturnsAsync(expectedResult);

        var composite = new CompositeValidator(
            new[] { mockCycloneDx.Object },
            NullLogger<CompositeValidator>.Instance);

        var sbomBytes = "{}"u8.ToArray();

        // Act
        var result = await composite.ValidateAsync(
            sbomBytes,
            SbomFormat.CycloneDxJson,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert — the child's result is returned as-is; the exact byte array and a
        // null options argument reach the child exactly once.
        result.Should().Be(expectedResult);
        mockCycloneDx.Verify(v => v.ValidateAsync(
            sbomBytes,
            SbomFormat.CycloneDxJson,
            null,
            It.IsAny<CancellationToken>()), Times.Once);
    }

    // When no child supports the format, the composite reports VALIDATOR_UNAVAILABLE.
    [Fact]
    public async Task ValidateAsync_NoValidatorForFormat_ReturnsUnavailable()
    {
        // Arrange
        var mockCycloneDx = new Mock<ISbomValidator>();
        mockCycloneDx.Setup(v => v.SupportsFormat(It.IsAny<SbomFormat>())).Returns(false);

        var composite = new CompositeValidator(
            new[] { mockCycloneDx.Object },
            NullLogger<CompositeValidator>.Instance);

        var sbomBytes = "{}"u8.ToArray();

        // Act
        var result = await composite.ValidateAsync(
            sbomBytes,
            SbomFormat.Spdx3JsonLd,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Diagnostics.Should().Contain(d => d.Code == "VALIDATOR_UNAVAILABLE");
    }

    // GetInfoAsync merges the children's info: availability and the union of formats.
    [Fact]
    public async Task GetInfoAsync_AggregatesFromValidators()
    {
        // Arrange
        var cycloneDxInfo = new ValidatorInfo
        {
            Name = "sbom-utility",
            Version = "0.16.0",
            IsAvailable = true,
            SupportedFormats = ImmutableArray.Create(SbomFormat.CycloneDxJson, SbomFormat.CycloneDxXml),
            SupportedSchemaVersions = ImmutableArray.Create("1.6", "1.7")
        };

        var spdxInfo = new ValidatorInfo
        {
            Name = "spdx-tools",
            Version = "1.1.8",
            IsAvailable = true,
            SupportedFormats = ImmutableArray.Create(SbomFormat.Spdx23Json, SbomFormat.Spdx3JsonLd),
            SupportedSchemaVersions = ImmutableArray.Create("2.3", "3.0.1")
        };

        var mockCycloneDx = new Mock<ISbomValidator>();
        mockCycloneDx.Setup(v => v.GetInfoAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(cycloneDxInfo);

        var mockSpdx = new Mock<ISbomValidator>();
        mockSpdx.Setup(v => v.GetInfoAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(spdxInfo);

        var composite = new CompositeValidator(
            new[] { mockCycloneDx.Object, mockSpdx.Object },
            NullLogger<CompositeValidator>.Instance);

        // Act
        var info = await composite.GetInfoAsync(TestContext.Current.CancellationToken);

        // Assert — 2 + 2 distinct formats from the two children.
        info.IsAvailable.Should().BeTrue();
        info.SupportedFormats.Should().HaveCount(4);
        info.SupportedFormats.Should().Contain(SbomFormat.CycloneDxJson);
        info.SupportedFormats.Should().Contain(SbomFormat.Spdx3JsonLd);
    }

    // Sniffing: CycloneDX JSON/XML, SPDX 2.3 JSON/tag-value, SPDX 3 JSON-LD,
    // plus the Unknown fallback for unrecognizable or empty content.
    [Theory]
    [InlineData("{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.6\"}", SbomFormat.CycloneDxJson)]
    [InlineData("{\"spdxVersion\":\"SPDX-2.3\",\"dataLicense\":\"CC0-1.0\"}", SbomFormat.Spdx23Json)]
    [InlineData("{\"@context\":\"https://spdx.org/v3/context\",\"spdxVersion\":\"SPDX-3.0\"}", SbomFormat.Spdx3JsonLd)]
    [InlineData("SPDXVersion: SPDX-2.3\nDataLicense: CC0-1.0", SbomFormat.Spdx23TagValue)]
    [InlineData("<bom xmlns=\"http://cyclonedx.org/schema/bom/1.6\">", SbomFormat.CycloneDxXml)]
    [InlineData("random content", SbomFormat.Unknown)]
    [InlineData("", SbomFormat.Unknown)]
    public void DetectFormat_IdentifiesCorrectFormat(string content, SbomFormat expected)
    {
        // Act
        var result = CompositeValidator.DetectFormat(System.Text.Encoding.UTF8.GetBytes(content));

        // Assert
        result.Should().Be(expected);
    }

    // ValidateAutoAsync = DetectFormat + ValidateAsync against the detected format.
    [Fact]
    public async Task ValidateAutoAsync_DetectsAndValidates()
    {
        // Arrange
        var expectedResult = SbomValidationResult.Success(
            SbomFormat.CycloneDxJson,
            "test-validator",
            "1.0.0",
            TimeSpan.FromMilliseconds(50));

        var mockCycloneDx = new Mock<ISbomValidator>();
        mockCycloneDx.Setup(v => v.SupportsFormat(SbomFormat.CycloneDxJson)).Returns(true);
        mockCycloneDx.Setup(v => v.ValidateAsync(
            It.IsAny<byte[]>(),
            SbomFormat.CycloneDxJson,
            It.IsAny<SbomValidationOptions>(),
            It.IsAny<CancellationToken>()))
            .ReturnsAsync(expectedResult);

        var composite = new CompositeValidator(
            new[] { mockCycloneDx.Object },
            NullLogger<CompositeValidator>.Instance);

        // Content matching the CycloneDX JSON sniffing rule above.
        var sbomBytes = "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.6\"}"u8.ToArray();

        // Act
        var result = await composite.ValidateAutoAsync(
            sbomBytes,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert
        result.Format.Should().Be(SbomFormat.CycloneDxJson);
        mockCycloneDx.Verify(v => v.ValidateAsync(
            sbomBytes,
            SbomFormat.CycloneDxJson,
            null,
            It.IsAny<CancellationToken>()), Times.Once);
    }

    // Undetectable content must produce an UNKNOWN_FORMAT diagnostic, not a throw.
    [Fact]
    public async Task ValidateAutoAsync_UnknownFormat_ReturnsError()
    {
        // Arrange
        var composite = new CompositeValidator(
            Array.Empty<ISbomValidator>(),
            NullLogger<CompositeValidator>.Instance);

        var sbomBytes = "random garbage content"u8.ToArray();

        // Act
        var result = await composite.ValidateAutoAsync(
            sbomBytes,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Format.Should().Be(SbomFormat.Unknown);
        result.Diagnostics.Should().Contain(d => d.Code == "UNKNOWN_FORMAT");
    }
}
|
||||
@@ -0,0 +1,129 @@
|
||||
// <copyright file="CycloneDxValidatorTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="CycloneDxValidator"/>. These run without the external
/// sbom-utility binary: they only exercise format gating and the unavailable-tool paths.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class CycloneDxValidatorTests
{
    // Shared validator configured with a plausible (but never-invoked) executable name.
    private readonly CycloneDxValidator _validator;

    public CycloneDxValidatorTests()
    {
        var options = Options.Create(new CycloneDxValidatorOptions
        {
            ExecutablePath = "sbom-utility",
            DefaultTimeout = TimeSpan.FromSeconds(30)
        });

        _validator = new CycloneDxValidator(
            options,
            NullLogger<CycloneDxValidator>.Instance,
            TimeProvider.System);
    }

    // Only the two CycloneDX formats are supported; SPDX and Unknown are rejected.
    [Theory]
    [InlineData(SbomFormat.CycloneDxJson, true)]
    [InlineData(SbomFormat.CycloneDxXml, true)]
    [InlineData(SbomFormat.Spdx23Json, false)]
    [InlineData(SbomFormat.Spdx23TagValue, false)]
    [InlineData(SbomFormat.Spdx3JsonLd, false)]
    [InlineData(SbomFormat.Unknown, false)]
    public void SupportsFormat_ReturnsCorrectly(SbomFormat format, bool expected)
    {
        // Act
        var result = _validator.SupportsFormat(format);

        // Assert
        result.Should().Be(expected);
    }

    // Asking the CycloneDX validator to process an SPDX document must fail with
    // UNSUPPORTED_FORMAT rather than invoking the external tool.
    [Fact]
    public async Task ValidateAsync_UnsupportedFormat_ReturnsFailure()
    {
        // Arrange
        var sbomBytes = """
        {
            "spdxVersion": "SPDX-2.3",
            "dataLicense": "CC0-1.0"
        }
        """u8.ToArray();

        // Act
        var result = await _validator.ValidateAsync(
            sbomBytes,
            SbomFormat.Spdx23Json,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Diagnostics.Should().Contain(d => d.Code == "UNSUPPORTED_FORMAT");
    }

    // A missing executable must surface as a diagnostic, never as an unhandled exception.
    [Fact]
    public async Task ValidateAsync_ValidatorNotFound_ReturnsUnavailable()
    {
        // Arrange - use a non-existent executable path
        var options = Options.Create(new CycloneDxValidatorOptions
        {
            ExecutablePath = "/nonexistent/path/sbom-utility-does-not-exist",
            DefaultTimeout = TimeSpan.FromSeconds(5)
        });

        var validator = new CycloneDxValidator(
            options,
            NullLogger<CycloneDxValidator>.Instance,
            TimeProvider.System);

        var sbomBytes = """
        {
            "bomFormat": "CycloneDX",
            "specVersion": "1.6"
        }
        """u8.ToArray();

        // Act
        var result = await validator.ValidateAsync(
            sbomBytes,
            SbomFormat.CycloneDxJson,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert — either diagnostic code is acceptable; presumably the exact code
        // depends on how the process-launch failure manifests per OS.
        result.IsValid.Should().BeFalse();
        result.Diagnostics.Should().Contain(d =>
            d.Code == "VALIDATOR_UNAVAILABLE" ||
            d.Code == "VALIDATION_ERROR");
    }

    // GetInfoAsync reports availability without throwing when the tool is absent.
    [Fact]
    public async Task GetInfoAsync_ValidatorNotAvailable_ReturnsUnavailable()
    {
        // Arrange - use a non-existent executable path
        var options = Options.Create(new CycloneDxValidatorOptions
        {
            ExecutablePath = "/nonexistent/path/sbom-utility-does-not-exist"
        });

        var validator = new CycloneDxValidator(
            options,
            NullLogger<CycloneDxValidator>.Instance,
            TimeProvider.System);

        // Act
        var info = await validator.GetInfoAsync(TestContext.Current.CancellationToken);

        // Assert
        info.IsAvailable.Should().BeFalse();
        info.Name.Should().Be("sbom-utility");
    }
}
|
||||
@@ -0,0 +1,42 @@
|
||||
// <copyright file="SbomFormatTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for the <see cref="SbomFormat"/> enum: guards the member set and names
/// so serialized values stay stable.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class SbomFormatTests
{
    [Fact]
    public void SbomFormat_HasExpectedValues()
    {
        // Exactly six members must exist — adding or removing one should break this test.
        Enum.GetValues<SbomFormat>().Should().HaveCount(6);

        SbomFormat[] knownMembers =
        [
            SbomFormat.CycloneDxJson,
            SbomFormat.CycloneDxXml,
            SbomFormat.Spdx23Json,
            SbomFormat.Spdx23TagValue,
            SbomFormat.Spdx3JsonLd,
            SbomFormat.Unknown,
        ];

        foreach (var member in knownMembers)
        {
            Enum.IsDefined(member).Should().BeTrue();
        }
    }

    // ToString must keep producing the exact member names (stable for logs/serialization).
    [Theory]
    [InlineData(SbomFormat.CycloneDxJson, "CycloneDxJson")]
    [InlineData(SbomFormat.CycloneDxXml, "CycloneDxXml")]
    [InlineData(SbomFormat.Spdx23Json, "Spdx23Json")]
    [InlineData(SbomFormat.Spdx23TagValue, "Spdx23TagValue")]
    [InlineData(SbomFormat.Spdx3JsonLd, "Spdx3JsonLd")]
    [InlineData(SbomFormat.Unknown, "Unknown")]
    public void SbomFormat_ToString_ReturnsExpectedName(SbomFormat format, string expected)
        => format.ToString().Should().Be(expected);
}
|
||||
@@ -0,0 +1,84 @@
|
||||
// <copyright file="SbomValidationDiagnosticTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="SbomValidationDiagnostic"/>: required vs. optional
/// properties and the severity enum.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class SbomValidationDiagnosticTests
{
    [Fact]
    public void Diagnostic_RequiredProperties()
    {
        // Act — construct with only the required members set.
        var sut = new SbomValidationDiagnostic
        {
            Severity = SbomValidationSeverity.Error,
            Code = "ERR001",
            Message = "Schema validation failed"
        };

        // Assert — required members round-trip; every optional member defaults to null.
        sut.Severity.Should().Be(SbomValidationSeverity.Error);
        sut.Code.Should().Be("ERR001");
        sut.Message.Should().Be("Schema validation failed");
        sut.Suggestion.Should().BeNull();
        sut.Line.Should().BeNull();
        sut.Path.Should().BeNull();
    }

    [Fact]
    public void Diagnostic_WithOptionalProperties()
    {
        // Act — exercise the optional location/remediation members.
        var sut = new SbomValidationDiagnostic
        {
            Severity = SbomValidationSeverity.Warning,
            Code = "WARN001",
            Message = "License not in SPDX license list",
            Path = "$.components[0].licenses[0].license.id",
            Line = 42,
            Suggestion = "Use a valid SPDX license identifier"
        };

        // Assert
        sut.Suggestion.Should().Be("Use a valid SPDX license identifier");
        sut.Line.Should().Be(42);
        sut.Path.Should().Be("$.components[0].licenses[0].license.id");
    }

    [Fact]
    public void SbomValidationSeverity_HasExpectedValues()
    {
        // The severity enum is exactly { Error, Warning, Info }.
        Enum.GetValues<SbomValidationSeverity>().Should().HaveCount(3);
        Enum.IsDefined(SbomValidationSeverity.Info).Should().BeTrue();
        Enum.IsDefined(SbomValidationSeverity.Warning).Should().BeTrue();
        Enum.IsDefined(SbomValidationSeverity.Error).Should().BeTrue();
    }

    // Any severity value is accepted by the init-only Severity member.
    [Theory]
    [InlineData(SbomValidationSeverity.Error)]
    [InlineData(SbomValidationSeverity.Warning)]
    [InlineData(SbomValidationSeverity.Info)]
    public void Diagnostic_CanHaveAnySeverity(SbomValidationSeverity severity)
    {
        // Act
        var sut = new SbomValidationDiagnostic
        {
            Severity = severity,
            Code = "TEST",
            Message = "Test message"
        };

        // Assert
        sut.Severity.Should().Be(severity);
    }
}
|
||||
@@ -0,0 +1,118 @@
|
||||
// <copyright file="SbomValidationOptionsTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="SbomValidationOptions"/>: defaults, customization,
/// and the Validate() self-check (timeout bounds).
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class SbomValidationOptionsTests
{
    // Pins the documented defaults: strict mode, 30 s timeout, warnings and
    // license validation on, all optional knobs null.
    [Fact]
    public void DefaultOptions_HaveCorrectDefaults()
    {
        // Act
        var options = new SbomValidationOptions();

        // Assert
        options.Mode.Should().Be(SbomValidationMode.Strict);
        options.Timeout.Should().Be(TimeSpan.FromSeconds(30));
        options.IncludeWarnings.Should().BeTrue();
        options.ExpectedSchemaVersion.Should().BeNull();
        options.ValidateLicenses.Should().BeTrue();
        options.CustomRulesPath.Should().BeNull();
        options.RequiredSpdxProfiles.Should().BeNull();
    }

    // Every property is settable and round-trips.
    [Fact]
    public void Options_CanBeCustomized()
    {
        // Act
        var options = new SbomValidationOptions
        {
            Mode = SbomValidationMode.Lenient,
            Timeout = TimeSpan.FromMinutes(2),
            IncludeWarnings = false,
            ExpectedSchemaVersion = "1.6",
            ValidateLicenses = false,
            CustomRulesPath = "/custom/rules.json",
            RequiredSpdxProfiles = new[] { "core", "software", "security" }
        };

        // Assert
        options.Mode.Should().Be(SbomValidationMode.Lenient);
        options.Timeout.Should().Be(TimeSpan.FromMinutes(2));
        options.IncludeWarnings.Should().BeFalse();
        options.ExpectedSchemaVersion.Should().Be("1.6");
        options.ValidateLicenses.Should().BeFalse();
        options.CustomRulesPath.Should().Be("/custom/rules.json");
        options.RequiredSpdxProfiles.Should().HaveCount(3);
    }

    // A timeout within bounds produces no validation errors.
    [Fact]
    public void Validate_ValidOptions_ReturnsEmpty()
    {
        // Arrange
        var options = new SbomValidationOptions
        {
            Timeout = TimeSpan.FromSeconds(30)
        };

        // Act
        var errors = options.Validate();

        // Assert
        errors.Should().BeEmpty();
    }

    // Lower bound: non-positive timeouts are rejected with a single error.
    [Fact]
    public void Validate_NegativeTimeout_ReturnsError()
    {
        // Arrange
        var options = new SbomValidationOptions
        {
            Timeout = TimeSpan.FromSeconds(-1)
        };

        // Act
        var errors = options.Validate();

        // Assert
        errors.Should().ContainSingle();
        errors[0].Should().Contain("Timeout must be positive");
    }

    // Upper bound: timeouts above 10 minutes are rejected with a single error.
    [Fact]
    public void Validate_ExcessiveTimeout_ReturnsError()
    {
        // Arrange
        var options = new SbomValidationOptions
        {
            Timeout = TimeSpan.FromMinutes(15)
        };

        // Act
        var errors = options.Validate();

        // Assert
        errors.Should().ContainSingle();
        errors[0].Should().Contain("cannot exceed 10 minutes");
    }

    // The mode enum defines all four documented modes.
    [Theory]
    [InlineData(SbomValidationMode.Strict)]
    [InlineData(SbomValidationMode.Lenient)]
    [InlineData(SbomValidationMode.Audit)]
    [InlineData(SbomValidationMode.Off)]
    public void SbomValidationMode_HasExpectedValues(SbomValidationMode mode)
    {
        // Assert
        Enum.IsDefined(mode).Should().BeTrue();
    }
}
|
||||
@@ -0,0 +1,144 @@
|
||||
// <copyright file="SbomValidationResultTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="SbomValidationResult"/>: the Success/Failure/
/// ValidatorUnavailable factories and the derived Error/Warning counters.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class SbomValidationResultTests
{
    // Success with no diagnostics: valid, metadata round-trips, counters are zero.
    [Fact]
    public void Success_CreatesValidResult()
    {
        // Arrange
        var format = SbomFormat.CycloneDxJson;
        var validatorName = "test-validator";
        var validatorVersion = "1.0.0";
        var duration = TimeSpan.FromMilliseconds(100);

        // Act
        var result = SbomValidationResult.Success(format, validatorName, validatorVersion, duration);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Format.Should().Be(format);
        result.ValidatorName.Should().Be(validatorName);
        result.ValidatorVersion.Should().Be(validatorVersion);
        result.ValidationDuration.Should().Be(duration);
        result.Diagnostics.Should().BeEmpty();
        result.ErrorCount.Should().Be(0);
        result.WarningCount.Should().Be(0);
    }

    // Success may carry warning diagnostics without becoming invalid.
    [Fact]
    public void Success_WithDiagnostics_IncludesWarnings()
    {
        // Arrange
        var diagnostics = new[]
        {
            new SbomValidationDiagnostic
            {
                Severity = SbomValidationSeverity.Warning,
                Code = "WARN001",
                Message = "Minor issue"
            }
        };

        // Act
        var result = SbomValidationResult.Success(
            SbomFormat.CycloneDxJson,
            "test",
            "1.0",
            TimeSpan.FromMilliseconds(50),
            diagnostics);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Diagnostics.Should().HaveCount(1);
        result.WarningCount.Should().Be(1);
        result.ErrorCount.Should().Be(0);
    }

    // Failure: invalid result with the supplied error diagnostics attached.
    [Fact]
    public void Failure_CreatesInvalidResult()
    {
        // Arrange
        var diagnostics = new[]
        {
            new SbomValidationDiagnostic
            {
                Severity = SbomValidationSeverity.Error,
                Code = "ERR001",
                Message = "Schema violation"
            }
        };

        // Act
        var result = SbomValidationResult.Failure(
            SbomFormat.Spdx23Json,
            "spdx-tools",
            "1.1.8",
            TimeSpan.FromSeconds(1),
            diagnostics);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Format.Should().Be(SbomFormat.Spdx23Json);
        result.Diagnostics.Should().HaveCount(1);
        result.ErrorCount.Should().Be(1);
    }

    // ValidatorUnavailable: a synthetic failure with an "unknown" version and a
    // single VALIDATOR_UNAVAILABLE diagnostic carrying the reason text.
    [Fact]
    public void ValidatorUnavailable_CreatesErrorResult()
    {
        // Arrange
        var reason = "sbom-utility not found in PATH";

        // Act
        var result = SbomValidationResult.ValidatorUnavailable(
            SbomFormat.CycloneDxXml,
            "sbom-utility",
            reason);

        // Assert
        result.IsValid.Should().BeFalse();
        result.ValidatorVersion.Should().Be("unknown");
        result.Diagnostics.Should().HaveCount(1);
        result.Diagnostics[0].Code.Should().Be("VALIDATOR_UNAVAILABLE");
        result.Diagnostics[0].Message.Should().Contain(reason);
    }

    // ErrorCount/WarningCount filter the diagnostics by severity; Info is counted by neither.
    [Fact]
    public void ErrorCount_CountsOnlyErrors()
    {
        // Arrange — 2 errors, 1 warning, 1 info.
        var diagnostics = new[]
        {
            new SbomValidationDiagnostic { Severity = SbomValidationSeverity.Error, Code = "E1", Message = "Error 1" },
            new SbomValidationDiagnostic { Severity = SbomValidationSeverity.Warning, Code = "W1", Message = "Warning 1" },
            new SbomValidationDiagnostic { Severity = SbomValidationSeverity.Info, Code = "I1", Message = "Info 1" },
            new SbomValidationDiagnostic { Severity = SbomValidationSeverity.Error, Code = "E2", Message = "Error 2" },
        };

        var result = new SbomValidationResult
        {
            IsValid = false,
            Format = SbomFormat.CycloneDxJson,
            ValidatorName = "test",
            ValidatorVersion = "1.0",
            Diagnostics = diagnostics.ToImmutableArray()
        };

        // Assert
        result.ErrorCount.Should().Be(2);
        result.WarningCount.Should().Be(1);
    }
}
|
||||
@@ -0,0 +1,133 @@
|
||||
// <copyright file="SpdxValidatorTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="SpdxValidator"/>.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class SpdxValidatorTests
{
    private readonly SpdxValidator _validator;

    public SpdxValidatorTests()
    {
        // Default validator: plausible paths, generous timeout. Tests that
        // need a broken java binary construct their own via CreateValidator.
        _validator = CreateValidator(new SpdxValidatorOptions
        {
            JavaPath = "java",
            SpdxToolsJarPath = "spdx-tools.jar",
            DefaultTimeout = TimeSpan.FromSeconds(60)
        });
    }

    [Theory]
    [InlineData(SbomFormat.Spdx23Json, true)]
    [InlineData(SbomFormat.Spdx23TagValue, true)]
    [InlineData(SbomFormat.Spdx3JsonLd, true)]
    [InlineData(SbomFormat.CycloneDxJson, false)]
    [InlineData(SbomFormat.CycloneDxXml, false)]
    [InlineData(SbomFormat.Unknown, false)]
    public void SupportsFormat_ReturnsCorrectly(SbomFormat format, bool expected)
    {
        // Act & Assert: only SPDX formats are claimed by this validator.
        _validator.SupportsFormat(format).Should().Be(expected);
    }

    [Fact]
    public async Task ValidateAsync_UnsupportedFormat_ReturnsFailure()
    {
        // Arrange: a CycloneDX document, which this SPDX validator rejects.
        var payload = """
            {
                "bomFormat": "CycloneDX",
                "specVersion": "1.6"
            }
            """u8.ToArray();

        // Act
        var result = await _validator.ValidateAsync(
            payload,
            SbomFormat.CycloneDxJson,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Diagnostics.Should().Contain(d => d.Code == "UNSUPPORTED_FORMAT");
    }

    [Fact]
    public async Task ValidateAsync_JavaNotAvailable_ReturnsUnavailable()
    {
        // Arrange: point the validator at a java binary that cannot exist.
        var validator = CreateValidator(new SpdxValidatorOptions
        {
            JavaPath = "/nonexistent/path/java-does-not-exist",
            SpdxToolsJarPath = "/nonexistent/spdx-tools.jar",
            DefaultTimeout = TimeSpan.FromSeconds(5)
        });

        var payload = """
            {
                "spdxVersion": "SPDX-2.3",
                "dataLicense": "CC0-1.0"
            }
            """u8.ToArray();

        // Act
        var result = await validator.ValidateAsync(
            payload,
            SbomFormat.Spdx23Json,
            cancellationToken: TestContext.Current.CancellationToken);

        // Assert: any of the launcher-failure diagnostic codes is acceptable,
        // since the exact code depends on where process start fails.
        result.IsValid.Should().BeFalse();
        result.Diagnostics.Should().Contain(d =>
            d.Code == "JAVA_NOT_AVAILABLE" ||
            d.Code == "VALIDATOR_UNAVAILABLE" ||
            d.Code == "VALIDATION_ERROR");
    }

    [Fact]
    public async Task GetInfoAsync_JavaNotAvailable_ReturnsUnavailable()
    {
        // Arrange: again a non-existent java path; no timeout override needed.
        var validator = CreateValidator(new SpdxValidatorOptions
        {
            JavaPath = "/nonexistent/path/java-does-not-exist",
            SpdxToolsJarPath = "/nonexistent/spdx-tools.jar"
        });

        // Act
        var info = await validator.GetInfoAsync(TestContext.Current.CancellationToken);

        // Assert
        info.IsAvailable.Should().BeFalse();
        info.Name.Should().Be("spdx-tools-java");
    }

    /// <summary>
    /// Builds a validator over the supplied options with null logging and the
    /// system clock, so each test only spells out the options that matter.
    /// </summary>
    private static SpdxValidator CreateValidator(SpdxValidatorOptions options) =>
        new(
            Options.Create(options),
            NullLogger<SpdxValidator>.Instance,
            TimeProvider.System);
}
|
||||
@@ -0,0 +1,191 @@
|
||||
// <copyright file="ValidationGateOptionsTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ValidationGateOptions"/>.
/// Sprint: SPRINT_20260107_005_003 Task VG-007, VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class ValidationGateOptionsTests
{
    [Fact]
    public void DefaultOptions_HaveCorrectDefaults()
    {
        // Act
        var options = new ValidationGateOptions();

        // Assert: one check per documented default.
        options.Enabled.Should().BeTrue();
        options.Mode.Should().Be(SbomValidationMode.Strict);
        options.TimeoutSeconds.Should().Be(30);
        options.IncludeWarnings.Should().BeTrue();
        options.ValidateLicenses.Should().BeTrue();
        options.CustomRulesPath.Should().BeNull();
        options.RequiredSpdxProfiles.Should().BeEmpty();
        options.FailOnValidationError.Should().BeTrue();
        options.CacheResults.Should().BeTrue();
        options.CacheTtlSeconds.Should().Be(3600);
    }

    [Fact]
    public void Timeout_ReturnsTimeSpan()
    {
        // Arrange
        var options = new ValidationGateOptions { TimeoutSeconds = 60 };

        // Act & Assert: the convenience property mirrors the seconds value.
        options.Timeout.Should().Be(TimeSpan.FromSeconds(60));
    }

    [Fact]
    public void CacheTtl_ReturnsTimeSpan()
    {
        // Arrange
        var options = new ValidationGateOptions { CacheTtlSeconds = 7200 };

        // Act & Assert: 7200 seconds is exactly two hours.
        options.CacheTtl.Should().Be(TimeSpan.FromHours(2));
    }

    [Fact]
    public void ToValidationOptions_CreatesCorrectOptions()
    {
        // Arrange: set every knob that the conversion must carry over.
        var gateOptions = new ValidationGateOptions
        {
            Mode = SbomValidationMode.Lenient,
            TimeoutSeconds = 120,
            IncludeWarnings = false,
            ValidateLicenses = false,
            RequiredSpdxProfiles = new List<string> { "core", "software" }
        };

        // Act
        var converted = gateOptions.ToValidationOptions();

        // Assert
        converted.Mode.Should().Be(SbomValidationMode.Lenient);
        converted.Timeout.Should().Be(TimeSpan.FromSeconds(120));
        converted.IncludeWarnings.Should().BeFalse();
        converted.ValidateLicenses.Should().BeFalse();
        converted.RequiredSpdxProfiles.Should().BeEquivalentTo(new[] { "core", "software" });
    }

    [Fact]
    public void DataAnnotations_TimeoutTooLow_FailsValidation()
    {
        // Act
        var (isValid, results) = ValidateAnnotations(new ValidationGateOptions { TimeoutSeconds = 0 });

        // Assert
        isValid.Should().BeFalse();
        results.Should().Contain(r => r.MemberNames.Contains(nameof(ValidationGateOptions.TimeoutSeconds)));
    }

    [Fact]
    public void DataAnnotations_TimeoutTooHigh_FailsValidation()
    {
        // Act
        var (isValid, results) = ValidateAnnotations(new ValidationGateOptions { TimeoutSeconds = 700 });

        // Assert
        isValid.Should().BeFalse();
        results.Should().Contain(r => r.MemberNames.Contains(nameof(ValidationGateOptions.TimeoutSeconds)));
    }

    [Fact]
    public void DataAnnotations_CacheTtlTooLow_FailsValidation()
    {
        // Act
        var (isValid, results) = ValidateAnnotations(new ValidationGateOptions { CacheTtlSeconds = 30 });

        // Assert
        isValid.Should().BeFalse();
        results.Should().Contain(r => r.MemberNames.Contains(nameof(ValidationGateOptions.CacheTtlSeconds)));
    }

    [Fact]
    public void Validate_ModeOffWithFailOnError_ReturnsError()
    {
        // Arrange: contradictory configuration — validation is off but
        // failures are expected to block.
        var options = new ValidationGateOptions
        {
            Mode = SbomValidationMode.Off,
            FailOnValidationError = true
        };

        // Act
        var results = options.Validate(new ValidationContext(options)).ToList();

        // Assert
        results.Should().ContainSingle();
        results[0].ErrorMessage.Should().Contain("FailOnValidationError should be false");
    }

    [Fact]
    public void Validate_ModeOffWithFailOnErrorFalse_ReturnsNoError()
    {
        // Arrange: consistent configuration — both switches off.
        var options = new ValidationGateOptions
        {
            Mode = SbomValidationMode.Off,
            FailOnValidationError = false
        };

        // Act
        var results = options.Validate(new ValidationContext(options)).ToList();

        // Assert
        results.Should().BeEmpty();
    }

    [Fact]
    public void SectionName_IsCorrect()
    {
        // Assert: configuration binding relies on this literal.
        ValidationGateOptions.SectionName.Should().Be("ValidationGate");
    }

    [Theory]
    [InlineData(1)]
    [InlineData(30)]
    [InlineData(600)]
    public void DataAnnotations_ValidTimeout_PassesValidation(int timeoutSeconds)
    {
        // Act
        var (isValid, results) = ValidateAnnotations(new ValidationGateOptions { TimeoutSeconds = timeoutSeconds });

        // Assert
        isValid.Should().BeTrue();
        results.Should().BeEmpty();
    }

    /// <summary>
    /// Runs DataAnnotations validation over all properties of
    /// <paramref name="options"/> and returns the outcome plus any failures.
    /// </summary>
    private static (bool IsValid, List<ValidationResult> Results) ValidateAnnotations(ValidationGateOptions options)
    {
        var results = new List<ValidationResult>();
        var isValid = Validator.TryValidateObject(
            options,
            new ValidationContext(options),
            results,
            validateAllProperties: true);
        return (isValid, results);
    }
}
|
||||
@@ -0,0 +1,74 @@
|
||||
// <copyright file="ValidatorInfoTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ValidatorInfo"/>.
/// Sprint: SPRINT_20260107_005_003 Task VG-009
/// </summary>
[Trait("Category", "Unit")]
public sealed class ValidatorInfoTests
{
    [Fact]
    public void ValidatorInfo_RequiredProperties()
    {
        // Arrange & Act: set only the required members.
        var info = new ValidatorInfo
        {
            Name = "sbom-utility",
            Version = "0.16.0",
            IsAvailable = true
        };

        // Assert: required members round-trip and the optional collections
        // default to empty rather than null.
        info.Name.Should().Be("sbom-utility");
        info.Version.Should().Be("0.16.0");
        info.IsAvailable.Should().BeTrue();
        info.SupportedFormats.Should().BeEmpty();
        info.SupportedSchemaVersions.Should().BeEmpty();
    }

    [Fact]
    public void ValidatorInfo_WithSupportedFormats()
    {
        // Arrange
        var supportedFormats = ImmutableArray.Create(
            SbomFormat.Spdx23Json,
            SbomFormat.Spdx23TagValue,
            SbomFormat.Spdx3JsonLd);

        // Act
        var info = new ValidatorInfo
        {
            Name = "spdx-tools",
            Version = "1.1.8",
            IsAvailable = true,
            SupportedFormats = supportedFormats,
            SupportedSchemaVersions = ImmutableArray.Create("2.3", "3.0.1")
        };

        // Assert
        info.SupportedFormats.Should().HaveCount(3);
        info.SupportedFormats.Should().Contain(SbomFormat.Spdx3JsonLd);
        info.SupportedSchemaVersions.Should().HaveCount(2);
    }

    [Fact]
    public void ValidatorInfo_UnavailableValidator()
    {
        // Arrange & Act: an absent tool reports "unknown" as its version.
        var info = new ValidatorInfo
        {
            Name = "missing-tool",
            Version = "unknown",
            IsAvailable = false
        };

        // Assert
        info.IsAvailable.Should().BeFalse();
        info.Version.Should().Be("unknown");
    }
}
|
||||
@@ -0,0 +1,407 @@
|
||||
// <copyright file="ValidatorBinaryManagerTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Scanner.Validation;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Validation.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ValidatorBinaryManager"/>.
/// Sprint: SPRINT_20260107_005_003 Task VG-004
/// </summary>
[Trait("Category", "Unit")]
public sealed class ValidatorBinaryManagerTests : IDisposable
{
    private readonly Mock<IHttpClientFactory> _httpClientFactoryMock;
    private readonly ValidatorBinaryOptions _options;
    private readonly ValidatorBinaryManager _manager;

    public ValidatorBinaryManagerTests()
    {
        _httpClientFactoryMock = new Mock<IHttpClientFactory>();
        _options = new ValidatorBinaryOptions
        {
            // Unique per-test directory so parallel test runs cannot collide.
            BinaryDirectory = Path.Combine(Path.GetTempPath(), $"stellaops-test-{Guid.NewGuid():N}"),
            OfflineMode = true // Default to offline mode for unit tests
        };

        _manager = CreateManager(_options);
    }

    /// <summary>
    /// Removes the per-test temp directory (and anything tests wrote into it)
    /// so repeated runs do not leak uniquely-named directories under the
    /// system temp path. xUnit invokes this after each test.
    /// </summary>
    public void Dispose()
    {
        var directory = _options.BinaryDirectory;
        if (string.IsNullOrEmpty(directory) || !Directory.Exists(directory))
        {
            return;
        }

        try
        {
            Directory.Delete(directory, recursive: true);
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
        {
            // Best-effort cleanup; a transiently locked file must not fail the run.
        }
    }

    /// <summary>
    /// Builds a manager over the supplied options with the shared mock HTTP
    /// factory, null logging, and the system clock.
    /// </summary>
    private ValidatorBinaryManager CreateManager(ValidatorBinaryOptions options) =>
        new(
            Options.Create(options),
            _httpClientFactoryMock.Object,
            NullLogger<ValidatorBinaryManager>.Instance,
            TimeProvider.System);

    /// <summary>Creates a minimal, valid spec for default-value assertions.</summary>
    private static ValidatorBinarySpec CreateTestSpec() => new()
    {
        Name = "test",
        Version = "1.0.0",
        BaseUrl = "https://example.com",
        FileNameFormat = "test-{0}.tar.gz",
        ExecutableName = "test"
    };

    #region IsBinaryAvailable Tests

    [Fact]
    public void IsBinaryAvailable_NullName_ReturnsFalse()
    {
        // Act
        var result = _manager.IsBinaryAvailable(null!);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void IsBinaryAvailable_EmptyName_ReturnsFalse()
    {
        // Act
        var result = _manager.IsBinaryAvailable(string.Empty);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void IsBinaryAvailable_UnknownValidator_ReturnsFalse()
    {
        // Act
        var result = _manager.IsBinaryAvailable("unknown-validator");

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void IsBinaryAvailable_KnownValidatorNotInstalled_ReturnsFalse()
    {
        // Act
        var result = _manager.IsBinaryAvailable("sbom-utility");

        // Assert
        result.Should().BeFalse(); // Not installed in test directory
    }

    #endregion

    #region GetBinaryPath Tests

    [Fact]
    public void GetBinaryPath_SbomUtility_ReturnsExpectedPath()
    {
        // Act
        var path = _manager.GetBinaryPath("sbom-utility");

        // Assert: the path embeds both tool name and pinned version.
        path.Should().NotBeNullOrEmpty();
        path.Should().Contain("sbom-utility");
        path.Should().Contain("0.17.0");
    }

    [Fact]
    public void GetBinaryPath_SpdxTools_ReturnsJarPath()
    {
        // Act
        var path = _manager.GetBinaryPath("spdx-tools");

        // Assert: the SPDX tool ships as a Java archive.
        path.Should().NotBeNullOrEmpty();
        path.Should().Contain("spdx-tools");
        path.Should().EndWith(".jar");
    }

    [Fact]
    public void GetBinaryPath_UnknownValidator_ThrowsException()
    {
        // Act
        var act = () => _manager.GetBinaryPath("unknown-validator");

        // Assert: the failing name must appear in the exception message.
        act.Should().Throw<ValidatorBinaryException>()
            .WithMessage("*unknown-validator*");
    }

    #endregion

    #region GetAvailableValidators Tests

    [Fact]
    public void GetAvailableValidators_ReturnsDefaultSpecs()
    {
        // Act
        var validators = _manager.GetAvailableValidators();

        // Assert: both built-in validators are registered.
        validators.Should().ContainKey("sbom-utility");
        validators.Should().ContainKey("spdx-tools");
    }

    [Fact]
    public void GetAvailableValidators_SbomUtilitySpec_HasCorrectVersion()
    {
        // Act
        var validators = _manager.GetAvailableValidators();

        // Assert
        validators["sbom-utility"].Version.Should().Be("0.17.0");
        validators["sbom-utility"].ExecutableName.Should().Be("sbom-utility");
    }

    [Fact]
    public void GetAvailableValidators_SpdxToolsSpec_IsJar()
    {
        // Act
        var validators = _manager.GetAvailableValidators();

        // Assert
        validators["spdx-tools"].IsJar.Should().BeTrue();
        validators["spdx-tools"].Version.Should().Be("1.1.9");
    }

    [Fact]
    public void GetAvailableValidators_WithCustomSpecs_MergesWithDefaults()
    {
        // Arrange: a spec under a brand-new name must merge, not replace.
        var customOptions = new ValidatorBinaryOptions
        {
            CustomSpecs = new Dictionary<string, ValidatorBinarySpec>
            {
                ["custom-validator"] = new ValidatorBinarySpec
                {
                    Name = "custom-validator",
                    Version = "1.0.0",
                    BaseUrl = "https://example.com",
                    FileNameFormat = "custom-{0}.tar.gz",
                    ExecutableName = "custom"
                }
            }
        };

        var manager = CreateManager(customOptions);

        // Act
        var validators = manager.GetAvailableValidators();

        // Assert
        validators.Should().ContainKey("custom-validator");
        validators.Should().ContainKey("sbom-utility"); // Default still present
    }

    [Fact]
    public void GetAvailableValidators_WithCustomSpecs_OverridesDefaults()
    {
        // Arrange: a custom spec under a default name must win.
        var customOptions = new ValidatorBinaryOptions
        {
            CustomSpecs = new Dictionary<string, ValidatorBinarySpec>
            {
                ["sbom-utility"] = new ValidatorBinarySpec
                {
                    Name = "sbom-utility",
                    Version = "0.18.0", // Override version
                    BaseUrl = "https://custom.example.com",
                    FileNameFormat = "sbom-utility-{0}.tar.gz",
                    ExecutableName = "sbom-utility"
                }
            }
        };

        var manager = CreateManager(customOptions);

        // Act
        var validators = manager.GetAvailableValidators();

        // Assert
        validators["sbom-utility"].Version.Should().Be("0.18.0");
    }

    #endregion

    #region EnsureBinaryAsync Tests

    [Fact]
    public async Task EnsureBinaryAsync_NullName_ThrowsArgumentException()
    {
        // Act
        var act = async () => await _manager.EnsureBinaryAsync(null!, TestContext.Current.CancellationToken);

        // Assert
        await act.Should().ThrowAsync<ArgumentException>();
    }

    [Fact]
    public async Task EnsureBinaryAsync_EmptyName_ThrowsArgumentException()
    {
        // Act
        var act = async () => await _manager.EnsureBinaryAsync(string.Empty, TestContext.Current.CancellationToken);

        // Assert
        await act.Should().ThrowAsync<ArgumentException>();
    }

    [Fact]
    public async Task EnsureBinaryAsync_OfflineModeNotInstalled_ThrowsException()
    {
        // Act: offline mode (the fixture default) forbids downloading.
        var act = async () => await _manager.EnsureBinaryAsync(
            "sbom-utility",
            TestContext.Current.CancellationToken);

        // Assert
        await act.Should().ThrowAsync<ValidatorBinaryException>()
            .WithMessage("*offline mode*");
    }

    [Fact]
    public async Task EnsureBinaryAsync_AlreadyInstalled_ReturnsPath()
    {
        // Arrange: pre-create a fake binary at the expected install path.
        var path = _manager.GetBinaryPath("sbom-utility");
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);
        await File.WriteAllTextAsync(path, "mock executable", TestContext.Current.CancellationToken);

        try
        {
            // Act
            var result = await _manager.EnsureBinaryAsync(
                "sbom-utility",
                TestContext.Current.CancellationToken);

            // Assert: no download attempted; the existing path is returned.
            result.Should().Be(path);
        }
        finally
        {
            // Cleanup (Dispose also removes the whole temp directory).
            if (File.Exists(path))
            {
                File.Delete(path);
            }
        }
    }

    #endregion

    #region VerifyBinaryIntegrityAsync Tests

    [Fact]
    public async Task VerifyBinaryIntegrityAsync_FileNotExists_ReturnsFalse()
    {
        // Act
        var result = await _manager.VerifyBinaryIntegrityAsync(
            "sbom-utility",
            TestContext.Current.CancellationToken);

        // Assert
        result.Should().BeFalse();
    }

    #endregion

    #region ValidatorBinarySpec Tests

    [Fact]
    public void ValidatorBinarySpec_DefaultExpectedHashes_IsEmpty()
    {
        // Arrange
        var spec = CreateTestSpec();

        // Assert
        spec.ExpectedHashes.Should().BeEmpty();
    }

    [Fact]
    public void ValidatorBinarySpec_DefaultIsJar_IsFalse()
    {
        // Arrange
        var spec = CreateTestSpec();

        // Assert
        spec.IsJar.Should().BeFalse();
    }

    #endregion

    #region ValidatorBinaryOptions Tests

    [Fact]
    public void ValidatorBinaryOptions_DefaultDownloadTimeout_Is5Minutes()
    {
        // Arrange
        var options = new ValidatorBinaryOptions();

        // Assert
        options.DownloadTimeout.Should().Be(TimeSpan.FromMinutes(5));
    }

    [Fact]
    public void ValidatorBinaryOptions_DefaultOfflineMode_IsFalse()
    {
        // Arrange
        var options = new ValidatorBinaryOptions();

        // Assert
        options.OfflineMode.Should().BeFalse();
    }

    [Fact]
    public void ValidatorBinaryOptions_DefaultBinaryDirectory_IsNull()
    {
        // Arrange
        var options = new ValidatorBinaryOptions();

        // Assert
        options.BinaryDirectory.Should().BeNull();
    }

    #endregion

    #region ValidatorBinaryException Tests

    [Fact]
    public void ValidatorBinaryException_MessageOnly_SetsMessage()
    {
        // Arrange & Act
        var ex = new ValidatorBinaryException("Test error");

        // Assert
        ex.Message.Should().Be("Test error");
    }

    [Fact]
    public void ValidatorBinaryException_WithInnerException_SetsInner()
    {
        // Arrange
        var inner = new InvalidOperationException("Inner error");

        // Act
        var ex = new ValidatorBinaryException("Test error", inner);

        // Assert
        ex.Message.Should().Be("Test error");
        ex.InnerException.Should().Be(inner);
    }

    #endregion
}
|
||||
@@ -0,0 +1,462 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EvidenceIntegrationTests.cs
|
||||
// Sprint: SPRINT_20260107_005_001_LB_cdx17_evidence_models
|
||||
// Task: EV-012 - Integration tests for CycloneDX 1.7 native evidence fields
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Scanner.Storage.ObjectStore;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for CycloneDX 1.7 native evidence field population.
|
||||
/// Verifies end-to-end SBOM generation produces spec-compliant evidence structures.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
public sealed class EvidenceIntegrationTests : IAsyncLifetime
|
||||
{
|
||||
private ScannerApplicationFactory _factory = null!;
|
||||
private HttpClient _client = null!;
|
||||
|
||||
public ValueTask InitializeAsync()
|
||||
{
|
||||
_factory = new ScannerApplicationFactory().WithOverrides(
|
||||
configuration =>
|
||||
{
|
||||
configuration["scanner:authority:enabled"] = "false";
|
||||
configuration["scanner:emit:useNativeEvidence"] = "true";
|
||||
},
|
||||
configureServices: services =>
|
||||
{
|
||||
services.RemoveAll<IArtifactObjectStore>();
|
||||
services.AddSingleton<IArtifactObjectStore>(new InMemoryArtifactObjectStore());
|
||||
});
|
||||
|
||||
_client = _factory.CreateClient();
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
_client.Dispose();
|
||||
await _factory.DisposeAsync();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomSubmit_WithComponents_PopulatesNativeEvidenceFields()
|
||||
{
|
||||
// Arrange
|
||||
var scanId = await CreateScanAsync();
|
||||
|
||||
var sbomJson = """
|
||||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.7",
|
||||
"version": 1,
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"name": "lodash",
|
||||
"version": "4.17.21",
|
||||
"purl": "pkg:npm/lodash@4.17.21",
|
||||
"evidence": {
|
||||
"identity": {
|
||||
"field": "purl",
|
||||
"confidence": 0.95,
|
||||
"methods": [
|
||||
{
|
||||
"technique": "manifest-analysis",
|
||||
"confidence": 0.95
|
||||
}
|
||||
]
|
||||
},
|
||||
"occurrences": [
|
||||
{
|
||||
"location": "/app/node_modules/lodash/package.json"
|
||||
}
|
||||
],
|
||||
"licenses": [
|
||||
{
|
||||
"license": {
|
||||
"id": "MIT"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
using var request = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom");
|
||||
var content = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
|
||||
content.Headers.ContentType?.Parameters.Add(
|
||||
new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
|
||||
request.Content = content;
|
||||
|
||||
var response = await _client.SendAsync(request, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(1, payload!.ComponentCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomSubmit_WithLegacyProperties_PreservesEvidenceOnRoundTrip()
|
||||
{
|
||||
// Arrange
|
||||
var scanId = await CreateScanAsync();
|
||||
|
||||
// Legacy format with stellaops:evidence[n] properties
|
||||
var sbomJson = """
|
||||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.7",
|
||||
"version": 1,
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"name": "express",
|
||||
"version": "4.18.2",
|
||||
"purl": "pkg:npm/express@4.18.2",
|
||||
"properties": [
|
||||
{
|
||||
"name": "stellaops:evidence[0]",
|
||||
"value": "manifest:package.json@/app/node_modules/express/package.json"
|
||||
},
|
||||
{
|
||||
"name": "stellaops:evidence[1]",
|
||||
"value": "binary:sha256:abc123@/app/node_modules/express/lib/router/index.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
using var request = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom");
|
||||
var content = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
|
||||
content.Headers.ContentType?.Parameters.Add(
|
||||
new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
|
||||
request.Content = content;
|
||||
|
||||
var response = await _client.SendAsync(request, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(1, payload!.ComponentCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomSubmit_WithCallstackEvidence_PreservesReachabilityData()
|
||||
{
|
||||
// Arrange
|
||||
var scanId = await CreateScanAsync();
|
||||
|
||||
var sbomJson = """
|
||||
{
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.7",
|
||||
"version": 1,
|
||||
"components": [
|
||||
{
|
||||
"type": "library",
|
||||
"name": "vulnerable-lib",
|
||||
"version": "1.0.0",
|
||||
"purl": "pkg:npm/vulnerable-lib@1.0.0",
|
||||
"evidence": {
|
||||
"callstack": {
|
||||
"frames": [
|
||||
{
|
||||
"module": "app.js",
|
||||
"function": "handleRequest",
|
||||
"line": 42
|
||||
},
|
||||
{
|
||||
"module": "vulnerable-lib/index.js",
|
||||
"function": "vulnerableMethod",
|
||||
"line": 15
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
""";
|
||||
|
||||
// Act
|
||||
using var request = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom");
|
||||
var content = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
|
||||
content.Headers.ContentType?.Parameters.Add(
|
||||
new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
|
||||
request.Content = content;
|
||||
|
||||
var response = await _client.SendAsync(request, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
|
||||
}
|
||||
|
||||
[Fact]
public async Task SbomSubmit_WithCopyrightEvidence_DeduplicatesEntries()
{
    // Arrange: two identical copyright entries plus one distinct one,
    // so the server-side deduplication path is exercised.
    var scanId = await CreateScanAsync();

    var sbomJson = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": [
            {
              "type": "library",
              "name": "acme-lib",
              "version": "2.0.0",
              "purl": "pkg:npm/acme-lib@2.0.0",
              "evidence": {
                "copyright": [
                  {
                    "text": "Copyright 2024 ACME Corporation"
                  },
                  {
                    "text": "Copyright 2024 ACME Corporation"
                  },
                  {
                    "text": "Copyright 2023 ACME Inc."
                  }
                ]
              }
            }
          ]
        }
        """;

    // Act: submit as CycloneDX 1.7 JSON.
    var body = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
    body.Headers.ContentType?.Parameters.Add(
        new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
    using var submission = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom")
    {
        Content = body,
    };

    var response = await _client.SendAsync(submission, TestContext.Current.CancellationToken);

    // Assert
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
}
|
||||
|
||||
[Fact]
public async Task SbomSubmit_VerifySerializationRoundTrip()
{
    // Arrange
    var scanId = await CreateScanAsync();

    // Evidence payload is serialized straight into the SBOM JSON below;
    // acceptance of the document proves it survived the round trip.
    var evidencePayload = new
    {
        identity = new
        {
            field = "purl",
            confidence = 0.9,
            methods = new[]
            {
                new { technique = "binary-analysis", confidence = 0.9 }
            }
        },
        occurrences = new[]
        {
            new { location = "/lib/test.so", line = 100, offset = 0x1234 }
        },
        licenses = new[]
        {
            new { license = new { id = "Apache-2.0" } }
        }
    };

    var sbomJson = $$"""
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": [
            {
              "type": "library",
              "name": "test-component",
              "version": "1.0.0",
              "purl": "pkg:generic/test-component@1.0.0",
              "evidence": {{JsonSerializer.Serialize(evidencePayload)}}
            }
          ]
        }
        """;

    // Act
    var body = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
    body.Headers.ContentType?.Parameters.Add(
        new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
    using var submission = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom")
    {
        Content = body,
    };

    var response = await _client.SendAsync(submission, TestContext.Current.CancellationToken);

    // Assert - SBOM accepted means it was successfully parsed and stored
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

    var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
    Assert.NotNull(payload);
    Assert.NotNull(payload!.SbomId);
    Assert.StartsWith("sha256:", payload.Digest, StringComparison.Ordinal);
}
|
||||
|
||||
[Fact]
public async Task SbomSubmit_WithMixedEvidenceTypes_ProcessesAllEvidence()
{
    // Arrange: one component carrying every evidence type at once
    // (identity, occurrences, licenses, copyright, callstack).
    var scanId = await CreateScanAsync();

    var sbomJson = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": [
            {
              "type": "library",
              "name": "multi-evidence-lib",
              "version": "3.0.0",
              "purl": "pkg:npm/multi-evidence-lib@3.0.0",
              "evidence": {
                "identity": {
                  "field": "purl",
                  "confidence": 0.85,
                  "methods": [
                    { "technique": "manifest-analysis", "confidence": 0.85 },
                    { "technique": "source-code-analysis", "confidence": 0.75 }
                  ]
                },
                "occurrences": [
                  { "location": "/app/package.json" },
                  { "location": "/app/src/index.js", "line": 5 }
                ],
                "licenses": [
                  { "license": { "id": "MIT" } },
                  { "license": { "name": "Custom License" } }
                ],
                "copyright": [
                  { "text": "Copyright 2024 Multi Corp" }
                ],
                "callstack": {
                  "frames": [
                    { "module": "entry.js", "function": "main", "line": 1 },
                    { "module": "multi-evidence-lib", "function": "init", "line": 10 }
                  ]
                }
              }
            }
          ]
        }
        """;

    // Act
    var body = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
    body.Headers.ContentType?.Parameters.Add(
        new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
    using var submission = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom")
    {
        Content = body,
    };

    var response = await _client.SendAsync(submission, TestContext.Current.CancellationToken);

    // Assert
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

    var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
    Assert.NotNull(payload);
    Assert.Equal(1, payload!.ComponentCount);
}
|
||||
|
||||
/// <summary>
/// Submits a minimal scan request and returns the new scan identifier.
/// </summary>
private async Task<string> CreateScanAsync()
{
    var submitRequest = new ScanSubmitRequest
    {
        Image = new ScanImageDescriptor
        {
            Reference = "example.com/evidence-test:1.0",
            Digest = "sha256:fedcba9876543210"
        }
    };

    var response = await _client.PostAsJsonAsync("/api/v1/scans", submitRequest);
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

    var payload = await response.Content.ReadFromJsonAsync<ScanSubmitResponse>();
    Assert.NotNull(payload);
    return payload!.ScanId;
}
|
||||
|
||||
/// <summary>
/// In-memory artifact store for testing without external dependencies.
/// Objects are keyed by "bucket:key" with ordinal comparison.
/// </summary>
private sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
{
    private readonly System.Collections.Concurrent.ConcurrentDictionary<string, byte[]> _objects = new(StringComparer.Ordinal);

    // Single place that defines the composite storage key.
    private static string KeyFor(ArtifactObjectDescriptor descriptor) => $"{descriptor.Bucket}:{descriptor.Key}";

    public async Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(descriptor);
        ArgumentNullException.ThrowIfNull(content);

        // Buffer the whole payload; stores a snapshot, not the live stream.
        using var buffer = new MemoryStream();
        await content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);

        _objects[KeyFor(descriptor)] = buffer.ToArray();
    }

    public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(descriptor);

        return _objects.TryGetValue(KeyFor(descriptor), out var bytes)
            ? Task.FromResult<Stream?>(new MemoryStream(bytes))
            : Task.FromResult<Stream?>(null);
    }

    public Task<bool> ExistsAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(descriptor);

        return Task.FromResult(_objects.ContainsKey(KeyFor(descriptor)));
    }

    public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(descriptor);

        // Deleting a missing object is a no-op, mirroring typical object-store semantics.
        _objects.TryRemove(KeyFor(descriptor), out _);
        return Task.CompletedTask;
    }
}
|
||||
}
|
||||
@@ -0,0 +1,483 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PedigreeIntegrationTests.cs
|
||||
// Sprint: SPRINT_20260107_005_002_BE_cdx17_pedigree_integration
|
||||
// Task: PD-013 - Integration tests for CycloneDX 1.7 Pedigree fields
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Scanner.Emit.Pedigree;
|
||||
using StellaOps.Scanner.Storage.ObjectStore;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests.Integration;
|
||||
|
||||
/// <summary>
/// Integration tests for CycloneDX 1.7 Pedigree field population via Feedser data.
/// These tests verify end-to-end pedigree enrichment during SBOM generation.
/// </summary>
[Trait("Category", "Integration")]
public sealed class PedigreeIntegrationTests : IAsyncLifetime
{
    private ScannerApplicationFactory _factory = null!;
    private HttpClient _client = null!;

    public ValueTask InitializeAsync()
    {
        _factory = new ScannerApplicationFactory().WithOverrides(
            configuration =>
            {
                configuration["scanner:authority:enabled"] = "false";
                configuration["scanner:pedigree:enabled"] = "true";
                configuration["scanner:pedigree:includeDiffs"] = "true";
            },
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(new InMemoryArtifactObjectStore());

                // Register mock pedigree provider so the tests do not depend on Feedser.
                services.RemoveAll<IPedigreeDataProvider>();
                services.AddSingleton<IPedigreeDataProvider>(new MockPedigreeDataProvider());
            });

        _client = _factory.CreateClient();
        return ValueTask.CompletedTask;
    }

    public async ValueTask DisposeAsync()
    {
        _client.Dispose();
        await _factory.DisposeAsync();
    }

    /// <summary>
    /// Submits <paramref name="sbomJson"/> for the given scan as a CycloneDX 1.7 JSON payload.
    /// Extracted because every test here used the identical five-line submission sequence.
    /// </summary>
    private async Task<HttpResponseMessage> SubmitSbomAsync(string scanId, string sbomJson)
    {
        using var request = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom");
        var content = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
        content.Headers.ContentType?.Parameters.Add(
            new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
        request.Content = content;

        return await _client.SendAsync(request, TestContext.Current.CancellationToken);
    }

    [Fact]
    public async Task SbomGeneration_WithPedigreeData_IncludesAncestors()
    {
        // Arrange: Debian openssl — the mock provider returns pedigree data for it.
        var scanId = await CreateScanAsync();

        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.7",
              "version": 1,
              "components": [
                {
                  "type": "library",
                  "name": "openssl",
                  "version": "1.1.1n-0+deb11u5",
                  "purl": "pkg:deb/debian/openssl@1.1.1n-0%2Bdeb11u5?distro=debian-11"
                }
              ]
            }
            """;

        // Act
        var response = await SubmitSbomAsync(scanId, sbomJson);

        // Assert
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal(1, payload!.ComponentCount);
    }

    [Fact]
    public async Task SbomGeneration_BackportedPackage_IncludesPatches()
    {
        // Arrange
        var scanId = await CreateScanAsync();

        // Component that has known backported patches
        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.7",
              "version": 1,
              "components": [
                {
                  "type": "library",
                  "name": "curl",
                  "version": "7.68.0-1ubuntu2.22",
                  "purl": "pkg:deb/ubuntu/curl@7.68.0-1ubuntu2.22?distro=ubuntu-20.04"
                }
              ]
            }
            """;

        // Act
        var response = await SubmitSbomAsync(scanId, sbomJson);

        // Assert
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
    }

    [Fact]
    public async Task SbomGeneration_ComponentWithCommits_IncludesProvenance()
    {
        // Arrange
        var scanId = await CreateScanAsync();

        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.7",
              "version": 1,
              "components": [
                {
                  "type": "library",
                  "name": "log4j-core",
                  "version": "2.17.1",
                  "purl": "pkg:maven/org.apache.logging.log4j/log4j-core@2.17.1"
                }
              ]
            }
            """;

        // Act
        var response = await SubmitSbomAsync(scanId, sbomJson);

        // Assert
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
    }

    [Fact]
    public async Task SbomGeneration_ComponentWithVariants_IncludesDistroMappings()
    {
        // Arrange
        var scanId = await CreateScanAsync();

        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.7",
              "version": 1,
              "components": [
                {
                  "type": "library",
                  "name": "zlib",
                  "version": "1.2.11.dfsg-2+deb11u2",
                  "purl": "pkg:deb/debian/zlib1g@1.2.11.dfsg-2%2Bdeb11u2?distro=debian-11"
                }
              ]
            }
            """;

        // Act
        var response = await SubmitSbomAsync(scanId, sbomJson);

        // Assert
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
    }

    [Fact]
    public async Task SbomGeneration_MultipleComponentsWithPedigree_EnrichesAll()
    {
        // Arrange
        var scanId = await CreateScanAsync();

        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.7",
              "version": 1,
              "components": [
                {
                  "type": "library",
                  "name": "openssl",
                  "version": "1.1.1n-0+deb11u5",
                  "purl": "pkg:deb/debian/openssl@1.1.1n-0%2Bdeb11u5"
                },
                {
                  "type": "library",
                  "name": "curl",
                  "version": "7.74.0-1.3+deb11u7",
                  "purl": "pkg:deb/debian/curl@7.74.0-1.3%2Bdeb11u7"
                },
                {
                  "type": "library",
                  "name": "zlib",
                  "version": "1.2.11.dfsg-2+deb11u2",
                  "purl": "pkg:deb/debian/zlib1g@1.2.11.dfsg-2%2Bdeb11u2"
                }
              ]
            }
            """;

        // Act
        var response = await SubmitSbomAsync(scanId, sbomJson);

        // Assert
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal(3, payload!.ComponentCount);
    }

    [Fact]
    public async Task PedigreeMapper_MapsPatchesCorrectly()
    {
        // Arrange: exercise the pedigree mapper directly with a fully-populated record.
        var mapper = new CycloneDxPedigreeMapper();

        var pedigreeData = new PedigreeData
        {
            Ancestors = ImmutableArray.Create(new AncestorComponent
            {
                Name = "openssl",
                Version = "1.1.1o",
                Purl = "pkg:generic/openssl@1.1.1o",
                Type = "library"
            }),
            Variants = ImmutableArray.Create(new VariantComponent
            {
                Name = "openssl",
                Version = "1.1.1n-0+deb11u5",
                Purl = "pkg:deb/debian/openssl@1.1.1n-0+deb11u5",
                Type = "library",
                Distribution = "debian-11"
            }),
            Commits = ImmutableArray.Create(new CommitInfo
            {
                Uid = "abc123def456",
                Url = "https://github.com/openssl/openssl/commit/abc123def456",
                Message = "Fix CVE-2024-1234 buffer overflow",
                Author = new CommitActor { Name = "maintainer", Email = "maintainer@openssl.org" }
            }),
            Patches = ImmutableArray.Create(new PatchInfo
            {
                Type = PatchType.Backport,
                DiffUrl = "https://salsa.debian.org/...",
                DiffText = "--- a/ssl/ssl_lib.c\n+++ b/ssl/ssl_lib.c\n@@ -100 @@\n-vulnerable\n+fixed",
                Resolves = ImmutableArray.Create(new PatchResolution
                {
                    Id = "CVE-2024-1234",
                    SourceName = "NVD"
                })
            }),
            Notes = "Backported security fix from upstream 1.1.1o (Tier 1: Confirmed by distro advisory)"
        };

        // Act
        var cdxPedigree = mapper.Map(pedigreeData);

        // Assert: every pedigree section maps across one-to-one.
        Assert.NotNull(cdxPedigree);
        Assert.Single(cdxPedigree.Ancestors);
        Assert.Single(cdxPedigree.Variants);
        Assert.Single(cdxPedigree.Commits);
        Assert.Single(cdxPedigree.Patches);
        Assert.Equal("Backported security fix from upstream 1.1.1o (Tier 1: Confirmed by distro advisory)", cdxPedigree.Notes);

        // Verify commit mapping
        var commit = cdxPedigree.Commits[0];
        Assert.Equal("abc123def456", commit.Uid);
        Assert.Equal("https://github.com/openssl/openssl/commit/abc123def456", commit.Url);

        // Verify patch mapping
        var patch = cdxPedigree.Patches[0];
        Assert.Equal(CycloneDX.Models.Patch.PatchClassification.Backport, patch.Type);
        Assert.NotNull(patch.Resolves);
        Assert.Single(patch.Resolves);
        Assert.Equal("CVE-2024-1234", patch.Resolves[0].Id);
    }

    /// <summary>
    /// Submits a minimal scan request and returns the new scan identifier.
    /// </summary>
    private async Task<string> CreateScanAsync()
    {
        var response = await _client.PostAsJsonAsync("/api/v1/scans", new ScanSubmitRequest
        {
            Image = new ScanImageDescriptor
            {
                Reference = "example.com/pedigree-test:1.0",
                Digest = "sha256:abcdef123456"
            }
        });

        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<ScanSubmitResponse>();
        Assert.NotNull(payload);
        return payload!.ScanId;
    }

    /// <summary>
    /// Mock pedigree provider that returns test data for known PURLs
    /// (any purl containing "openssl" or "curl"); all other purls yield null.
    /// </summary>
    private sealed class MockPedigreeDataProvider : IPedigreeDataProvider
    {
        public Task<PedigreeData?> GetPedigreeAsync(string purl, CancellationToken cancellationToken = default)
        {
            if (string.IsNullOrEmpty(purl))
            {
                return Task.FromResult<PedigreeData?>(null);
            }

            // Return mock pedigree data for Debian OpenSSL
            if (purl.Contains("openssl", StringComparison.OrdinalIgnoreCase))
            {
                return Task.FromResult<PedigreeData?>(new PedigreeData
                {
                    Ancestors = ImmutableArray.Create(new AncestorComponent
                    {
                        Name = "openssl",
                        Version = "1.1.1o",
                        Purl = "pkg:generic/openssl@1.1.1o",
                        Type = "library"
                    }),
                    Variants = ImmutableArray<VariantComponent>.Empty,
                    Commits = ImmutableArray.Create(new CommitInfo
                    {
                        Uid = "c0d0e1f2a3b4",
                        Url = "https://github.com/openssl/openssl/commit/c0d0e1f2a3b4",
                        Message = "Fix buffer overflow in SSL_verify"
                    }),
                    Patches = ImmutableArray.Create(new PatchInfo
                    {
                        Type = PatchType.Backport,
                        Resolves = ImmutableArray.Create(new PatchResolution
                        {
                            Id = "CVE-2024-0001",
                            SourceName = "NVD"
                        })
                    }),
                    Notes = "Tier 1: Confirmed by Debian Security Advisory DSA-5678"
                });
            }

            // Return mock data for curl
            if (purl.Contains("curl", StringComparison.OrdinalIgnoreCase))
            {
                return Task.FromResult<PedigreeData?>(new PedigreeData
                {
                    Ancestors = ImmutableArray.Create(new AncestorComponent
                    {
                        Name = "curl",
                        Version = "7.88.1",
                        Purl = "pkg:generic/curl@7.88.1"
                    }),
                    Patches = ImmutableArray.Create(new PatchInfo
                    {
                        Type = PatchType.Backport,
                        DiffText = "--- a/lib/url.c\n+++ b/lib/url.c\n...",
                        Resolves = ImmutableArray.Create(new PatchResolution
                        {
                            Id = "CVE-2024-0002",
                            SourceName = "NVD"
                        })
                    }),
                    Notes = "Tier 2: Changelog evidence"
                });
            }

            return Task.FromResult<PedigreeData?>(null);
        }

        public async Task<IReadOnlyDictionary<string, PedigreeData>> GetPedigreesBatchAsync(
            IEnumerable<string> purls,
            CancellationToken cancellationToken = default)
        {
            var results = new Dictionary<string, PedigreeData>();

            foreach (var purl in purls)
            {
                // FIX: was GetPedigreeAsync(...).Result — blocking on a task inside an
                // async-capable method (sync-over-async) risks deadlock and thread-pool
                // starvation; await the task instead.
                var data = await GetPedigreeAsync(purl, cancellationToken).ConfigureAwait(false);
                if (data != null)
                {
                    results[purl] = data;
                }
            }

            return results;
        }
    }

    /// <summary>
    /// In-memory artifact store for testing without external dependencies.
    /// </summary>
    private sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
    {
        private readonly System.Collections.Concurrent.ConcurrentDictionary<string, byte[]> _objects = new(StringComparer.Ordinal);

        public async Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);
            ArgumentNullException.ThrowIfNull(content);

            // Snapshot the stream so the stored bytes outlive the caller's stream.
            using var buffer = new MemoryStream();
            await content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);

            var key = $"{descriptor.Bucket}:{descriptor.Key}";
            _objects[key] = buffer.ToArray();
        }

        public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);

            var key = $"{descriptor.Bucket}:{descriptor.Key}";
            if (!_objects.TryGetValue(key, out var bytes))
            {
                return Task.FromResult<Stream?>(null);
            }

            return Task.FromResult<Stream?>(new MemoryStream(bytes));
        }

        public Task<bool> ExistsAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);

            var key = $"{descriptor.Bucket}:{descriptor.Key}";
            return Task.FromResult(_objects.ContainsKey(key));
        }

        public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            ArgumentNullException.ThrowIfNull(descriptor);

            var key = $"{descriptor.Bucket}:{descriptor.Key}";
            _objects.TryRemove(key, out _);
            return Task.CompletedTask;
        }
    }
}
|
||||
@@ -0,0 +1,458 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ValidationIntegrationTests.cs
|
||||
// Sprint: SPRINT_20260107_005_003_BE_sbom_validator_gate
|
||||
// Task: VG-010 - Integration tests for SBOM validation
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Storage.ObjectStore;
|
||||
using StellaOps.Scanner.Validation;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for SBOM validation pipeline and endpoints.
|
||||
/// These tests verify end-to-end validation during SBOM generation and export.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
public sealed class ValidationIntegrationTests : IAsyncLifetime
|
||||
{
|
||||
private ScannerApplicationFactory _factory = null!;
|
||||
private HttpClient _client = null!;
|
||||
|
||||
public ValueTask InitializeAsync()
{
    // Host with auth disabled and validation running in non-blocking Audit mode.
    _factory = new ScannerApplicationFactory().WithOverrides(
        configuration =>
        {
            configuration["scanner:authority:enabled"] = "false";
            configuration["scanner:validation:enabled"] = "true";
            configuration["scanner:validation:failOnError"] = "false";
            configuration["scanner:validation:mode"] = "Audit";
        },
        configureServices: services =>
        {
            // Swap real infrastructure for in-process test doubles.
            var objectStore = new InMemoryArtifactObjectStore();
            services.RemoveAll<IArtifactObjectStore>();
            services.AddSingleton<IArtifactObjectStore>(objectStore);

            var validator = new MockSbomValidator();
            services.RemoveAll<ISbomValidator>();
            services.AddSingleton<ISbomValidator>(validator);
        });

    _client = _factory.CreateClient();
    return ValueTask.CompletedTask;
}
|
||||
|
||||
public async ValueTask DisposeAsync()
{
    // Tear down in reverse order of creation: client first, then the host factory.
    _client.Dispose();
    await _factory.DisposeAsync();
}
|
||||
|
||||
[Fact]
public async Task SbomGeneration_WithValidationEnabled_ValidatesDocument()
{
    // Arrange: a well-formed CycloneDX 1.7 document.
    var scanId = await CreateScanAsync();

    var sbomJson = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "metadata": {
            "timestamp": "2026-01-09T12:00:00Z"
          },
          "components": [
            {
              "type": "library",
              "name": "test-package",
              "version": "1.0.0",
              "purl": "pkg:npm/test-package@1.0.0"
            }
          ]
        }
        """;

    // Act
    var body = new StringContent(sbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
    body.Headers.ContentType?.Parameters.Add(
        new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
    using var submission = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom")
    {
        Content = body,
    };

    var response = await _client.SendAsync(submission, TestContext.Current.CancellationToken);

    // Assert - SBOM should be accepted (validation in audit mode doesn't block)
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

    var payload = await response.Content.ReadFromJsonAsync<SbomAcceptedResponseDto>();
    Assert.NotNull(payload);
    Assert.Equal(1, payload!.ComponentCount);
}
|
||||
|
||||
[Fact]
public async Task SbomGeneration_InvalidDocument_ReturnsWarningsInAuditMode()
{
    // Arrange: document missing required fields (invalid CycloneDX).
    var scanId = await CreateScanAsync();

    var invalidSbomJson = """
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "components": [
            {
              "name": "incomplete-package"
            }
          ]
        }
        """;

    // Act
    var body = new StringContent(invalidSbomJson, Encoding.UTF8, "application/vnd.cyclonedx+json");
    body.Headers.ContentType?.Parameters.Add(
        new System.Net.Http.Headers.NameValueHeaderValue("version", "1.7"));
    using var submission = new HttpRequestMessage(HttpMethod.Post, $"/api/v1/scans/{scanId}/sbom")
    {
        Content = body,
    };

    var response = await _client.SendAsync(submission, TestContext.Current.CancellationToken);

    // Assert - In audit mode, even invalid documents are accepted with warnings
    Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
}
|
||||
|
||||
[Fact]
public async Task Validator_CycloneDxDocument_ValidatesFormat()
{
    // Arrange: drive the validator directly, bypassing the HTTP pipeline.
    var validator = new MockSbomValidator();
    var options = new SbomValidationOptions { Mode = SbomValidationMode.Audit };

    var sbomBytes = Encoding.UTF8.GetBytes("""
        {
          "bomFormat": "CycloneDX",
          "specVersion": "1.7",
          "version": 1,
          "components": []
        }
        """);

    // Act
    var result = await validator.ValidateAsync(
        sbomBytes,
        SbomFormat.CycloneDxJson,
        options,
        CancellationToken.None);

    // Assert: the format flows through to the result unchanged.
    Assert.NotNull(result);
    Assert.Equal(SbomFormat.CycloneDxJson, result.Format);
}
|
||||
|
||||
[Fact]
public async Task Validator_SpdxDocument_ValidatesFormat()
{
    // Arrange: minimal SPDX 3 JSON-LD payload.
    var validator = new MockSbomValidator();
    var options = new SbomValidationOptions { Mode = SbomValidationMode.Audit };

    var spdxBytes = Encoding.UTF8.GetBytes("""
        {
          "@context": "https://spdx.org/rdf/3.0.1/terms/",
          "spdxId": "urn:test:sbom:001",
          "name": "Test SBOM"
        }
        """);

    // Act
    var result = await validator.ValidateAsync(
        spdxBytes,
        SbomFormat.Spdx3JsonLd,
        options,
        CancellationToken.None);

    // Assert: the format flows through to the result unchanged.
    Assert.NotNull(result);
    Assert.Equal(SbomFormat.Spdx3JsonLd, result.Format);
}
|
||||
|
||||
[Fact]
public async Task Validator_SupportsFormat_ReturnsTrue()
{
    // Arrange
    var validator = new MockSbomValidator();

    // Act & Assert: the mock reports support for every queried format.
    Assert.True(validator.SupportsFormat(SbomFormat.CycloneDxJson));
    Assert.True(validator.SupportsFormat(SbomFormat.Spdx3JsonLd));
    Assert.True(validator.SupportsFormat(SbomFormat.Unknown));

    var info = await validator.GetInfoAsync(CancellationToken.None);
    Assert.True(info.IsAvailable);
    Assert.Contains(SbomFormat.CycloneDxJson, info.SupportedFormats);
}
|
||||
|
||||
[Fact]
public async Task Validator_WithErrors_ReturnsInvalidResult()
{
    // Arrange: mock configured to report validation errors.
    var failingValidator = new MockSbomValidator(returnErrors: true);
    var options = new SbomValidationOptions { Mode = SbomValidationMode.Strict };
    var sbomBytes = Encoding.UTF8.GetBytes("{}");

    // Act
    var result = await failingValidator.ValidateAsync(
        sbomBytes,
        SbomFormat.CycloneDxJson,
        options,
        CancellationToken.None);

    // Assert: errors must surface as an invalid result with a positive error count.
    Assert.NotNull(result);
    Assert.False(result.IsValid);
    Assert.True(result.ErrorCount > 0);
}
|
||||
|
||||
[Fact]
public async Task Validator_WithNoErrors_ReturnsValidResult()
{
    // Arrange: mock configured to report no validation errors.
    var passingValidator = new MockSbomValidator(returnErrors: false);
    var options = new SbomValidationOptions { Mode = SbomValidationMode.Lenient };
    var sbomBytes = Encoding.UTF8.GetBytes("{}");

    // Act
    var result = await passingValidator.ValidateAsync(
        sbomBytes,
        SbomFormat.CycloneDxJson,
        options,
        CancellationToken.None);

    // Assert: a clean run yields a valid result with zero errors.
    Assert.NotNull(result);
    Assert.True(result.IsValid);
    Assert.Equal(0, result.ErrorCount);
}
|
||||
|
||||
[Fact]
|
||||
public void FormatDetection_CycloneDxJson_DetectsCorrectFormat()
|
||||
{
|
||||
// Arrange
|
||||
var sbomJson = """{"bomFormat": "CycloneDX", "specVersion": "1.7"}""";
|
||||
var bytes = Encoding.UTF8.GetBytes(sbomJson);
|
||||
|
||||
// Act
|
||||
var format = SbomFormatDetector.Detect(bytes);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(SbomFormat.CycloneDxJson, format);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FormatDetection_SpdxJson_DetectsCorrectFormat()
|
||||
{
|
||||
// Arrange
|
||||
var spdxJson = """{"@context": "https://spdx.org/rdf/3.0.1/terms/"}""";
|
||||
var bytes = Encoding.UTF8.GetBytes(spdxJson);
|
||||
|
||||
// Act
|
||||
var format = SbomFormatDetector.Detect(bytes);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(SbomFormat.Spdx3JsonLd, format);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidationOptions_DefaultValues_AreCorrect()
|
||||
{
|
||||
// Arrange & Act
|
||||
var options = new SbomValidationOptions();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(SbomValidationMode.Strict, options.Mode);
|
||||
Assert.Equal(TimeSpan.FromSeconds(30), options.Timeout);
|
||||
}
|
||||
|
||||
private async Task<string> CreateScanAsync()
|
||||
{
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", new ScanSubmitRequest
|
||||
{
|
||||
Image = new ScanImageDescriptor
|
||||
{
|
||||
Reference = "example.com/validation-test:1.0",
|
||||
Digest = "sha256:validation123"
|
||||
}
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<ScanSubmitResponse>();
|
||||
Assert.NotNull(payload);
|
||||
return payload!.ScanId;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Mock SBOM validator for testing validation pipeline behavior.
|
||||
/// </summary>
|
||||
private sealed class MockSbomValidator : ISbomValidator
|
||||
{
|
||||
private readonly bool _returnErrors;
|
||||
|
||||
public MockSbomValidator(bool returnErrors = false)
|
||||
{
|
||||
_returnErrors = returnErrors;
|
||||
}
|
||||
|
||||
public Task<SbomValidationResult> ValidateAsync(
|
||||
byte[] sbomBytes,
|
||||
SbomFormat format,
|
||||
SbomValidationOptions? options = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var diagnostics = _returnErrors
|
||||
? ImmutableArray.Create(new SbomValidationDiagnostic
|
||||
{
|
||||
Severity = SbomValidationSeverity.Error,
|
||||
Message = "Mock validation error",
|
||||
Code = "MOCK-001",
|
||||
Path = "$.root"
|
||||
})
|
||||
: ImmutableArray<SbomValidationDiagnostic>.Empty;
|
||||
|
||||
return Task.FromResult(new SbomValidationResult
|
||||
{
|
||||
IsValid = !_returnErrors,
|
||||
Format = format,
|
||||
ValidatorName = "MockValidator",
|
||||
ValidatorVersion = "1.0.0",
|
||||
Diagnostics = diagnostics,
|
||||
ValidationDuration = TimeSpan.FromMilliseconds(10)
|
||||
});
|
||||
}
|
||||
|
||||
public bool SupportsFormat(SbomFormat format) => true;
|
||||
|
||||
public Task<ValidatorInfo> GetInfoAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
return Task.FromResult(new ValidatorInfo
|
||||
{
|
||||
Name = "MockValidator",
|
||||
Version = "1.0.0",
|
||||
IsAvailable = true,
|
||||
SupportedFormats = ImmutableArray.Create(
|
||||
SbomFormat.CycloneDxJson,
|
||||
SbomFormat.CycloneDxXml,
|
||||
SbomFormat.Spdx3JsonLd,
|
||||
SbomFormat.Spdx23Json)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// In-memory artifact store for testing without external dependencies.
|
||||
/// </summary>
|
||||
private sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
|
||||
{
|
||||
private readonly System.Collections.Concurrent.ConcurrentDictionary<string, byte[]> _objects = new(StringComparer.Ordinal);
|
||||
|
||||
public async Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(descriptor);
|
||||
ArgumentNullException.ThrowIfNull(content);
|
||||
|
||||
using var buffer = new MemoryStream();
|
||||
await content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var key = $"{descriptor.Bucket}:{descriptor.Key}";
|
||||
_objects[key] = buffer.ToArray();
|
||||
}
|
||||
|
||||
public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(descriptor);
|
||||
|
||||
var key = $"{descriptor.Bucket}:{descriptor.Key}";
|
||||
if (!_objects.TryGetValue(key, out var bytes))
|
||||
{
|
||||
return Task.FromResult<Stream?>(null);
|
||||
}
|
||||
|
||||
return Task.FromResult<Stream?>(new MemoryStream(bytes));
|
||||
}
|
||||
|
||||
public Task<bool> ExistsAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(descriptor);
|
||||
|
||||
var key = $"{descriptor.Bucket}:{descriptor.Key}";
|
||||
return Task.FromResult(_objects.ContainsKey(key));
|
||||
}
|
||||
|
||||
public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(descriptor);
|
||||
|
||||
var key = $"{descriptor.Bucket}:{descriptor.Key}";
|
||||
_objects.TryRemove(key, out _);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// SBOM format detector utility for content-based format detection.
/// Inspects well-known textual markers in the (assumed UTF-8) payload and
/// returns <see cref="SbomFormat.Unknown"/> when no marker matches.
/// </summary>
file static class SbomFormatDetector
{
    /// <summary>
    /// Detects the SBOM format from raw document bytes.
    /// </summary>
    /// <param name="bytes">Raw SBOM document content; may be null or empty.</param>
    /// <returns>The detected format, or <see cref="SbomFormat.Unknown"/>.</returns>
    public static SbomFormat Detect(byte[] bytes)
    {
        // Guard: a null or empty payload carries no detectable format
        // (previously Encoding.UTF8.GetString(null) would throw).
        if (bytes is null || bytes.Length == 0)
        {
            return SbomFormat.Unknown;
        }

        var content = Encoding.UTF8.GetString(bytes);

        // CycloneDX JSON: require both the "bomFormat" property and the "CycloneDX" value.
        if (content.Contains("\"bomFormat\"", StringComparison.OrdinalIgnoreCase) &&
            content.Contains("\"CycloneDX\"", StringComparison.OrdinalIgnoreCase))
        {
            // All CycloneDX JSON versions map to CycloneDxJson
            return SbomFormat.CycloneDxJson;
        }

        // SPDX 3.x JSON-LD: identified by its RDF @context URI.
        if (content.Contains("spdx.org/rdf/3.0", StringComparison.OrdinalIgnoreCase))
        {
            return SbomFormat.Spdx3JsonLd;
        }

        // SPDX 2.3 JSON: identified by the "spdxVersion" property.
        if (content.Contains("\"spdxVersion\"", StringComparison.OrdinalIgnoreCase))
        {
            return SbomFormat.Spdx23Json;
        }

        return SbomFormat.Unknown;
    }
}
|
||||
@@ -585,6 +585,42 @@ internal sealed class InMemoryLayerSbomService : ILayerSbomService
|
||||
// Not implemented for tests
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Returns a composed SBOM for the scan in the requested format.
    /// Test shim: returns the first stored layer SBOM whose scan id and format
    /// match, rather than actually composing layer fragments; null when none match.
    /// </summary>
    public Task<byte[]?> GetComposedSbomAsync(
        ScanId scanId,
        string format,
        CancellationToken cancellationToken = default)
    {
        // Return the first matching layer SBOM for testing purposes
        // NOTE(review): `key != default` assumes the key type's default value is a
        // meaningful "not found" sentinel (e.g. a value tuple/struct) — confirm
        // against the actual key type of _layerSboms.
        var key = _layerSboms.Keys.FirstOrDefault(k => k.ScanId == scanId.Value && k.Format == format);
        if (key != default && _layerSboms.TryGetValue(key, out var sbom))
        {
            return Task.FromResult<byte[]?>(sbom);
        }
        return Task.FromResult<byte[]?>(null);
    }
|
||||
|
||||
public Task<IReadOnlyList<SbomLayerFragment>?> GetLayerFragmentsAsync(
|
||||
ScanId scanId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_scans.TryGetValue(scanId.Value, out var scanData))
|
||||
{
|
||||
return Task.FromResult<IReadOnlyList<SbomLayerFragment>?>(null);
|
||||
}
|
||||
|
||||
var fragments = scanData.Layers
|
||||
.OrderBy(l => l.Order)
|
||||
.Select(l => new SbomLayerFragment
|
||||
{
|
||||
LayerDigest = l.LayerDigest,
|
||||
Order = l.Order,
|
||||
ComponentPurls = new List<string> { $"pkg:test/layer{l.Order}@1.0.0" }
|
||||
})
|
||||
.ToList();
|
||||
|
||||
return Task.FromResult<IReadOnlyList<SbomLayerFragment>?>(fragments);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,11 +1,31 @@
|
||||
using System;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
/// <summary>
/// Shared xUnit fixture owning the scanner application factory and, lazily, an
/// authentication-enabled variant of it.
/// </summary>
public sealed class ScannerApplicationFixture : IDisposable
{
    // Created on first CreateAuthenticatedClient() call; disposed with the fixture.
    private ScannerApplicationFactory? _authenticatedFactory;

    public ScannerApplicationFactory Factory { get; } = new();

    /// <summary>
    /// Creates an HTTP client with test authentication enabled.
    /// </summary>
    public HttpClient CreateAuthenticatedClient()
    {
        _authenticatedFactory ??= Factory.WithOverrides(useTestAuthentication: true);
        var client = _authenticatedFactory.CreateClient();
        // Add a valid test bearer token (must have at least 3 dot-separated segments per TestAuthenticationHandler)
        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "test.valid.token");
        return client;
    }

    // Fixed: the source contained two Dispose() members (an expression-bodied one
    // disposing only Factory, plus this one). Keep the variant that disposes the
    // authenticated factory as well, so it is not leaked.
    public void Dispose()
    {
        _authenticatedFactory?.Dispose();
        Factory.Dispose();
    }
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user