Add unit tests for SBOM ingestion and transformation
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Implement `SbomIngestServiceCollectionExtensionsTests` to verify the SBOM ingestion pipeline exports snapshots correctly.
- Create `SbomIngestTransformerTests` to ensure the transformation produces expected nodes and edges, including deduplication of license nodes and normalization of timestamps.
- Add `SbomSnapshotExporterTests` to test the export functionality for manifest, adjacency, nodes, and edges.
- Introduce `VexOverlayTransformerTests` to validate the transformation of VEX nodes and edges.
- Set up project file for the test project with necessary dependencies and configurations.
- Include JSON fixture files for testing purposes.
This commit is contained in:
master
2025-11-04 07:49:39 +02:00
parent f72c5c513a
commit 2eb6852d34
491 changed files with 39445 additions and 3917 deletions

View File

@@ -2,44 +2,53 @@ using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using System.Linq;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Analyzers.OS;
using StellaOps.Scanner.Analyzers.OS.Abstractions;
using StellaOps.Scanner.Analyzers.OS.Mapping;
using StellaOps.Scanner.Analyzers.OS.Plugin;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Worker.Options;
using StellaOps.Scanner.Analyzers.Lang.Internal;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Analyzers.OS;
using StellaOps.Scanner.Analyzers.OS.Abstractions;
using StellaOps.Scanner.Analyzers.OS.Mapping;
using StellaOps.Scanner.Analyzers.OS.Plugin;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Surface.Validation;
using StellaOps.Scanner.Worker.Options;
namespace StellaOps.Scanner.Worker.Processing;
internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
{
private readonly IServiceScopeFactory _scopeFactory;
private readonly IOSAnalyzerPluginCatalog _osCatalog;
private readonly ILanguageAnalyzerPluginCatalog _languageCatalog;
private readonly ScannerWorkerOptions _options;
private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger;
internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
{
private readonly IServiceScopeFactory _scopeFactory;
private readonly IOSAnalyzerPluginCatalog _osCatalog;
private readonly ILanguageAnalyzerPluginCatalog _languageCatalog;
private readonly ScannerWorkerOptions _options;
private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger;
private readonly ScannerWorkerMetrics _metrics;
private IReadOnlyList<string> _osPluginDirectories = Array.Empty<string>();
private IReadOnlyList<string> _languagePluginDirectories = Array.Empty<string>();
public CompositeScanAnalyzerDispatcher(
IServiceScopeFactory scopeFactory,
IOSAnalyzerPluginCatalog osCatalog,
ILanguageAnalyzerPluginCatalog languageCatalog,
IOptions<ScannerWorkerOptions> options,
ILogger<CompositeScanAnalyzerDispatcher> logger)
{
_scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
_osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog));
_languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
ILanguageAnalyzerPluginCatalog languageCatalog,
IOptions<ScannerWorkerOptions> options,
ILogger<CompositeScanAnalyzerDispatcher> logger,
ScannerWorkerMetrics metrics)
{
_scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
_osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog));
_languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
LoadPlugins();
}
@@ -131,72 +140,126 @@ internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
}
}
/// <summary>
/// Runs each language analyzer against the job workspace and publishes the
/// collected results and layer fragments onto the scan analysis.
/// </summary>
private async Task ExecuteLanguageAnalyzersAsync(
    ScanJobContext context,
    IReadOnlyList<ILanguageAnalyzer> analyzers,
    IServiceProvider services,
    string? workspacePath,
    CancellationToken cancellationToken)
{
    // Without a workspace there is nothing for language analyzers to inspect.
    if (workspacePath is null)
    {
        _logger.LogWarning(
            "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.",
            _options.Analyzers.WorkspaceMetadataKey,
            context.JobId);
        return;
    }

    var analyzerContext = new LanguageAnalyzerContext(
        workspacePath,
        context.TimeProvider,
        LanguageUsageHints.Empty,
        services);

    var resultsByAnalyzer = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase);
    var layerFragments = new List<LayerComponentFragment>();

    foreach (var analyzer in analyzers)
    {
        cancellationToken.ThrowIfCancellationRequested();

        try
        {
            // Each analyzer gets its own single-analyzer engine run.
            var engine = new LanguageAnalyzerEngine(new[] { analyzer });
            var analysis = await engine.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false);
            resultsByAnalyzer[analyzer.Id] = analysis;

            // Only components attributed to this analyzer feed its layer fragment.
            var ownedComponents = analysis.Components
                .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal))
                .ToArray();
            if (ownedComponents.Length > 0)
            {
                layerFragments.Add(LanguageComponentMapper.ToLayerFragment(analyzer.Id, ownedComponents));
            }
        }
        catch (Exception ex)
        {
            // A single failing analyzer must not abort the remaining ones.
            _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId);
        }
    }

    if (resultsByAnalyzer.Count == 0 && layerFragments.Count == 0)
    {
        return;
    }

    if (resultsByAnalyzer.Count > 0)
    {
        context.Analysis.Set(
            ScanAnalysisKeys.LanguageAnalyzerResults,
            new ReadOnlyDictionary<string, LanguageAnalyzerResult>(resultsByAnalyzer));
    }

    if (layerFragments.Count > 0)
    {
        var immutableFragments = ImmutableArray.CreateRange(layerFragments);
        context.Analysis.AppendLayerFragments(immutableFragments);
        context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments);
    }
}
/// <summary>
/// Runs each language analyzer against the job workspace, reusing cached results keyed by a
/// workspace content fingerprint, records cache hit/miss metrics, and publishes the collected
/// results and layer fragments onto the scan analysis.
/// </summary>
private async Task ExecuteLanguageAnalyzersAsync(
    ScanJobContext context,
    IReadOnlyList<ILanguageAnalyzer> analyzers,
    IServiceProvider services,
    string? workspacePath,
    CancellationToken cancellationToken)
{
    if (workspacePath is null)
    {
        // No workspace path was resolved from job metadata: nothing to analyze, bail with a warning.
        _logger.LogWarning(
            "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.",
            _options.Analyzers.WorkspaceMetadataKey,
            context.JobId);
        return;
    }
    // Validate the surface environment before doing any analyzer work.
    // NOTE(review): EnsureAsync presumably throws on validation failure — confirm its contract.
    var surfaceEnvironment = services.GetRequiredService<ISurfaceEnvironment>();
    var validatorRunner = services.GetRequiredService<ISurfaceValidatorRunner>();
    // Contextual properties surfaced to validators (and, presumably, diagnostics).
    var validationProperties = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase)
    {
        ["jobId"] = context.JobId,
        ["scanId"] = context.ScanId,
        ["workspacePath"] = workspacePath,
        ["analyzerCount"] = analyzers.Count
    };
    var validationContext = SurfaceValidationContext.Create(
        services,
        "Scanner.Worker.LanguageAnalyzers",
        surfaceEnvironment.Settings,
        validationProperties);
    await validatorRunner.EnsureAsync(validationContext, cancellationToken).ConfigureAwait(false);
    // Fingerprint the workspace so per-analyzer results can be looked up in the surface cache.
    string workspaceFingerprint;
    try
    {
        workspaceFingerprint = LanguageWorkspaceFingerprint.Compute(workspacePath, cancellationToken);
    }
    catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
    {
        // Filesystem prevented content fingerprinting; fall back to hashing the path string.
        // NOTE(review): the fallback key ignores workspace contents, so a stale cache entry
        // could be reused for a changed workspace at the same path — confirm this is acceptable.
        _logger.LogWarning(
            ex,
            "Failed to compute workspace fingerprint for job {JobId}; falling back to workspace path hash.",
            context.JobId);
        var fallbackBytes = Encoding.UTF8.GetBytes(workspacePath);
        workspaceFingerprint = Convert.ToHexString(SHA256.HashData(fallbackBytes)).ToLowerInvariant();
    }
    // Tenant-scoped cache adapter wrapping the shared surface cache.
    var cache = services.GetRequiredService<ISurfaceCache>();
    var cacheAdapter = new LanguageAnalyzerSurfaceCache(cache, surfaceEnvironment.Settings.Tenant);
    var usageHints = LanguageUsageHints.Empty;
    var analyzerContext = new LanguageAnalyzerContext(workspacePath, context.TimeProvider, usageHints, services);
    var results = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase);
    var fragments = new List<LayerComponentFragment>();
    foreach (var analyzer in analyzers)
    {
        cancellationToken.ThrowIfCancellationRequested();
        try
        {
            // One engine per analyzer; the factory lambda only runs on a cache miss.
            var engine = new LanguageAnalyzerEngine(new[] { analyzer });
            var cacheEntry = await cacheAdapter.GetOrCreateAsync(
                _logger,
                analyzer.Id,
                workspaceFingerprint,
                token => engine.AnalyzeAsync(analyzerContext, token),
                cancellationToken)
                .ConfigureAwait(false);
            var result = cacheEntry.Result;
            // Record whether this analyzer's result came from the cache or was freshly computed.
            if (cacheEntry.IsHit)
            {
                _metrics.RecordLanguageCacheHit(context, analyzer.Id);
            }
            else
            {
                _metrics.RecordLanguageCacheMiss(context, analyzer.Id);
            }
            results[analyzer.Id] = result;
            // Only components attributed to this analyzer feed its layer fragment.
            var components = result.Components
                .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal))
                .ToArray();
            if (components.Length > 0)
            {
                var fragment = LanguageComponentMapper.ToLayerFragment(analyzer.Id, components);
                fragments.Add(fragment);
            }
        }
        catch (Exception ex)
        {
            // A single failing analyzer must not abort the remaining ones.
            _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId);
        }
    }
    if (results.Count == 0 && fragments.Count == 0)
    {
        return;
    }
    if (results.Count > 0)
    {
        // Expose the per-analyzer results as a read-only map on the scan analysis.
        context.Analysis.Set(
            ScanAnalysisKeys.LanguageAnalyzerResults,
            new ReadOnlyDictionary<string, LanguageAnalyzerResult>(results));
    }
    if (fragments.Count > 0)
    {
        var immutableFragments = ImmutableArray.CreateRange(fragments);
        context.Analysis.AppendLayerFragments(immutableFragments);
        context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments);
    }
}
private void LoadPlugins()
{