Add unit tests for SBOM ingestion and transformation
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Implement `SbomIngestServiceCollectionExtensionsTests` to verify the SBOM ingestion pipeline exports snapshots correctly.
- Create `SbomIngestTransformerTests` to ensure the transformation produces the expected nodes and edges, including deduplication of license nodes and normalization of timestamps.
- Add `SbomSnapshotExporterTests` to cover export of the manifest, adjacency, nodes, and edges.
- Introduce `VexOverlayTransformerTests` to validate the transformation of VEX nodes and edges.
- Set up the test project file with the necessary dependencies and configuration.
- Include JSON fixture files used by the tests.
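The deduplication and normalization behaviour called out for `SbomIngestTransformerTests` is the kind of assertion the suite pins down. Below is a minimal xunit-style sketch of that check; the node type, builder, and fixture values are hypothetical stand-ins rather than the actual SBOM ingestion types, which are not shown here.

```csharp
using System;
using System.Linq;
using Xunit;

// Hypothetical stand-ins: the real transformer and graph node types live in the
// SBOM ingestion module and may have a different shape.
public sealed record GraphNode(string Kind, string Key, DateTimeOffset Timestamp);

public static class LicenseNodes
{
    // Collapse duplicate license identifiers and normalize timestamps to UTC,
    // mirroring the behaviour the transformer tests are described as verifying.
    public static GraphNode[] Build(params (string Key, DateTimeOffset Seen)[] licenses)
        => licenses
            .GroupBy(license => license.Key, StringComparer.OrdinalIgnoreCase)
            .Select(group => new GraphNode(
                "license",
                group.Key.ToLowerInvariant(),
                group.Min(license => license.Seen).ToUniversalTime()))
            .ToArray();
}

public sealed class LicenseNodeDeduplicationTests
{
    [Fact]
    public void DuplicateLicenseEntriesCollapseToOneNormalizedNode()
    {
        var nodes = LicenseNodes.Build(
            ("MIT", DateTimeOffset.Parse("2025-11-02T10:00:00+02:00")),
            ("mit", DateTimeOffset.Parse("2025-11-02T09:00:00+01:00")));

        var node = Assert.Single(nodes);
        Assert.Equal("mit", node.Key);
        Assert.Equal(TimeSpan.Zero, node.Timestamp.Offset); // timestamps normalized to UTC
    }
}
```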
@@ -10,8 +10,10 @@ public sealed class ScannerWorkerMetrics
     private readonly Histogram<double> _queueLatencyMs;
     private readonly Histogram<double> _jobDurationMs;
     private readonly Histogram<double> _stageDurationMs;
-    private readonly Counter<long> _jobsCompleted;
-    private readonly Counter<long> _jobsFailed;
+    private readonly Counter<long> _jobsCompleted;
+    private readonly Counter<long> _jobsFailed;
+    private readonly Counter<long> _languageCacheHits;
+    private readonly Counter<long> _languageCacheMisses;

     public ScannerWorkerMetrics()
     {
@@ -27,12 +29,18 @@ public sealed class ScannerWorkerMetrics
             "scanner_worker_stage_duration_ms",
             unit: "ms",
             description: "Stage execution duration per job.");
-        _jobsCompleted = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
-            "scanner_worker_jobs_completed_total",
-            description: "Number of successfully completed scan jobs.");
-        _jobsFailed = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
-            "scanner_worker_jobs_failed_total",
-            description: "Number of scan jobs that failed permanently.");
+        _jobsCompleted = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
+            "scanner_worker_jobs_completed_total",
+            description: "Number of successfully completed scan jobs.");
+        _jobsFailed = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
+            "scanner_worker_jobs_failed_total",
+            description: "Number of scan jobs that failed permanently.");
+        _languageCacheHits = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
+            "scanner_worker_language_cache_hits_total",
+            description: "Number of language analyzer cache hits encountered by the worker.");
+        _languageCacheMisses = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
+            "scanner_worker_language_cache_misses_total",
+            description: "Number of language analyzer cache misses encountered by the worker.");
     }

     public void RecordQueueLatency(ScanJobContext context, TimeSpan latency)
@@ -70,19 +78,29 @@ public sealed class ScannerWorkerMetrics
         _jobsCompleted.Add(1, CreateTags(context));
     }

-    public void IncrementJobFailed(ScanJobContext context, string failureReason)
-    {
-        _jobsFailed.Add(1, CreateTags(context, failureReason: failureReason));
-    }
-
-    private static KeyValuePair<string, object?>[] CreateTags(ScanJobContext context, string? stage = null, string? failureReason = null)
-    {
-        var tags = new List<KeyValuePair<string, object?>>(stage is null ? 5 : 6)
-        {
-            new("job.id", context.JobId),
-            new("scan.id", context.ScanId),
-            new("attempt", context.Lease.Attempt),
-        };
+    public void IncrementJobFailed(ScanJobContext context, string failureReason)
+    {
+        _jobsFailed.Add(1, CreateTags(context, failureReason: failureReason));
+    }
+
+    public void RecordLanguageCacheHit(ScanJobContext context, string analyzerId)
+    {
+        _languageCacheHits.Add(1, CreateTags(context, analyzerId: analyzerId));
+    }
+
+    public void RecordLanguageCacheMiss(ScanJobContext context, string analyzerId)
+    {
+        _languageCacheMisses.Add(1, CreateTags(context, analyzerId: analyzerId));
+    }
+
+    private static KeyValuePair<string, object?>[] CreateTags(ScanJobContext context, string? stage = null, string? failureReason = null, string? analyzerId = null)
+    {
+        var tags = new List<KeyValuePair<string, object?>>(stage is null ? 5 : 6)
+        {
+            new("job.id", context.JobId),
+            new("scan.id", context.ScanId),
+            new("attempt", context.Lease.Attempt),
+        };

         if (context.Lease.Metadata.TryGetValue("queue", out var queueName) && !string.IsNullOrWhiteSpace(queueName))
         {
@@ -99,11 +117,16 @@ public sealed class ScannerWorkerMetrics
             tags.Add(new KeyValuePair<string, object?>("stage", stage));
         }

-        if (!string.IsNullOrWhiteSpace(failureReason))
-        {
-            tags.Add(new KeyValuePair<string, object?>("reason", failureReason));
-        }
-
-        return tags.ToArray();
-    }
-}
+        if (!string.IsNullOrWhiteSpace(failureReason))
+        {
+            tags.Add(new KeyValuePair<string, object?>("reason", failureReason));
+        }
+
+        if (!string.IsNullOrWhiteSpace(analyzerId))
+        {
+            tags.Add(new KeyValuePair<string, object?>("analyzer.id", analyzerId));
+        }
+
+        return tags.ToArray();
+    }
+}
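The two cache counters introduced above can be exercised locally with a `MeterListener` from `System.Diagnostics.Metrics`, subscribing by instrument name. The instrument names below are the ones registered in this diff; the meter name is a placeholder, since the name behind `ScannerWorkerInstrumentation.Meter` is not visible here.

```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;

// Placeholder meter standing in for ScannerWorkerInstrumentation.Meter.
using var meter = new Meter("scanner.worker.sample");
var cacheHits = meter.CreateCounter<long>(
    "scanner_worker_language_cache_hits_total",
    description: "Number of language analyzer cache hits encountered by the worker.");

using var listener = new MeterListener();
listener.InstrumentPublished = (instrument, l) =>
{
    // Only observe the cache counters added in this commit.
    if (instrument.Name.StartsWith("scanner_worker_language_cache_", StringComparison.Ordinal))
    {
        l.EnableMeasurementEvents(instrument);
    }
};
listener.SetMeasurementEventCallback<long>((instrument, value, tags, state) =>
    Console.WriteLine($"{instrument.Name} += {value}"));
listener.Start();

cacheHits.Add(1, new KeyValuePair<string, object?>("analyzer.id", "python"));
```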
@@ -2,44 +2,53 @@ using System;
 using System.Collections.Generic;
 using System.Collections.Immutable;
 using System.Collections.ObjectModel;
-using System.IO;
-using System.Linq;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Options;
+using System.Linq;
+using System.IO;
+using System.Security.Cryptography;
+using System.Text;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
 using StellaOps.Scanner.Analyzers.Lang;
-using StellaOps.Scanner.Analyzers.Lang.Plugin;
-using StellaOps.Scanner.Analyzers.OS;
-using StellaOps.Scanner.Analyzers.OS.Abstractions;
-using StellaOps.Scanner.Analyzers.OS.Mapping;
-using StellaOps.Scanner.Analyzers.OS.Plugin;
-using StellaOps.Scanner.Core.Contracts;
-using StellaOps.Scanner.Worker.Options;
+using StellaOps.Scanner.Analyzers.Lang.Internal;
+using StellaOps.Scanner.Analyzers.Lang.Plugin;
+using StellaOps.Scanner.Analyzers.OS;
+using StellaOps.Scanner.Analyzers.OS.Abstractions;
+using StellaOps.Scanner.Analyzers.OS.Mapping;
+using StellaOps.Scanner.Analyzers.OS.Plugin;
+using StellaOps.Scanner.Core.Contracts;
+using StellaOps.Scanner.Surface.Env;
+using StellaOps.Scanner.Surface.FS;
+using StellaOps.Scanner.Surface.Validation;
+using StellaOps.Scanner.Worker.Options;

 namespace StellaOps.Scanner.Worker.Processing;

-internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
-{
-    private readonly IServiceScopeFactory _scopeFactory;
-    private readonly IOSAnalyzerPluginCatalog _osCatalog;
-    private readonly ILanguageAnalyzerPluginCatalog _languageCatalog;
-    private readonly ScannerWorkerOptions _options;
-    private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger;
+internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
+{
+    private readonly IServiceScopeFactory _scopeFactory;
+    private readonly IOSAnalyzerPluginCatalog _osCatalog;
+    private readonly ILanguageAnalyzerPluginCatalog _languageCatalog;
+    private readonly ScannerWorkerOptions _options;
+    private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger;
+    private readonly ScannerWorkerMetrics _metrics;
     private IReadOnlyList<string> _osPluginDirectories = Array.Empty<string>();
     private IReadOnlyList<string> _languagePluginDirectories = Array.Empty<string>();

     public CompositeScanAnalyzerDispatcher(
         IServiceScopeFactory scopeFactory,
         IOSAnalyzerPluginCatalog osCatalog,
-        ILanguageAnalyzerPluginCatalog languageCatalog,
-        IOptions<ScannerWorkerOptions> options,
-        ILogger<CompositeScanAnalyzerDispatcher> logger)
-    {
-        _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
-        _osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog));
-        _languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog));
-        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
-        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+        ILanguageAnalyzerPluginCatalog languageCatalog,
+        IOptions<ScannerWorkerOptions> options,
+        ILogger<CompositeScanAnalyzerDispatcher> logger,
+        ScannerWorkerMetrics metrics)
+    {
+        _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
+        _osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog));
+        _languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog));
+        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
+        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));

         LoadPlugins();
     }
@@ -131,72 +140,126 @@ internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher
         }
     }

-    private async Task ExecuteLanguageAnalyzersAsync(
-        ScanJobContext context,
-        IReadOnlyList<ILanguageAnalyzer> analyzers,
-        IServiceProvider services,
-        string? workspacePath,
-        CancellationToken cancellationToken)
-    {
-        if (workspacePath is null)
-        {
-            _logger.LogWarning(
-                "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.",
-                _options.Analyzers.WorkspaceMetadataKey,
-                context.JobId);
-            return;
-        }
-
-        var usageHints = LanguageUsageHints.Empty;
-        var analyzerContext = new LanguageAnalyzerContext(workspacePath, context.TimeProvider, usageHints, services);
-        var results = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase);
-        var fragments = new List<LayerComponentFragment>();
-
-        foreach (var analyzer in analyzers)
-        {
-            cancellationToken.ThrowIfCancellationRequested();
-
-            try
-            {
-                var engine = new LanguageAnalyzerEngine(new[] { analyzer });
-                var result = await engine.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false);
-                results[analyzer.Id] = result;
-
-                var components = result.Components
-                    .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal))
-                    .ToArray();
-
-                if (components.Length > 0)
-                {
-                    var fragment = LanguageComponentMapper.ToLayerFragment(analyzer.Id, components);
-                    fragments.Add(fragment);
-                }
-            }
-            catch (Exception ex)
-            {
-                _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId);
-            }
-        }
-
-        if (results.Count == 0 && fragments.Count == 0)
-        {
-            return;
-        }
-
-        if (results.Count > 0)
-        {
-            context.Analysis.Set(
-                ScanAnalysisKeys.LanguageAnalyzerResults,
-                new ReadOnlyDictionary<string, LanguageAnalyzerResult>(results));
-        }
-
-        if (fragments.Count > 0)
-        {
-            var immutableFragments = ImmutableArray.CreateRange(fragments);
-            context.Analysis.AppendLayerFragments(immutableFragments);
-            context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments);
-        }
-    }
+    private async Task ExecuteLanguageAnalyzersAsync(
+        ScanJobContext context,
+        IReadOnlyList<ILanguageAnalyzer> analyzers,
+        IServiceProvider services,
+        string? workspacePath,
+        CancellationToken cancellationToken)
+    {
+        if (workspacePath is null)
+        {
+            _logger.LogWarning(
+                "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.",
+                _options.Analyzers.WorkspaceMetadataKey,
+                context.JobId);
+            return;
+        }
+
+        var surfaceEnvironment = services.GetRequiredService<ISurfaceEnvironment>();
+        var validatorRunner = services.GetRequiredService<ISurfaceValidatorRunner>();
+
+        var validationProperties = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase)
+        {
+            ["jobId"] = context.JobId,
+            ["scanId"] = context.ScanId,
+            ["workspacePath"] = workspacePath,
+            ["analyzerCount"] = analyzers.Count
+        };
+
+        var validationContext = SurfaceValidationContext.Create(
+            services,
+            "Scanner.Worker.LanguageAnalyzers",
+            surfaceEnvironment.Settings,
+            validationProperties);
+
+        await validatorRunner.EnsureAsync(validationContext, cancellationToken).ConfigureAwait(false);
+
+        string workspaceFingerprint;
+        try
+        {
+            workspaceFingerprint = LanguageWorkspaceFingerprint.Compute(workspacePath, cancellationToken);
+        }
+        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
+        {
+            _logger.LogWarning(
+                ex,
+                "Failed to compute workspace fingerprint for job {JobId}; falling back to workspace path hash.",
+                context.JobId);
+
+            var fallbackBytes = Encoding.UTF8.GetBytes(workspacePath);
+            workspaceFingerprint = Convert.ToHexString(SHA256.HashData(fallbackBytes)).ToLowerInvariant();
+        }
+
+        var cache = services.GetRequiredService<ISurfaceCache>();
+        var cacheAdapter = new LanguageAnalyzerSurfaceCache(cache, surfaceEnvironment.Settings.Tenant);
+
+        var usageHints = LanguageUsageHints.Empty;
+        var analyzerContext = new LanguageAnalyzerContext(workspacePath, context.TimeProvider, usageHints, services);
+        var results = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase);
+        var fragments = new List<LayerComponentFragment>();
+
+        foreach (var analyzer in analyzers)
+        {
+            cancellationToken.ThrowIfCancellationRequested();
+
+            try
+            {
+                var engine = new LanguageAnalyzerEngine(new[] { analyzer });
+                var cacheEntry = await cacheAdapter.GetOrCreateAsync(
+                    _logger,
+                    analyzer.Id,
+                    workspaceFingerprint,
+                    token => engine.AnalyzeAsync(analyzerContext, token),
+                    cancellationToken)
+                    .ConfigureAwait(false);
+                var result = cacheEntry.Result;
+                if (cacheEntry.IsHit)
+                {
+                    _metrics.RecordLanguageCacheHit(context, analyzer.Id);
+                }
+                else
+                {
+                    _metrics.RecordLanguageCacheMiss(context, analyzer.Id);
+                }
+
+                results[analyzer.Id] = result;
+
+                var components = result.Components
+                    .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal))
+                    .ToArray();
+
+                if (components.Length > 0)
+                {
+                    var fragment = LanguageComponentMapper.ToLayerFragment(analyzer.Id, components);
+                    fragments.Add(fragment);
+                }
+            }
+            catch (Exception ex)
+            {
+                _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId);
+            }
+        }
+
+        if (results.Count == 0 && fragments.Count == 0)
+        {
+            return;
+        }
+
+        if (results.Count > 0)
+        {
+            context.Analysis.Set(
+                ScanAnalysisKeys.LanguageAnalyzerResults,
+                new ReadOnlyDictionary<string, LanguageAnalyzerResult>(results));
+        }
+
+        if (fragments.Count > 0)
+        {
+            var immutableFragments = ImmutableArray.CreateRange(fragments);
+            context.Analysis.AppendLayerFragments(immutableFragments);
+            context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments);
+        }
+    }

     private void LoadPlugins()
     {
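Only the call shape of the surface cache is visible in the hunk above: `GetOrCreateAsync(...)` returns an entry exposing `Result` and `IsHit`, which is what feeds the new hit/miss counters. A simplified in-memory sketch of that shape, keyed by analyzer id and workspace fingerprint, follows; the real `LanguageAnalyzerSurfaceCache` is backed by Surface.FS and also takes the logger, so treat these types and the signature as illustrative only.

```csharp
using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical stand-in for the surface cache adapter used by the dispatcher.
public sealed record CacheEntry<T>(T Result, bool IsHit);

public sealed class InMemoryAnalyzerCache<T>
{
    private readonly ConcurrentDictionary<(string AnalyzerId, string Fingerprint), T> _entries = new();

    public async Task<CacheEntry<T>> GetOrCreateAsync(
        string analyzerId,
        string workspaceFingerprint,
        Func<CancellationToken, Task<T>> factory,
        CancellationToken cancellationToken)
    {
        var key = (analyzerId, workspaceFingerprint);
        if (_entries.TryGetValue(key, out var cached))
        {
            return new CacheEntry<T>(cached, IsHit: true);  // would drive RecordLanguageCacheHit
        }

        var created = await factory(cancellationToken).ConfigureAwait(false);
        _entries.TryAdd(key, created);
        return new CacheEntry<T>(created, IsHit: false);    // would drive RecordLanguageCacheMiss
    }
}
```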
@@ -2,6 +2,7 @@

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCAN-REPLAY-186-002 | TODO | Scanner Worker Guild | REPLAY-CORE-185-001 | Enforce deterministic analyzer execution when consuming replay input bundles, emit layer Merkle metadata, and author `docs/modules/scanner/deterministic-execution.md` summarising invariants from `docs/replay/DETERMINISTIC_REPLAY.md` Section 4. | Replay mode analyzers pass determinism tests; new doc merged; integration fixtures updated. |
| SCANNER-SURFACE-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-FS-02 | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments.<br>2025-11-02: Draft Surface.FS manifests emitted for sample scans; telemetry counters under review. | Integration tests prove cache entries exist; telemetry counters exported. |
| SCANNER-ENV-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-ENV-02 | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints.<br>2025-11-02: Worker bootstrap now resolves cache roots via helper; warning path documented; smoke tests running. | Worker boots with helper; misconfiguration warnings documented; smoke tests updated. |
| SCANNER-SECRETS-01 | DOING (2025-11-02) | Scanner Worker Guild, Security Guild | SURFACE-SECRETS-02 | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution.<br>2025-11-02: Surface.Secrets provider wired for CAS token retrieval; integration tests added. | Secrets fetched via shared provider; legacy secret code removed; integration tests cover rotation. |