// Source: git.stella-ops.org/src/__Libraries/__Tests/StellaOps.Spdx3.Tests/Spdx3ParserBenchmarks.cs
// (357 lines, 12 KiB, C#)
// <copyright file="Spdx3ParserBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
// </copyright>
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Spdx3.JsonLd;
namespace StellaOps.Spdx3.Tests;
/// <summary>
/// Performance benchmarks for SPDX 3.0.1 parser.
/// Task: SP3-018 - Validate parsing performance for various document sizes.
/// </summary>
/// <remarks>
/// <para>
/// These tests measure parsing performance and compare against 2.x parser baseline.
/// Target: SPDX 3.0.1 parser should be within 2x of 2.x parser performance.
/// </para>
/// <para>
/// To run as proper benchmarks, consider using BenchmarkDotNet in a dedicated
/// benchmark project (StellaOps.Spdx3.Benchmarks).
/// </para>
/// </remarks>
[Trait("Category", "Performance")]
public sealed class Spdx3ParserBenchmarks : IDisposable
{
    private const int WarmupIterations = 3;
    private const int BenchmarkIterations = 10;

    // Cached once: allocating JsonSerializerOptions per serialization defeats its
    // internal metadata caching (CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new() { WriteIndented = false };

    private readonly Spdx3Parser _parser;
    private readonly MemoryCache _cache;
    private readonly string _tempDir;

    /// <summary>
    /// Builds a parser wired to a local-only context resolver (remote context
    /// fetching disabled) and creates a unique temp directory for the generated
    /// benchmark documents.
    /// </summary>
    public Spdx3ParserBenchmarks()
    {
        _cache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 1000 });
        var httpClientFactory = new Mock<IHttpClientFactory>();
        var options = Options.Create(new Spdx3ContextResolverOptions { AllowRemoteContexts = false });
        var resolver = new Spdx3ContextResolver(
            httpClientFactory.Object,
            _cache,
            NullLogger<Spdx3ContextResolver>.Instance,
            options,
            TimeProvider.System);
        _parser = new Spdx3Parser(resolver, NullLogger<Spdx3Parser>.Instance);
        _tempDir = Path.Combine(Path.GetTempPath(), $"spdx3-bench-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);
    }

    [Fact]
    public async Task Benchmark_Parse100Elements_CompletesWithinTarget()
        => await RunParseBenchmarkAsync(100, maxTargetMs: 100.0); // Target: < 100ms for 100 elements

    [Fact]
    public async Task Benchmark_Parse1000Elements_CompletesWithinTarget()
        => await RunParseBenchmarkAsync(1000, maxTargetMs: 500.0); // Target: < 500ms for 1000 elements

    [Fact]
    public async Task Benchmark_Parse10000Elements_CompletesWithinTarget()
        => await RunParseBenchmarkAsync(10000, maxTargetMs: 5000.0); // Target: < 5000ms for 10000 elements

    /// <summary>
    /// Shared benchmark driver: generates a document of <paramref name="elementCount"/>
    /// packages, warms up the parser, then asserts the average parse time over
    /// <see cref="BenchmarkIterations"/> runs stays under <paramref name="maxTargetMs"/>.
    /// </summary>
    /// <param name="elementCount">Number of package elements in the generated document.</param>
    /// <param name="maxTargetMs">Maximum acceptable average parse time, in milliseconds.</param>
    private async Task RunParseBenchmarkAsync(int elementCount, double maxTargetMs)
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var documentPath = GenerateSpdx3Document(elementCount);

        // Warmup: let JIT compilation and context-resolver caching settle before timing.
        for (var i = 0; i < WarmupIterations; i++)
        {
            await _parser.ParseAsync(documentPath, ct);
        }

        // Act
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < BenchmarkIterations; i++)
        {
            var result = await _parser.ParseAsync(documentPath, ct);
            Assert.True(result.Success, $"Parse failed: {string.Join(", ", result.Errors.Select(e => e.Message))}");
        }
        sw.Stop();

        // Assert
        var avgMs = sw.Elapsed.TotalMilliseconds / BenchmarkIterations;
        Assert.True(
            avgMs < maxTargetMs,
            $"{elementCount}-element parse averaged {avgMs:F2}ms, target < {maxTargetMs}ms");

        // Log for visibility
        TestContext.Current.TestOutputHelper?.WriteLine(
            $"{elementCount}-element parse: {avgMs:F2}ms average over {BenchmarkIterations} iterations");
    }

    [Fact]
    public async Task Benchmark_ScalingCharacteristics_SubLinear()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var sizes = new[] { 100, 500, 1000, 2000 };
        var timings = new Dictionary<int, double>();
        foreach (var size in sizes)
        {
            var documentPath = GenerateSpdx3Document(size);

            // Warmup
            await _parser.ParseAsync(documentPath, ct);

            // Measure
            var sw = Stopwatch.StartNew();
            for (var i = 0; i < 5; i++)
            {
                await _parser.ParseAsync(documentPath, ct);
            }
            sw.Stop();
            timings[size] = sw.Elapsed.TotalMilliseconds / 5;
        }

        // Assert: scaling should be roughly linear (within 2.5x tolerance).
        // If 100 elements takes T, then 1000 should take ~10T (not 100T).
        var time100 = timings[100];
        var time1000 = timings[1000];
        var scalingFactor = time1000 / time100;
        var expectedScaling = 10.0; // Linear scaling
        var maxScaling = expectedScaling * 2.5; // Allow 2.5x tolerance
        Assert.True(
            scalingFactor < maxScaling,
            $"Scaling factor {scalingFactor:F2}x exceeds target {maxScaling:F2}x (expected ~{expectedScaling:F2}x)");

        TestContext.Current.TestOutputHelper?.WriteLine("Scaling results:");
        foreach (var (size, time) in timings)
        {
            TestContext.Current.TestOutputHelper?.WriteLine($"  {size} elements: {time:F2}ms");
        }

        TestContext.Current.TestOutputHelper?.WriteLine($"Scaling factor (100 -> 1000): {scalingFactor:F2}x");
    }

    [Fact]
    public async Task Benchmark_MemoryUsage_StaysWithinBounds()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var documentPath = GenerateSpdx3Document(1000);

        // Force GC before measurement so the baseline excludes earlier garbage.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        var memBefore = GC.GetTotalMemory(true);

        // Act
        for (var i = 0; i < 10; i++)
        {
            var result = await _parser.ParseAsync(documentPath, ct);
            Assert.True(result.Success, $"Parse failed: {string.Join(", ", result.Errors.Select(e => e.Message))}");
        }

        // Allow time for finalization
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        var memAfter = GC.GetTotalMemory(true);
        var memDelta = memAfter - memBefore;

        // Assert: memory growth should be bounded (not leaking)
        // Allow up to 50MB growth for 10 parses of 1000-element docs
        var maxMemGrowthBytes = 50 * 1024 * 1024L;
        Assert.True(
            memDelta < maxMemGrowthBytes,
            $"Memory grew by {memDelta / 1024.0 / 1024.0:F2}MB, target < {maxMemGrowthBytes / 1024.0 / 1024.0:F2}MB");
        TestContext.Current.TestOutputHelper?.WriteLine(
            $"Memory growth after 10 parses: {memDelta / 1024.0 / 1024.0:F2}MB");
    }

    /// <summary>
    /// Generates an SPDX 3.0.1 JSON-LD document with the specified number of package elements.
    /// Every tenth package also gets a dependsOn relationship to its predecessor so the
    /// document shape resembles a real dependency graph. Returns the path to the written file.
    /// </summary>
    /// <param name="packageCount">Number of package elements to emit into the @graph array.</param>
    /// <returns>Absolute path of the generated JSON file inside the test's temp directory.</returns>
    private string GenerateSpdx3Document(int packageCount)
    {
        var graph = new JsonArray();

        // Add SpdxDocument root
        var document = new JsonObject
        {
            ["@type"] = "SpdxDocument",
            ["@id"] = "https://stellaops.org/spdx/benchmark-doc",
            ["spdxId"] = "https://stellaops.org/spdx/benchmark-doc",
            ["name"] = $"Benchmark Document ({packageCount} packages)",
            ["specVersion"] = "3.0.1",
            ["creationInfo"] = new JsonObject
            {
                ["@type"] = "CreationInfo",
                ["created"] = "2026-01-08T00:00:00Z",
                ["createdBy"] = new JsonArray { "https://stellaops.org/spdx/tool/benchmark" },
                ["specVersion"] = "3.0.1"
            },
            ["rootElement"] = new JsonArray { "https://stellaops.org/spdx/benchmark-root-pkg" },
            ["profileConformance"] = new JsonArray { "core", "software" }
        };
        graph.Add(document);

        // Add root package
        var rootPackage = new JsonObject
        {
            ["@type"] = "software_Package",
            ["@id"] = "https://stellaops.org/spdx/benchmark-root-pkg",
            ["spdxId"] = "https://stellaops.org/spdx/benchmark-root-pkg",
            ["name"] = "benchmark-root",
            ["packageVersion"] = "1.0.0",
            ["downloadLocation"] = "https://example.com/benchmark-root-1.0.0.tar.gz"
        };
        graph.Add(rootPackage);

        // Add package elements
        for (var i = 0; i < packageCount; i++)
        {
            var pkg = new JsonObject
            {
                ["@type"] = "software_Package",
                ["@id"] = $"https://stellaops.org/spdx/pkg-{i:D5}",
                ["spdxId"] = $"https://stellaops.org/spdx/pkg-{i:D5}",
                ["name"] = $"package-{i:D5}",
                ["packageVersion"] = $"{i / 100}.{i % 100}.0",
                ["downloadLocation"] = $"https://example.com/pkg-{i:D5}.tar.gz",
                ["externalIdentifier"] = new JsonArray
                {
                    new JsonObject
                    {
                        ["@type"] = "ExternalIdentifier",
                        ["externalIdentifierType"] = "packageUrl",
                        ["identifier"] = $"pkg:generic/package-{i:D5}@{i / 100}.{i % 100}.0"
                    }
                }
            };

            // Add some relationships to make it more realistic
            if (i > 0 && i % 10 == 0)
            {
                var relationship = new JsonObject
                {
                    ["@type"] = "Relationship",
                    ["@id"] = $"https://stellaops.org/spdx/rel-{i:D5}",
                    ["spdxId"] = $"https://stellaops.org/spdx/rel-{i:D5}",
                    ["relationshipType"] = "dependsOn",
                    ["from"] = $"https://stellaops.org/spdx/pkg-{i:D5}",
                    ["to"] = new JsonArray { $"https://stellaops.org/spdx/pkg-{i - 1:D5}" }
                };
                graph.Add(relationship);
            }

            graph.Add(pkg);
        }

        var root = new JsonObject
        {
            ["@context"] = "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
            ["@graph"] = graph
        };
        var filePath = Path.Combine(_tempDir, $"spdx3-{packageCount}-elements.json");
        var json = JsonSerializer.Serialize(root, SerializerOptions);
        File.WriteAllText(filePath, json);
        return filePath;
    }

    /// <summary>
    /// Disposes the memory cache and best-effort deletes the temp document directory.
    /// </summary>
    public void Dispose()
    {
        _cache.Dispose();

        // Clean up temp files
        try
        {
            if (Directory.Exists(_tempDir))
            {
                Directory.Delete(_tempDir, recursive: true);
            }
        }
        catch
        {
            // Ignore cleanup errors: a locked temp file must not fail the test run.
        }
    }
}