5100* tests strengthening work
@@ -0,0 +1,651 @@
// -----------------------------------------------------------------------------
// CanonicalSerializationPerfSmokeTests.cs
// Sprint: SPRINT_5100_0009_0001 - Scanner Module Test Implementation
// Task: SCANNER-5100-025 - Add perf smoke tests for canonical serialization (2× regression gate)
// Description: Performance smoke tests for canonical JSON serialization with 2× regression gate.
// -----------------------------------------------------------------------------

using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;

namespace StellaOps.Scanner.Core.Tests.Perf;

/// <summary>
/// Performance smoke tests for canonical JSON serialization.
/// These tests enforce a 2× regression gate: if performance regresses to more than
/// twice the baseline, the test fails.
///
/// Canonical serialization is critical for:
/// - Deterministic hashing of findings, evidence, and attestations
/// - DSSE payload generation
/// - Replay verification
/// </summary>
[Trait("Category", "Perf")]
[Trait("Category", "PERF")]
[Trait("Category", "Smoke")]
public sealed class CanonicalSerializationPerfSmokeTests
{
    private readonly ITestOutputHelper _output;

    // Regression gate multiplier: 2× means test fails if time exceeds 2× baseline
    private const double RegressionGateMultiplier = 2.0;

    // Baselines (in milliseconds) - conservative estimates
    private const long BaselineSmallObjectMs = 1;      // Single small object
    private const long BaselineMediumObjectMs = 5;     // Medium complexity object
    private const long BaselineLargeObjectMs = 20;     // Large object (1000 items)
    private const long BaselineXLargeObjectMs = 100;   // XLarge object (10000 items)
    private const long BaselineDigestComputeMs = 2;    // SHA-256 digest
    private const long BaselineBatchSerializeMs = 50;  // 100 objects
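
    // Worked example of the gate arithmetic: the 20 ms large-object baseline combined
    // with the 2.0× multiplier means that test fails once the measured average exceeds 40 ms.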

    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };
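
    // With these options the output is a single compact camelCase line; for example, the
    // object built by CreateSmallObject() serializes to
    // {"id":"test-id-001","name":"Test Object","value":42.5,"active":true}.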

    public CanonicalSerializationPerfSmokeTests(ITestOutputHelper output)
    {
        _output = output;
    }

    #region Basic Serialization Performance

    [Fact]
    public void SmallObject_Serialization_Under2xBaseline()
    {
        // Arrange
        var baseline = BaselineSmallObjectMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateSmallObject();

        // Warm up
        for (int i = 0; i < 1000; i++) _ = SerializeCanonical(obj);

        // Act - many iterations for accurate measurement
        const int iterations = 10000;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = SerializeCanonical(obj);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Small object serialization: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Small object serialization exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    [Fact]
    public void MediumObject_Serialization_Under2xBaseline()
    {
        // Arrange
        var baseline = BaselineMediumObjectMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateMediumObject();

        // Warm up
        for (int i = 0; i < 100; i++) _ = SerializeCanonical(obj);

        // Act
        const int iterations = 1000;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = SerializeCanonical(obj);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Medium object serialization: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Medium object serialization exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    [Fact]
    public void LargeObject_Serialization_Under2xBaseline()
    {
        // Arrange
        var baseline = BaselineLargeObjectMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateLargeObject(1000);

        // Warm up
        for (int i = 0; i < 10; i++) _ = SerializeCanonical(obj);

        // Act
        const int iterations = 100;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = SerializeCanonical(obj);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Large object (1000 items) serialization: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Large object serialization exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    [Fact]
    public void XLargeObject_Serialization_Under2xBaseline()
    {
        // Arrange
        var baseline = BaselineXLargeObjectMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateLargeObject(10000);

        // Warm up
        for (int i = 0; i < 3; i++) _ = SerializeCanonical(obj);

        // Act
        const int iterations = 20;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = SerializeCanonical(obj);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"XLarge object (10000 items) serialization: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"XLarge object serialization exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    #endregion

    #region Digest Computation Performance

    [Fact]
    public void DigestComputation_Under2xBaseline()
    {
        // Arrange
        var baseline = BaselineDigestComputeMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateMediumObject();
        var json = SerializeCanonical(obj);

        // Warm up
        for (int i = 0; i < 1000; i++) _ = ComputeDigest(json);

        // Act
        const int iterations = 10000;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = ComputeDigest(json);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Digest computation: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"JSON size: {json.Length} bytes");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Digest computation exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    [Fact]
    public void SerializeWithDigest_Combined_Under2xBaseline()
    {
        // Arrange - combined serialize + digest is a common operation
        var baseline = BaselineMediumObjectMs + BaselineDigestComputeMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateMediumObject();

        // Warm up
        for (int i = 0; i < 100; i++) _ = SerializeWithDigest(obj);

        // Act
        const int iterations = 1000;
        var sw = Stopwatch.StartNew();
        for (int i = 0; i < iterations; i++)
        {
            _ = SerializeWithDigest(obj);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Serialize + digest: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Serialize + digest exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
    }

    #endregion

    #region Batch Serialization Performance

    [Fact]
    public void BatchSerialization_Under2xBaseline()
    {
        // Arrange
        const int objectCount = 100;
        var baseline = BaselineBatchSerializeMs;
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var objects = Enumerable.Range(0, objectCount)
            .Select(i => CreateFinding(i))
            .ToList();

        // Warm up
        foreach (var obj in objects.Take(10)) _ = SerializeCanonical(obj);

        // Act
        var sw = Stopwatch.StartNew();
        var results = new List<string>();
        foreach (var obj in objects)
        {
            results.Add(SerializeCanonical(obj));
        }
        sw.Stop();

        // Log
        _output.WriteLine($"Batch serialization ({objectCount} objects): {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Average per object: {sw.Elapsed.TotalMilliseconds / objectCount:F4}ms");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo(threshold,
            $"Batch serialization exceeded 2× regression gate ({sw.ElapsedMilliseconds}ms > {threshold}ms)");
        results.Should().HaveCount(objectCount);
    }

    [Fact]
    public void BatchSerializeWithDigest_Under2xBaseline()
    {
        // Arrange
        const int objectCount = 100;
        var baseline = BaselineBatchSerializeMs * 2; // Allow 2× for the combined operation
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var objects = Enumerable.Range(0, objectCount)
            .Select(i => CreateFinding(i))
            .ToList();

        // Warm up
        foreach (var obj in objects.Take(10)) _ = SerializeWithDigest(obj);

        // Act
        var sw = Stopwatch.StartNew();
        var results = new List<(string Json, string Digest)>();
        foreach (var obj in objects)
        {
            results.Add(SerializeWithDigest(obj));
        }
        sw.Stop();

        // Log
        _output.WriteLine($"Batch serialize + digest ({objectCount} objects): {sw.ElapsedMilliseconds}ms");
        _output.WriteLine($"Average per object: {sw.Elapsed.TotalMilliseconds / objectCount:F4}ms");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo(threshold,
            $"Batch serialize + digest exceeded 2× regression gate ({sw.ElapsedMilliseconds}ms > {threshold}ms)");
        results.Should().HaveCount(objectCount);
    }

    #endregion

    #region Dictionary Ordering Performance

    [Fact]
    public void DictionaryOrdering_Under2xBaseline()
    {
        // Arrange - dictionaries must be serialized with stable key ordering
        var baseline = 10L; // ms
        var threshold = (long)(baseline * RegressionGateMultiplier);
        var obj = CreateObjectWithRandomOrderDictionary(500);

        // Warm up
        for (int i = 0; i < 10; i++) _ = SerializeCanonical(obj);

        // Act
        const int iterations = 100;
        var sw = Stopwatch.StartNew();
        var hashes = new HashSet<string>();
        for (int i = 0; i < iterations; i++)
        {
            var (_, digest) = SerializeWithDigest(obj);
            hashes.Add(digest);
        }
        sw.Stop();

        var avgMs = sw.Elapsed.TotalMilliseconds / iterations;

        // Log
        _output.WriteLine($"Dictionary ordering serialization: {avgMs:F4}ms average over {iterations} iterations");
        _output.WriteLine($"Unique digests: {hashes.Count} (should be 1)");
        _output.WriteLine($"Baseline: {baseline}ms, Threshold (2×): {threshold}ms");

        // Assert
        avgMs.Should().BeLessThanOrEqualTo(threshold,
            $"Dictionary ordering exceeded 2× regression gate ({avgMs:F4}ms > {threshold}ms)");
        hashes.Should().HaveCount(1, "All serializations should produce an identical digest");
    }

    #endregion

    #region Scaling Behavior

    [Fact]
    public void Serialization_ScalesLinearlyWithSize()
    {
        // Arrange
        var sizes = new[] { 100, 500, 1000, 2000 };
        var times = new List<(int size, double ms)>();

        foreach (var size in sizes)
        {
            var obj = CreateLargeObject(size);

            // Warm up
            _ = SerializeCanonical(obj);

            // Measure
            const int iterations = 50;
            var sw = Stopwatch.StartNew();
            for (int i = 0; i < iterations; i++)
            {
                _ = SerializeCanonical(obj);
            }
            sw.Stop();

            var avgMs = sw.Elapsed.TotalMilliseconds / iterations;
            times.Add((size, avgMs));
            _output.WriteLine($"Size {size}: {avgMs:F4}ms");
        }

        // Assert - verify roughly linear scaling
        for (int i = 1; i < times.Count; i++)
        {
            var sizeRatio = times[i].size / (double)times[i - 1].size;
            var timeRatio = times[i].ms / Math.Max(0.001, times[i - 1].ms);
            var scaleFactor = timeRatio / sizeRatio;
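            // Illustrative arithmetic (hypothetical timings): going from 500 to 1000 items gives
            // sizeRatio = 2.0; if the averages were 5 ms and 9 ms, timeRatio would be 1.8 and
            // scaleFactor 0.9, comfortably under the 2.0 gate asserted below.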

            _output.WriteLine($"Size ratio: {sizeRatio:F1}×, Time ratio: {timeRatio:F1}×, Scale factor: {scaleFactor:F2}");

            // Should be better than O(n²)
            scaleFactor.Should().BeLessThan(2.0,
                $"Serialization shows non-linear scaling at size {times[i].size}");
        }
    }

    #endregion

    #region Memory Efficiency

    [Fact]
    public void LargeSerialization_MemoryEfficient_Under20MB()
    {
        // Arrange
        var obj = CreateLargeObject(10000);

        GC.Collect();
        GC.WaitForPendingFinalizers();
        var beforeMem = GC.GetTotalMemory(true);

        // Act
        var json = SerializeCanonical(obj);
        var digest = ComputeDigest(json);

        GC.Collect();
        GC.WaitForPendingFinalizers();
        var afterMem = GC.GetTotalMemory(true);

        var memoryUsedMB = (afterMem - beforeMem) / (1024.0 * 1024.0);

        // Log
        _output.WriteLine($"Large serialization memory usage: {memoryUsedMB:F2}MB");
        _output.WriteLine($"JSON output size: {json.Length / 1024.0:F1}KB");

        // Assert
        memoryUsedMB.Should().BeLessThan(20,
            $"Large serialization memory usage ({memoryUsedMB:F2}MB) exceeds 20MB threshold");

        // Keep objects alive
        digest.Should().NotBeNullOrEmpty();
    }

    #endregion

    #region Determinism Verification

    [Fact]
    public void SerializationIsDeterministic_SameInput_SameOutput()
    {
        // Arrange
        var obj = CreateMediumObject();
        var digests = new HashSet<string>();

        // Act - serialize same object 100 times
        for (int i = 0; i < 100; i++)
        {
            var (_, digest) = SerializeWithDigest(obj);
            digests.Add(digest);
        }

        // Assert
        digests.Should().HaveCount(1, "Same input must produce same digest");
    }

    [Fact]
    public void ParallelSerialization_IsDeterministic()
    {
        // Arrange
        var obj = CreateMediumObject();
        var digests = new System.Collections.Concurrent.ConcurrentBag<string>();

        // Act - serialize in parallel
        Parallel.For(0, 100, _ =>
        {
            var (_, digest) = SerializeWithDigest(obj);
            digests.Add(digest);
        });

        // Assert
        digests.Distinct().Should().HaveCount(1, "Parallel serialization must be deterministic");
    }

    #endregion

    #region Test Infrastructure

    private static TestSmallObject CreateSmallObject()
    {
        return new TestSmallObject
        {
            Id = "test-id-001",
            Name = "Test Object",
            Value = 42.5,
            Active = true
        };
    }

    private static TestMediumObject CreateMediumObject()
    {
        return new TestMediumObject
        {
            Id = "finding-id-001",
            CveId = "CVE-2024-12345",
            Package = "test-package",
            Version = "1.2.3",
            Severity = "HIGH",
            Score = 8.5,
            IsReachable = true,
            ReachabilityTier = "executed",
            Timestamp = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero),
            Tags = new List<string> { "security", "critical", "cve" },
            Metadata = new Dictionary<string, string>
            {
                ["source"] = "nvd",
                ["published"] = "2024-06-15",
                ["modified"] = "2024-12-01"
            }
        };
    }

    private static TestLargeObject CreateLargeObject(int itemCount)
    {
        var random = new Random(42); // Fixed seed
        return new TestLargeObject
        {
            Id = $"batch-{itemCount:D5}",
            Name = "Large Batch",
            Items = Enumerable.Range(0, itemCount)
                .Select(i => new TestItemObject
                {
                    Id = $"item-{i:D5}",
                    Name = $"Item {i}",
                    Value = random.NextDouble() * 100,
                    Tags = Enumerable.Range(0, random.Next(1, 5))
                        .Select(t => $"tag-{t}")
                        .ToList()
                })
                .ToList()
        };
    }

    private static TestMediumObject CreateFinding(int index)
    {
        return new TestMediumObject
        {
            Id = $"finding-{index:D4}",
            CveId = $"CVE-2024-{10000 + index}",
            Package = $"package-{index % 50}",
            Version = $"1.{index % 10}.0",
            Severity = (index % 4) switch { 0 => "CRITICAL", 1 => "HIGH", 2 => "MEDIUM", _ => "LOW" },
            Score = 3.0 + (index % 7),
            IsReachable = index % 3 != 0,
            ReachabilityTier = (index % 3) switch { 0 => "imported", 1 => "called", _ => "executed" },
            Timestamp = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero),
            Tags = new List<string> { "auto-generated" },
            Metadata = new Dictionary<string, string> { ["index"] = index.ToString() }
        };
    }

    private static TestDictionaryObject CreateObjectWithRandomOrderDictionary(int keyCount)
    {
        var random = new Random(42);
        var keys = Enumerable.Range(0, keyCount)
            .Select(i => $"key-{i:D4}")
            .OrderBy(_ => random.Next()) // Randomize order
            .ToList();

        var data = new Dictionary<string, string>();
        foreach (var key in keys)
        {
            data[key] = $"value-for-{key}";
        }

        return new TestDictionaryObject
        {
            Id = "dict-test",
            Data = data
        };
    }

    private static string SerializeCanonical<T>(T value)
    {
        return JsonSerializer.Serialize(value, CanonicalOptions);
    }

    private static string ComputeDigest(string json)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
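
    // The digest is the lowercase hex encoding of the SHA-256 of the UTF-8 JSON, i.e. a
    // 64-character string suitable for stable comparison in the determinism tests above.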

    private static (string Json, string Digest) SerializeWithDigest<T>(T value)
    {
        var json = SerializeCanonical(value);
        var digest = ComputeDigest(json);
        return (json, digest);
    }

    #endregion

    #region Test Models

    private sealed class TestSmallObject
    {
        public required string Id { get; init; }
        public required string Name { get; init; }
        public double Value { get; init; }
        public bool Active { get; init; }
    }

    private sealed class TestMediumObject
    {
        public required string Id { get; init; }
        public required string CveId { get; init; }
        public required string Package { get; init; }
        public required string Version { get; init; }
        public required string Severity { get; init; }
        public double Score { get; init; }
        public bool IsReachable { get; init; }
        public required string ReachabilityTier { get; init; }
        public DateTimeOffset Timestamp { get; init; }
        public List<string> Tags { get; init; } = new();
        public Dictionary<string, string> Metadata { get; init; } = new();
    }

    private sealed class TestLargeObject
    {
        public required string Id { get; init; }
        public required string Name { get; init; }
        public List<TestItemObject> Items { get; init; } = new();
    }

    private sealed class TestItemObject
    {
        public required string Id { get; init; }
        public required string Name { get; init; }
        public double Value { get; init; }
        public List<string> Tags { get; init; } = new();
    }

    private sealed class TestDictionaryObject
    {
        public required string Id { get; init; }
        public Dictionary<string, string> Data { get; init; } = new();
    }

    #endregion
}