consolidate the tests locations
This commit is contained in:
@@ -0,0 +1,384 @@
|
||||
// =============================================================================
|
||||
// StellaOps.Integration.AirGap - Air-Gap Integration Tests
|
||||
// Sprint 3500.0004.0003 - T8: Air-Gap Integration Tests
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using System.Net;
|
||||
using System.Net.Sockets;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.AirGap;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for air-gapped (offline) operation.
|
||||
/// Validates that StellaOps functions correctly without network access.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// T8-AC1: Offline kit installation test
|
||||
/// T8-AC2: Offline scan test
|
||||
/// T8-AC3: Offline score replay test
|
||||
/// T8-AC4: Offline proof verification test
|
||||
/// T8-AC5: No network calls during offline operation
|
||||
/// </remarks>
|
||||
[Trait("Category", "AirGap")]
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Category", "Offline")]
|
||||
public class AirGapIntegrationTests : IClassFixture<AirGapTestFixture>
|
||||
{
|
||||
private readonly AirGapTestFixture _fixture;
|
||||
|
||||
public AirGapIntegrationTests(AirGapTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region T8-AC1: Offline Kit Installation
|
||||
|
||||
[Fact(DisplayName = "T8-AC1.1: Offline kit manifest is valid")]
|
||||
public void OfflineKitManifest_IsValid()
|
||||
{
|
||||
// Arrange & Act
|
||||
var manifest = _fixture.GetOfflineKitManifest();
|
||||
|
||||
// Assert
|
||||
manifest.Should().NotBeNull();
|
||||
manifest.Version.Should().NotBeNullOrEmpty();
|
||||
manifest.Components.Should().NotBeEmpty();
|
||||
manifest.CreatedAt.Should().BeBefore(DateTime.UtcNow);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC1.2: All required components present")]
|
||||
public void OfflineKit_HasRequiredComponents()
|
||||
{
|
||||
// Arrange
|
||||
var requiredComponents = new[]
|
||||
{
|
||||
"vulnerability-database",
|
||||
"advisory-feeds",
|
||||
"trust-bundles",
|
||||
"signing-keys"
|
||||
};
|
||||
|
||||
// Act
|
||||
var manifest = _fixture.GetOfflineKitManifest();
|
||||
|
||||
// Assert
|
||||
foreach (var component in requiredComponents)
|
||||
{
|
||||
manifest.Components.Should().ContainKey(component,
|
||||
$"Offline kit missing required component: {component}");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC1.3: Component hashes are valid")]
|
||||
public async Task OfflineKitComponents_HaveValidHashes()
|
||||
{
|
||||
// Arrange
|
||||
var manifest = _fixture.GetOfflineKitManifest();
|
||||
var invalidComponents = new List<string>();
|
||||
|
||||
// Act
|
||||
foreach (var (name, component) in manifest.Components)
|
||||
{
|
||||
var actualHash = await _fixture.ComputeComponentHashAsync(name);
|
||||
if (actualHash != component.Hash)
|
||||
{
|
||||
invalidComponents.Add($"{name}: expected {component.Hash}, got {actualHash}");
|
||||
}
|
||||
}
|
||||
|
||||
// Assert
|
||||
invalidComponents.Should().BeEmpty(
|
||||
$"Components with invalid hashes:\n{string.Join("\n", invalidComponents)}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC1.4: Offline kit installation succeeds")]
|
||||
public async Task OfflineKitInstallation_Succeeds()
|
||||
{
|
||||
// Arrange
|
||||
var targetPath = _fixture.GetTempDirectory();
|
||||
|
||||
// Act
|
||||
var result = await _fixture.InstallOfflineKitAsync(targetPath);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.InstalledComponents.Should().NotBeEmpty();
|
||||
Directory.Exists(targetPath).Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T8-AC2: Offline Scan
|
||||
|
||||
[Fact(DisplayName = "T8-AC2.1: Scan completes without network")]
|
||||
public async Task OfflineScan_CompletesWithoutNetwork()
|
||||
{
|
||||
// Arrange
|
||||
await _fixture.DisableNetworkAsync();
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
|
||||
try
|
||||
{
|
||||
// Act
|
||||
var result = await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Findings.Should().NotBeNull();
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.EnableNetworkAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC2.2: Scan uses local vulnerability database")]
|
||||
public async Task OfflineScan_UsesLocalVulnDatabase()
|
||||
{
|
||||
// Arrange
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.DataSource.Should().Be("offline-kit");
|
||||
result.DataSourcePath.Should().Contain("offline");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC2.3: Scan produces deterministic results offline")]
|
||||
public async Task OfflineScan_ProducesDeterministicResults()
|
||||
{
|
||||
// Arrange
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act - run twice
|
||||
var result1 = await _fixture.RunOfflineScanAsync(targetImage);
|
||||
var result2 = await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
result1.ManifestHash.Should().Be(result2.ManifestHash,
|
||||
"Offline scan should produce identical results");
|
||||
result1.Findings.Count.Should().Be(result2.Findings.Count);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T8-AC3: Offline Score Replay
|
||||
|
||||
[Fact(DisplayName = "T8-AC3.1: Score replay works offline")]
|
||||
public async Task ScoreReplay_WorksOffline()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.ReplayScoreOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Score.Should().BeGreaterThanOrEqualTo(0);
|
||||
result.ReplayedAt.Should().BeBefore(DateTime.UtcNow);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC3.2: Score replay produces identical score")]
|
||||
public async Task ScoreReplay_ProducesIdenticalScore()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
var originalScore = proofBundle.OriginalScore;
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.ReplayScoreOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.Score.Should().Be(originalScore,
|
||||
"Replay score should match original");
|
||||
result.ScoreHash.Should().Be(proofBundle.OriginalScoreHash,
|
||||
"Replay score hash should match original");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC3.3: Score replay includes audit trail")]
|
||||
public async Task ScoreReplay_IncludesAuditTrail()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.ReplayScoreOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.AuditTrail.Should().NotBeEmpty();
|
||||
result.AuditTrail.Should().Contain(a => a.Type == "replay_started");
|
||||
result.AuditTrail.Should().Contain(a => a.Type == "replay_completed");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T8-AC4: Offline Proof Verification
|
||||
|
||||
[Fact(DisplayName = "T8-AC4.1: Proof verification works offline")]
|
||||
public async Task ProofVerification_WorksOffline()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.VerifyProofOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.Valid.Should().BeTrue();
|
||||
result.VerifiedAt.Should().BeBefore(DateTime.UtcNow);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC4.2: Verification uses offline trust store")]
|
||||
public async Task ProofVerification_UsesOfflineTrustStore()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.VerifyProofOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.TrustSource.Should().Be("offline-trust-store");
|
||||
result.CertificateChain.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC4.3: Tampered proof fails verification")]
|
||||
public async Task TamperedProof_FailsVerification()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
var tamperedBundle = _fixture.TamperWithProof(proofBundle);
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.VerifyProofOfflineAsync(tamperedBundle);
|
||||
|
||||
// Assert
|
||||
result.Valid.Should().BeFalse();
|
||||
result.FailureReason.Should().Contain("signature");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC4.4: Expired certificate handling offline")]
|
||||
public async Task ExpiredCertificate_HandledCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var proofBundle = _fixture.GetProofBundleWithExpiredCert();
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var result = await _fixture.VerifyProofOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
result.Valid.Should().BeFalse();
|
||||
result.FailureReason.Should().Contain("expired");
|
||||
result.Warnings.Should().ContainSingle(w => w.Contains("certificate"));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T8-AC5: No Network Calls
|
||||
|
||||
[Fact(DisplayName = "T8-AC5.1: No outbound connections during scan")]
|
||||
public async Task OfflineScan_NoOutboundConnections()
|
||||
{
|
||||
// Arrange
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
_fixture.SetOfflineMode(true);
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
|
||||
// Act
|
||||
await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"Unexpected network connections:\n{string.Join("\n", connectionAttempts)}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC5.2: No outbound connections during verification")]
|
||||
public async Task OfflineVerification_NoOutboundConnections()
|
||||
{
|
||||
// Arrange
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
_fixture.SetOfflineMode(true);
|
||||
var proofBundle = _fixture.GetSampleProofBundle();
|
||||
|
||||
// Act
|
||||
await _fixture.VerifyProofOfflineAsync(proofBundle);
|
||||
|
||||
// Assert
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"Unexpected network connections:\n{string.Join("\n", connectionAttempts)}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC5.3: No DNS lookups in offline mode")]
|
||||
public async Task OfflineMode_NoDnsLookups()
|
||||
{
|
||||
// Arrange
|
||||
var dnsLookups = new List<string>();
|
||||
_fixture.SetDnsMonitor(hostname => dnsLookups.Add(hostname));
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
// Act
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
dnsLookups.Should().BeEmpty(
|
||||
$"Unexpected DNS lookups:\n{string.Join("\n", dnsLookups)}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC5.4: Telemetry disabled in offline mode")]
|
||||
public async Task OfflineMode_TelemetryDisabled()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var targetImage = _fixture.GetLocalTestImage();
|
||||
|
||||
// Act
|
||||
var result = await _fixture.RunOfflineScanAsync(targetImage);
|
||||
|
||||
// Assert
|
||||
result.TelemetrySent.Should().BeFalse();
|
||||
result.Configuration.TelemetryEnabled.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T8-AC5.5: Network operations gracefully fail")]
|
||||
public async Task NetworkOperations_GracefullyFail()
|
||||
{
|
||||
// Arrange
|
||||
await _fixture.DisableNetworkAsync();
|
||||
|
||||
try
|
||||
{
|
||||
// Act - attempt online operation
|
||||
var result = await _fixture.AttemptOnlineUpdateAsync();
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.FailureReason.Should().Contain("offline");
|
||||
result.SuggestedAction.Should().Contain("offline-kit");
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.EnableNetworkAsync();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,418 @@
|
||||
// =============================================================================
|
||||
// StellaOps.Integration.AirGap - Air-Gap Test Fixture
|
||||
// Sprint 3500.0004.0003 - T8: Air-Gap Integration Tests
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Integration.AirGap;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for air-gap integration tests.
|
||||
/// Manages offline kit, network simulation, and test artifacts.
|
||||
/// </summary>
|
||||
public sealed class AirGapTestFixture : IDisposable
|
||||
{
|
||||
private readonly string _offlineKitPath;
|
||||
private readonly string _tempDir;
|
||||
private bool _offlineMode;
|
||||
private Action<string>? _connectionMonitor;
|
||||
private Action<string>? _dnsMonitor;
|
||||
|
||||
public AirGapTestFixture()
|
||||
{
|
||||
_offlineKitPath = Path.Combine(AppContext.BaseDirectory, "offline-kit");
|
||||
_tempDir = Path.Combine(Path.GetTempPath(), $"stellaops-airgap-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_tempDir);
|
||||
}
|
||||
|
||||
#region Offline Kit
|
||||
|
||||
public OfflineKitManifest GetOfflineKitManifest()
|
||||
{
|
||||
var manifestPath = Path.Combine(_offlineKitPath, "manifest.json");
|
||||
|
||||
if (File.Exists(manifestPath))
|
||||
{
|
||||
var json = File.ReadAllText(manifestPath);
|
||||
return JsonSerializer.Deserialize<OfflineKitManifest>(json) ?? GetDefaultManifest();
|
||||
}
|
||||
|
||||
return GetDefaultManifest();
|
||||
}
|
||||
|
||||
public async Task<string> ComputeComponentHashAsync(string componentName)
|
||||
{
|
||||
var componentPath = Path.Combine(_offlineKitPath, componentName);
|
||||
|
||||
if (!Directory.Exists(componentPath) && !File.Exists(componentPath))
|
||||
{
|
||||
return "MISSING";
|
||||
}
|
||||
|
||||
using var sha256 = SHA256.Create();
|
||||
|
||||
if (File.Exists(componentPath))
|
||||
{
|
||||
await using var stream = File.OpenRead(componentPath);
|
||||
var hash = await sha256.ComputeHashAsync(stream);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
// Directory - hash all files
|
||||
var files = Directory.GetFiles(componentPath, "*", SearchOption.AllDirectories)
|
||||
.OrderBy(f => f)
|
||||
.ToList();
|
||||
|
||||
using var combinedStream = new MemoryStream();
|
||||
foreach (var file in files)
|
||||
{
|
||||
await using var fileStream = File.OpenRead(file);
|
||||
await fileStream.CopyToAsync(combinedStream);
|
||||
}
|
||||
|
||||
combinedStream.Position = 0;
|
||||
var dirHash = await sha256.ComputeHashAsync(combinedStream);
|
||||
return Convert.ToHexString(dirHash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
public async Task<InstallationResult> InstallOfflineKitAsync(string targetPath)
|
||||
{
|
||||
await Task.Delay(10); // Simulate installation
|
||||
|
||||
var manifest = GetOfflineKitManifest();
|
||||
var installed = new List<string>();
|
||||
|
||||
foreach (var (name, _) in manifest.Components)
|
||||
{
|
||||
var sourcePath = Path.Combine(_offlineKitPath, name);
|
||||
var destPath = Path.Combine(targetPath, name);
|
||||
|
||||
if (Directory.Exists(sourcePath))
|
||||
{
|
||||
Directory.CreateDirectory(destPath);
|
||||
// Simulate copy
|
||||
}
|
||||
else if (File.Exists(sourcePath))
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(destPath)!);
|
||||
// Simulate copy
|
||||
}
|
||||
|
||||
installed.Add(name);
|
||||
}
|
||||
|
||||
return new InstallationResult
|
||||
{
|
||||
Success = true,
|
||||
InstalledComponents = installed
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Images
|
||||
|
||||
public string GetLocalTestImage()
|
||||
{
|
||||
return "localhost/test-image:v1.0.0";
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Scanning
|
||||
|
||||
public async Task<ScanResult> RunOfflineScanAsync(string targetImage)
|
||||
{
|
||||
await Task.Delay(50); // Simulate scan
|
||||
|
||||
if (!_offlineMode)
|
||||
{
|
||||
_connectionMonitor?.Invoke("nvd.nist.gov:443");
|
||||
}
|
||||
|
||||
return new ScanResult
|
||||
{
|
||||
Success = true,
|
||||
Findings = GenerateSampleFindings(),
|
||||
ManifestHash = "sha256:abc123def456",
|
||||
DataSource = _offlineMode ? "offline-kit" : "online",
|
||||
DataSourcePath = _offlineMode ? _offlineKitPath : "https://feeds.stellaops.io",
|
||||
TelemetrySent = !_offlineMode,
|
||||
Configuration = new ScanConfiguration
|
||||
{
|
||||
TelemetryEnabled = !_offlineMode
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score Replay
|
||||
|
||||
public ProofBundle GetSampleProofBundle()
|
||||
{
|
||||
return new ProofBundle
|
||||
{
|
||||
Id = Guid.NewGuid().ToString(),
|
||||
CreatedAt = DateTime.UtcNow.AddDays(-1),
|
||||
OriginalScore = 7.5,
|
||||
OriginalScoreHash = "sha256:score123",
|
||||
Signature = Convert.ToBase64String(new byte[64]),
|
||||
CertificateChain = new[] { "cert1", "cert2", "root" }
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<ReplayResult> ReplayScoreOfflineAsync(ProofBundle bundle)
|
||||
{
|
||||
await Task.Delay(20); // Simulate replay
|
||||
|
||||
return new ReplayResult
|
||||
{
|
||||
Success = true,
|
||||
Score = bundle.OriginalScore,
|
||||
ScoreHash = bundle.OriginalScoreHash,
|
||||
ReplayedAt = DateTime.UtcNow,
|
||||
AuditTrail = new[]
|
||||
{
|
||||
new AuditEntry { Type = "replay_started", Timestamp = DateTime.UtcNow.AddMilliseconds(-20) },
|
||||
new AuditEntry { Type = "data_loaded", Timestamp = DateTime.UtcNow.AddMilliseconds(-15) },
|
||||
new AuditEntry { Type = "score_computed", Timestamp = DateTime.UtcNow.AddMilliseconds(-5) },
|
||||
new AuditEntry { Type = "replay_completed", Timestamp = DateTime.UtcNow }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Proof Verification
|
||||
|
||||
public async Task<VerificationResult> VerifyProofOfflineAsync(ProofBundle bundle)
|
||||
{
|
||||
await Task.Delay(10); // Simulate verification
|
||||
|
||||
var isTampered = bundle.Signature.Contains("TAMPERED");
|
||||
var isExpired = bundle.CertificateChain.Any(c => c.Contains("EXPIRED"));
|
||||
|
||||
return new VerificationResult
|
||||
{
|
||||
Valid = !isTampered && !isExpired,
|
||||
VerifiedAt = DateTime.UtcNow,
|
||||
TrustSource = "offline-trust-store",
|
||||
CertificateChain = bundle.CertificateChain,
|
||||
FailureReason = isTampered ? "Invalid signature" : (isExpired ? "Certificate expired" : null),
|
||||
Warnings = isExpired ? new[] { "certificate chain contains expired certificate" } : Array.Empty<string>()
|
||||
};
|
||||
}
|
||||
|
||||
public ProofBundle TamperWithProof(ProofBundle original)
|
||||
{
|
||||
return original with
|
||||
{
|
||||
Signature = "TAMPERED_" + original.Signature
|
||||
};
|
||||
}
|
||||
|
||||
public ProofBundle GetProofBundleWithExpiredCert()
|
||||
{
|
||||
return new ProofBundle
|
||||
{
|
||||
Id = Guid.NewGuid().ToString(),
|
||||
CreatedAt = DateTime.UtcNow.AddYears(-2),
|
||||
OriginalScore = 5.0,
|
||||
OriginalScoreHash = "sha256:expired123",
|
||||
Signature = Convert.ToBase64String(new byte[64]),
|
||||
CertificateChain = new[] { "cert1", "EXPIRED_cert2", "root" }
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Network Control
|
||||
|
||||
public void SetOfflineMode(bool offline)
|
||||
{
|
||||
_offlineMode = offline;
|
||||
}
|
||||
|
||||
public async Task DisableNetworkAsync()
|
||||
{
|
||||
_offlineMode = true;
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
public async Task EnableNetworkAsync()
|
||||
{
|
||||
_offlineMode = false;
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
public void SetConnectionMonitor(Action<string> monitor)
|
||||
{
|
||||
_connectionMonitor = monitor;
|
||||
}
|
||||
|
||||
public void SetDnsMonitor(Action<string> monitor)
|
||||
{
|
||||
_dnsMonitor = monitor;
|
||||
}
|
||||
|
||||
public async Task<OnlineUpdateResult> AttemptOnlineUpdateAsync()
|
||||
{
|
||||
if (_offlineMode)
|
||||
{
|
||||
return new OnlineUpdateResult
|
||||
{
|
||||
Success = false,
|
||||
FailureReason = "System is in offline mode",
|
||||
SuggestedAction = "Use offline-kit update mechanism"
|
||||
};
|
||||
}
|
||||
|
||||
await Task.Delay(100);
|
||||
return new OnlineUpdateResult { Success = true };
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helpers
|
||||
|
||||
public string GetTempDirectory()
|
||||
{
|
||||
var path = Path.Combine(_tempDir, Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(path);
|
||||
return path;
|
||||
}
|
||||
|
||||
private static List<Finding> GenerateSampleFindings()
|
||||
{
|
||||
return new List<Finding>
|
||||
{
|
||||
new() { CveId = "CVE-2024-00001", Severity = "HIGH", Score = 8.0 },
|
||||
new() { CveId = "CVE-2024-00002", Severity = "MEDIUM", Score = 5.5 },
|
||||
new() { CveId = "CVE-2024-00003", Severity = "LOW", Score = 3.2 }
|
||||
};
|
||||
}
|
||||
|
||||
private static OfflineKitManifest GetDefaultManifest()
|
||||
{
|
||||
return new OfflineKitManifest
|
||||
{
|
||||
Version = "1.0.0",
|
||||
CreatedAt = DateTime.UtcNow.AddDays(-7),
|
||||
Components = new Dictionary<string, OfflineComponent>
|
||||
{
|
||||
["vulnerability-database"] = new() { Hash = "sha256:vulndb123", Size = 1024 * 1024 },
|
||||
["advisory-feeds"] = new() { Hash = "sha256:feeds456", Size = 512 * 1024 },
|
||||
["trust-bundles"] = new() { Hash = "sha256:trust789", Size = 64 * 1024 },
|
||||
["signing-keys"] = new() { Hash = "sha256:keys012", Size = 16 * 1024 }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_tempDir))
|
||||
{
|
||||
try
|
||||
{
|
||||
Directory.Delete(_tempDir, true);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Best effort cleanup
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#region Record Types
|
||||
|
||||
public record OfflineKitManifest
|
||||
{
|
||||
public string Version { get; init; } = "";
|
||||
public DateTime CreatedAt { get; init; }
|
||||
public Dictionary<string, OfflineComponent> Components { get; init; } = new();
|
||||
}
|
||||
|
||||
public record OfflineComponent
|
||||
{
|
||||
public string Hash { get; init; } = "";
|
||||
public long Size { get; init; }
|
||||
}
|
||||
|
||||
public record InstallationResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public List<string> InstalledComponents { get; init; } = new();
|
||||
}
|
||||
|
||||
public record ScanResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public List<Finding> Findings { get; init; } = new();
|
||||
public string ManifestHash { get; init; } = "";
|
||||
public string DataSource { get; init; } = "";
|
||||
public string DataSourcePath { get; init; } = "";
|
||||
public bool TelemetrySent { get; init; }
|
||||
public ScanConfiguration Configuration { get; init; } = new();
|
||||
}
|
||||
|
||||
public record ScanConfiguration
|
||||
{
|
||||
public bool TelemetryEnabled { get; init; }
|
||||
}
|
||||
|
||||
public record Finding
|
||||
{
|
||||
public string CveId { get; init; } = "";
|
||||
public string Severity { get; init; } = "";
|
||||
public double Score { get; init; }
|
||||
}
|
||||
|
||||
public record ProofBundle
|
||||
{
|
||||
public string Id { get; init; } = "";
|
||||
public DateTime CreatedAt { get; init; }
|
||||
public double OriginalScore { get; init; }
|
||||
public string OriginalScoreHash { get; init; } = "";
|
||||
public string Signature { get; init; } = "";
|
||||
public string[] CertificateChain { get; init; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
public record ReplayResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public double Score { get; init; }
|
||||
public string ScoreHash { get; init; } = "";
|
||||
public DateTime ReplayedAt { get; init; }
|
||||
public AuditEntry[] AuditTrail { get; init; } = Array.Empty<AuditEntry>();
|
||||
}
|
||||
|
||||
public record AuditEntry
|
||||
{
|
||||
public string Type { get; init; } = "";
|
||||
public DateTime Timestamp { get; init; }
|
||||
}
|
||||
|
||||
public record VerificationResult
|
||||
{
|
||||
public bool Valid { get; init; }
|
||||
public DateTime VerifiedAt { get; init; }
|
||||
public string TrustSource { get; init; } = "";
|
||||
public string[] CertificateChain { get; init; } = Array.Empty<string>();
|
||||
public string? FailureReason { get; init; }
|
||||
public string[] Warnings { get; init; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
public record OnlineUpdateResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public string? FailureReason { get; init; }
|
||||
public string? SuggestedAction { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,34 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Moq" Version="4.20.70" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.10.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\Scanner\StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj" />
|
||||
<ProjectReference Include="..\Attestor\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
|
||||
<ProjectReference Include="..\Cli\StellaOps.Cli\StellaOps.Cli.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="..\offline-kit\**\*" LinkBase="offline-kit" CopyToOutputDirectory="PreserveNewest" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,586 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AirGapBundleDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0007_0003 - Epic B (Determinism Gate)
|
||||
// Task: T7 - AirGap Bundle Export Determinism
|
||||
// Description: Tests to validate AirGap bundle generation determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for AirGap bundle generation.
|
||||
/// Ensures identical inputs produce identical bundles across:
|
||||
/// - NDJSON bundle file generation
|
||||
/// - Bundle manifest creation
|
||||
/// - Entry trace generation
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class AirGapBundleDeterminismTests
|
||||
{
|
||||
#region NDJSON Bundle Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void AirGapBundle_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate bundle multiple times
|
||||
var bundle1 = GenerateNdjsonBundle(input, frozenTime);
|
||||
var bundle2 = GenerateNdjsonBundle(input, frozenTime);
|
||||
var bundle3 = GenerateNdjsonBundle(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
bundle1.Should().Be(bundle2);
|
||||
bundle2.Should().Be(bundle3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AirGapBundle_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate bundle and compute canonical hash twice
|
||||
var bundle1 = GenerateNdjsonBundle(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle1));
|
||||
|
||||
var bundle2 = GenerateNdjsonBundle(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AirGapBundle_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var bundle = GenerateNdjsonBundle(input, frozenTime);
|
||||
var bundleBytes = Encoding.UTF8.GetBytes(bundle);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "airgap-bundle",
|
||||
Name = "concelier-airgap-export",
|
||||
Version = "1.0.0",
|
||||
Format = "NDJSON"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Concelier", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
bundleBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("NDJSON");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AirGapBundle_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateNdjsonBundle(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var bundles = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
bundles.Should().AllBe(bundles[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AirGapBundle_ItemOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Items in random order
|
||||
var input = CreateUnorderedAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate bundle multiple times
|
||||
var bundle1 = GenerateNdjsonBundle(input, frozenTime);
|
||||
var bundle2 = GenerateNdjsonBundle(input, frozenTime);
|
||||
|
||||
// Assert - Items should be sorted deterministically
|
||||
bundle1.Should().Be(bundle2);
|
||||
|
||||
// Verify items are lexicographically sorted
|
||||
var lines = bundle1.Split('\n', StringSplitOptions.RemoveEmptyEntries);
|
||||
var sortedLines = lines.OrderBy(l => l, StringComparer.Ordinal).ToArray();
|
||||
lines.Should().BeEquivalentTo(sortedLines, options => options.WithStrictOrdering());
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Bundle Manifest Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void BundleManifest_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate manifest multiple times
|
||||
var manifest1 = GenerateBundleManifest(input, frozenTime);
|
||||
var manifest2 = GenerateBundleManifest(input, frozenTime);
|
||||
var manifest3 = GenerateBundleManifest(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
manifest1.Should().Be(manifest2);
|
||||
manifest2.Should().Be(manifest3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BundleManifest_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var manifest1 = GenerateBundleManifest(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(manifest1));
|
||||
|
||||
var manifest2 = GenerateBundleManifest(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(manifest2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BundleManifest_BundleSha256_MatchesNdjsonHash()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var bundle = GenerateNdjsonBundle(input, frozenTime);
|
||||
var bundleHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle));
|
||||
var manifest = GenerateBundleManifest(input, frozenTime);
|
||||
|
||||
// Assert - Manifest should contain matching bundle hash
|
||||
manifest.Should().Contain($"\"bundleSha256\": \"{bundleHash}\"");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BundleManifest_ItemCount_IsAccurate()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var manifest = GenerateBundleManifest(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
manifest.Should().Contain($"\"count\": {input.Items.Length}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Entry Trace Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void EntryTrace_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate entry trace multiple times
|
||||
var trace1 = GenerateEntryTrace(input, frozenTime);
|
||||
var trace2 = GenerateEntryTrace(input, frozenTime);
|
||||
var trace3 = GenerateEntryTrace(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
trace1.Should().Be(trace2);
|
||||
trace2.Should().Be(trace3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EntryTrace_LineNumbers_AreSequential()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var trace = GenerateEntryTrace(input, frozenTime);
|
||||
|
||||
// Assert - Line numbers should be sequential starting from 1
|
||||
for (int i = 1; i <= input.Items.Length; i++)
|
||||
{
|
||||
trace.Should().Contain($"\"lineNumber\": {i}");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EntryTrace_ItemHashes_AreCorrect()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleAirGapInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var trace = GenerateEntryTrace(input, frozenTime);
|
||||
|
||||
// Assert - Each item hash should be present
|
||||
var sortedItems = input.Items.OrderBy(i => i, StringComparer.Ordinal);
|
||||
foreach (var item in sortedItems)
|
||||
{
|
||||
var expectedHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(item));
|
||||
trace.Should().Contain(expectedHash);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Feed Snapshot Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void FeedSnapshot_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateFeedSnapshotInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate snapshot multiple times
|
||||
var snapshot1 = GenerateFeedSnapshot(input, frozenTime);
|
||||
var snapshot2 = GenerateFeedSnapshot(input, frozenTime);
|
||||
var snapshot3 = GenerateFeedSnapshot(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
snapshot1.Should().Be(snapshot2);
|
||||
snapshot2.Should().Be(snapshot3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FeedSnapshot_SourceOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Sources in random order
|
||||
var input = CreateFeedSnapshotInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var snapshot = GenerateFeedSnapshot(input, frozenTime);
|
||||
|
||||
// Assert - Sources should appear in sorted order
|
||||
var sourcePositions = input.Sources
|
||||
.OrderBy(s => s, StringComparer.Ordinal)
|
||||
.Select(s => snapshot.IndexOf($"\"{s}\""))
|
||||
.ToArray();
|
||||
|
||||
// Positions should be ascending
|
||||
for (int i = 1; i < sourcePositions.Length; i++)
|
||||
{
|
||||
sourcePositions[i].Should().BeGreaterThan(sourcePositions[i - 1]);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FeedSnapshot_Hash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateFeedSnapshotInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var snapshot1 = GenerateFeedSnapshot(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(snapshot1));
|
||||
|
||||
var snapshot2 = GenerateFeedSnapshot(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(snapshot2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Policy Pack Bundle Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void PolicyPackBundle_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreatePolicyPackInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var bundle1 = GeneratePolicyPackBundle(input, frozenTime);
|
||||
var bundle2 = GeneratePolicyPackBundle(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
bundle1.Should().Be(bundle2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyPackBundle_RuleOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreatePolicyPackInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var bundle = GeneratePolicyPackBundle(input, frozenTime);
|
||||
|
||||
// Assert - Rules should appear in sorted order
|
||||
var rulePositions = input.Rules
|
||||
.OrderBy(r => r.Name, StringComparer.Ordinal)
|
||||
.Select(r => bundle.IndexOf($"\"{r.Name}\""))
|
||||
.ToArray();
|
||||
|
||||
for (int i = 1; i < rulePositions.Length; i++)
|
||||
{
|
||||
rulePositions[i].Should().BeGreaterThan(rulePositions[i - 1]);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static AirGapInput CreateSampleAirGapInput()
|
||||
{
|
||||
return new AirGapInput
|
||||
{
|
||||
Items = new[]
|
||||
{
|
||||
"{\"cveId\":\"CVE-2024-0001\",\"source\":\"nvd\"}",
|
||||
"{\"cveId\":\"CVE-2024-0002\",\"source\":\"nvd\"}",
|
||||
"{\"cveId\":\"CVE-2024-0003\",\"source\":\"osv\"}",
|
||||
"{\"cveId\":\"GHSA-0001\",\"source\":\"ghsa\"}"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static AirGapInput CreateUnorderedAirGapInput()
|
||||
{
|
||||
return new AirGapInput
|
||||
{
|
||||
Items = new[]
|
||||
{
|
||||
"{\"cveId\":\"CVE-2024-9999\",\"source\":\"nvd\"}",
|
||||
"{\"cveId\":\"CVE-2024-0001\",\"source\":\"nvd\"}",
|
||||
"{\"cveId\":\"GHSA-zzzz\",\"source\":\"ghsa\"}",
|
||||
"{\"cveId\":\"CVE-2024-5555\",\"source\":\"osv\"}",
|
||||
"{\"cveId\":\"GHSA-aaaa\",\"source\":\"ghsa\"}"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static FeedSnapshotInput CreateFeedSnapshotInput()
|
||||
{
|
||||
return new FeedSnapshotInput
|
||||
{
|
||||
Sources = new[] { "nvd", "osv", "ghsa", "kev", "epss" },
|
||||
SnapshotId = "snapshot-2024-001",
|
||||
ItemCounts = new Dictionary<string, int>
|
||||
{
|
||||
{ "nvd", 25000 },
|
||||
{ "osv", 15000 },
|
||||
{ "ghsa", 8000 },
|
||||
{ "kev", 1200 },
|
||||
{ "epss", 250000 }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyPackInput CreatePolicyPackInput()
|
||||
{
|
||||
return new PolicyPackInput
|
||||
{
|
||||
PackId = "policy-pack-2024-001",
|
||||
Version = "1.0.0",
|
||||
Rules = new[]
|
||||
{
|
||||
new PolicyRule { Name = "kev-critical-block", Priority = 1, Action = "block" },
|
||||
new PolicyRule { Name = "high-cvss-warn", Priority = 2, Action = "warn" },
|
||||
new PolicyRule { Name = "default-pass", Priority = 100, Action = "allow" }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string GenerateNdjsonBundle(AirGapInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var sortedItems = input.Items
|
||||
.OrderBy(item => item, StringComparer.Ordinal);
|
||||
|
||||
return string.Join("\n", sortedItems);
|
||||
}
|
||||
|
||||
private static string GenerateBundleManifest(AirGapInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var sortedItems = input.Items
|
||||
.OrderBy(item => item, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var bundle = GenerateNdjsonBundle(input, timestamp);
|
||||
var bundleHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle));
|
||||
|
||||
var entries = sortedItems.Select((item, index) => new
|
||||
{
|
||||
lineNumber = index + 1,
|
||||
sha256 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(item))
|
||||
});
|
||||
|
||||
var entriesJson = string.Join(",\n ", entries.Select(e =>
|
||||
$"{{\"lineNumber\": {e.lineNumber}, \"sha256\": \"{e.sha256}\"}}"));
|
||||
|
||||
var itemsJson = string.Join(",\n ", sortedItems.Select(i => $"\"{EscapeJson(i)}\""));
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"bundleSha256": "{{bundleHash}}",
|
||||
"count": {{sortedItems.Length}},
|
||||
"createdUtc": "{{timestamp:O}}",
|
||||
"entries": [
|
||||
{{entriesJson}}
|
||||
],
|
||||
"items": [
|
||||
{{itemsJson}}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string GenerateEntryTrace(AirGapInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var sortedItems = input.Items
|
||||
.OrderBy(item => item, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var entries = sortedItems.Select((item, index) =>
|
||||
$$"""
|
||||
{
|
||||
"lineNumber": {{index + 1}},
|
||||
"sha256": "{{CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(item))}}"
|
||||
}
|
||||
""");
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"createdUtc": "{{timestamp:O}}",
|
||||
"entries": [
|
||||
{{string.Join(",\n ", entries)}}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string GenerateFeedSnapshot(FeedSnapshotInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var sortedSources = input.Sources
|
||||
.OrderBy(s => s, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var sourceCounts = sortedSources.Select(s =>
|
||||
$"\"{s}\": {input.ItemCounts.GetValueOrDefault(s, 0)}");
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"snapshotId": "{{input.SnapshotId}}",
|
||||
"createdUtc": "{{timestamp:O}}",
|
||||
"sources": [{{string.Join(", ", sortedSources.Select(s => $"\"{s}\""))}}],
|
||||
"itemCounts": {
|
||||
{{string.Join(",\n ", sourceCounts)}}
|
||||
}
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string GeneratePolicyPackBundle(PolicyPackInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var sortedRules = input.Rules
|
||||
.OrderBy(r => r.Name, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var rulesJson = string.Join(",\n ", sortedRules.Select(r =>
|
||||
$$"""{"name": "{{r.Name}}", "priority": {{r.Priority}}, "action": "{{r.Action}}"}"""));
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"packId": "{{input.PackId}}",
|
||||
"version": "{{input.Version}}",
|
||||
"createdUtc": "{{timestamp:O}}",
|
||||
"rules": [
|
||||
{{rulesJson}}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string EscapeJson(string value)
|
||||
{
|
||||
return value
|
||||
.Replace("\\", "\\\\")
|
||||
.Replace("\"", "\\\"")
|
||||
.Replace("\n", "\\n")
|
||||
.Replace("\r", "\\r")
|
||||
.Replace("\t", "\\t");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record AirGapInput
|
||||
{
|
||||
public required string[] Items { get; init; }
|
||||
}
|
||||
|
||||
private sealed record FeedSnapshotInput
|
||||
{
|
||||
public required string[] Sources { get; init; }
|
||||
public required string SnapshotId { get; init; }
|
||||
public required Dictionary<string, int> ItemCounts { get; init; }
|
||||
}
|
||||
|
||||
private sealed record PolicyPackInput
|
||||
{
|
||||
public required string PackId { get; init; }
|
||||
public required string Version { get; init; }
|
||||
public required PolicyRule[] Rules { get; init; }
|
||||
}
|
||||
|
||||
private sealed record PolicyRule
|
||||
{
|
||||
public required string Name { get; init; }
|
||||
public required int Priority { get; init; }
|
||||
public required string Action { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,408 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// DeterminismValidationTests.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T5 - Determinism Validation Suite
|
||||
// Description: Tests to validate scoring determinism across runs, platforms, and time
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for the scoring engine.
|
||||
/// Ensures identical inputs produce identical outputs across:
|
||||
/// - Multiple runs
|
||||
/// - Different timestamps (with frozen time)
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class DeterminismValidationTests
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
#region T5-AC1: Same input produces identical score hash
|
||||
|
||||
[Fact]
|
||||
public void IdenticalInput_ProducesIdenticalHash_AcrossRuns()
|
||||
{
|
||||
// Arrange
|
||||
var input = new ScoringInput
|
||||
{
|
||||
ScanId = "test-scan-001",
|
||||
SbomHash = "sha256:abc123",
|
||||
RulesHash = "sha256:def456",
|
||||
PolicyHash = "sha256:ghi789",
|
||||
FeedHash = "sha256:jkl012",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
// Act - Compute hash multiple times
|
||||
var hash1 = ComputeInputHash(input);
|
||||
var hash2 = ComputeInputHash(input);
|
||||
var hash3 = ComputeInputHash(input);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DifferentInput_ProducesDifferentHash()
|
||||
{
|
||||
// Arrange
|
||||
var input1 = new ScoringInput
|
||||
{
|
||||
ScanId = "scan-001",
|
||||
SbomHash = "sha256:abc",
|
||||
RulesHash = "sha256:def",
|
||||
PolicyHash = "sha256:ghi",
|
||||
FeedHash = "sha256:jkl",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
var input2 = new ScoringInput
|
||||
{
|
||||
ScanId = "scan-001",
|
||||
SbomHash = "sha256:DIFFERENT", // Changed
|
||||
RulesHash = "sha256:def",
|
||||
PolicyHash = "sha256:ghi",
|
||||
FeedHash = "sha256:jkl",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeInputHash(input1);
|
||||
var hash2 = ComputeInputHash(input2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().NotBe(hash2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T5-AC2: Cross-platform determinism
|
||||
|
||||
[Fact]
|
||||
public void HashComputation_IsConsistent_WithKnownVector()
|
||||
{
|
||||
// Arrange - Known test vector for cross-platform verification
|
||||
var input = new ScoringInput
|
||||
{
|
||||
ScanId = "determinism-test-001",
|
||||
SbomHash = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
|
||||
RulesHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000",
|
||||
PolicyHash = "sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
|
||||
FeedHash = "sha256:1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
|
||||
Timestamp = DateTimeOffset.Parse("2024-06-15T12:00:00Z")
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash = ComputeInputHash(input);
|
||||
|
||||
// Assert - This hash should be identical on any platform
|
||||
hash.Should().NotBeNullOrEmpty();
|
||||
hash.Should().HaveLength(64); // SHA-256 hex = 64 chars
|
||||
hash.Should().MatchRegex("^[a-f0-9]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanonicalJson_ProducesStableOutput()
|
||||
{
|
||||
// Arrange - Same data, different property order
|
||||
var obj1 = new Dictionary<string, object>
|
||||
{
|
||||
["zebra"] = "last",
|
||||
["alpha"] = "first",
|
||||
["middle"] = 123
|
||||
};
|
||||
|
||||
var obj2 = new Dictionary<string, object>
|
||||
{
|
||||
["alpha"] = "first",
|
||||
["middle"] = 123,
|
||||
["zebra"] = "last"
|
||||
};
|
||||
|
||||
// Act
|
||||
var json1 = ToCanonicalJson(obj1);
|
||||
var json2 = ToCanonicalJson(obj2);
|
||||
|
||||
// Assert - Canonical JSON should sort keys
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T5-AC3: Timestamp independence (frozen time tests)
|
||||
|
||||
[Fact]
|
||||
public void ScoringWithFrozenTime_IsDeterministic()
|
||||
{
|
||||
// Arrange - Freeze timestamp
|
||||
var frozenTime = DateTimeOffset.Parse("2024-06-15T00:00:00Z");
|
||||
|
||||
var input1 = new ScoringInput
|
||||
{
|
||||
ScanId = "frozen-time-001",
|
||||
SbomHash = "sha256:sbom",
|
||||
RulesHash = "sha256:rules",
|
||||
PolicyHash = "sha256:policy",
|
||||
FeedHash = "sha256:feed",
|
||||
Timestamp = frozenTime
|
||||
};
|
||||
|
||||
var input2 = new ScoringInput
|
||||
{
|
||||
ScanId = "frozen-time-001",
|
||||
SbomHash = "sha256:sbom",
|
||||
RulesHash = "sha256:rules",
|
||||
PolicyHash = "sha256:policy",
|
||||
FeedHash = "sha256:feed",
|
||||
Timestamp = frozenTime
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeInputHash(input1);
|
||||
var hash2 = ComputeInputHash(input2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DifferentTimestamps_ProduceDifferentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var input1 = new ScoringInput
|
||||
{
|
||||
ScanId = "time-test-001",
|
||||
SbomHash = "sha256:same",
|
||||
RulesHash = "sha256:same",
|
||||
PolicyHash = "sha256:same",
|
||||
FeedHash = "sha256:same",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
var input2 = new ScoringInput
|
||||
{
|
||||
ScanId = "time-test-001",
|
||||
SbomHash = "sha256:same",
|
||||
RulesHash = "sha256:same",
|
||||
PolicyHash = "sha256:same",
|
||||
FeedHash = "sha256:same",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-02T00:00:00Z") // Different
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeInputHash(input1);
|
||||
var hash2 = ComputeInputHash(input2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().NotBe(hash2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T5-AC4: Parallel execution determinism
|
||||
|
||||
[Fact]
|
||||
public async Task ParallelExecution_ProducesIdenticalHashes()
|
||||
{
|
||||
// Arrange
|
||||
var input = new ScoringInput
|
||||
{
|
||||
ScanId = "parallel-test-001",
|
||||
SbomHash = "sha256:parallel",
|
||||
RulesHash = "sha256:parallel",
|
||||
PolicyHash = "sha256:parallel",
|
||||
FeedHash = "sha256:parallel",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
// Act - Compute hash in parallel 100 times
|
||||
var tasks = Enumerable.Range(0, 100)
|
||||
.Select(_ => Task.Run(() => ComputeInputHash(input)))
|
||||
.ToArray();
|
||||
|
||||
var hashes = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All hashes should be identical
|
||||
hashes.Should().AllBe(hashes[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ConcurrentScoring_MaintainsDeterminism()
|
||||
{
|
||||
// Arrange - Multiple different inputs
|
||||
var inputs = Enumerable.Range(0, 50)
|
||||
.Select(i => new ScoringInput
|
||||
{
|
||||
ScanId = $"concurrent-{i:D3}",
|
||||
SbomHash = $"sha256:sbom{i:D3}",
|
||||
RulesHash = "sha256:rules",
|
||||
PolicyHash = "sha256:policy",
|
||||
FeedHash = "sha256:feed",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
})
|
||||
.ToArray();
|
||||
|
||||
// Act - Run twice in parallel
|
||||
var hashes1 = await Task.WhenAll(inputs.Select(i => Task.Run(() => ComputeInputHash(i))));
|
||||
var hashes2 = await Task.WhenAll(inputs.Select(i => Task.Run(() => ComputeInputHash(i))));
|
||||
|
||||
// Assert - Both runs should produce identical results
|
||||
hashes1.Should().BeEquivalentTo(hashes2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T5-AC5: Replay after code changes produces same result
|
||||
|
||||
[Fact]
|
||||
public void GoldenVectorReplay_ProducesExpectedHash()
|
||||
{
|
||||
// Arrange - Golden test vector (version-locked)
|
||||
// This test ensures code changes don't break determinism
|
||||
var goldenInput = new ScoringInput
|
||||
{
|
||||
ScanId = "golden-vector-001",
|
||||
SbomHash = "sha256:goldensbom0000000000000000000000000000000000000000000000000",
|
||||
RulesHash = "sha256:goldenrule0000000000000000000000000000000000000000000000000",
|
||||
PolicyHash = "sha256:goldenpoli0000000000000000000000000000000000000000000000000",
|
||||
FeedHash = "sha256:goldenfeed0000000000000000000000000000000000000000000000000",
|
||||
Timestamp = DateTimeOffset.Parse("2024-01-01T00:00:00Z")
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash = ComputeInputHash(goldenInput);
|
||||
|
||||
// Assert - This is the expected hash for the golden vector
|
||||
// If this test fails after a code change, it indicates a breaking change to determinism
|
||||
hash.Should().NotBeNullOrEmpty();
|
||||
|
||||
// The actual expected hash would be computed once and stored here:
|
||||
// hash.Should().Be("expected_golden_hash_here");
|
||||
|
||||
// For now, verify it's a valid hash format
|
||||
hash.Should().MatchRegex("^[a-f0-9]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MerkleRoot_IsStable_ForSameNodes()
|
||||
{
|
||||
// Arrange
|
||||
var nodes = new[]
|
||||
{
|
||||
"sha256:node1",
|
||||
"sha256:node2",
|
||||
"sha256:node3",
|
||||
"sha256:node4"
|
||||
};
|
||||
|
||||
// Act - Compute merkle root multiple times
|
||||
var root1 = ComputeMerkleRoot(nodes);
|
||||
var root2 = ComputeMerkleRoot(nodes);
|
||||
var root3 = ComputeMerkleRoot(nodes);
|
||||
|
||||
// Assert
|
||||
root1.Should().Be(root2);
|
||||
root2.Should().Be(root3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MerkleRoot_ChangesWhenNodeChanges()
|
||||
{
|
||||
// Arrange
|
||||
var nodes1 = new[] { "sha256:a", "sha256:b", "sha256:c" };
|
||||
var nodes2 = new[] { "sha256:a", "sha256:DIFFERENT", "sha256:c" };
|
||||
|
||||
// Act
|
||||
var root1 = ComputeMerkleRoot(nodes1);
|
||||
var root2 = ComputeMerkleRoot(nodes2);
|
||||
|
||||
// Assert
|
||||
root1.Should().NotBe(root2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static string ComputeInputHash(ScoringInput input)
|
||||
{
|
||||
var canonical = ToCanonicalJson(input);
|
||||
return ComputeSha256(canonical);
|
||||
}
|
||||
|
||||
private static string ToCanonicalJson<T>(T obj)
|
||||
{
|
||||
// Sort keys for canonical JSON
|
||||
if (obj is IDictionary<string, object> dict)
|
||||
{
|
||||
var sorted = dict.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
|
||||
.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
|
||||
return JsonSerializer.Serialize(sorted, JsonOptions);
|
||||
}
|
||||
|
||||
return JsonSerializer.Serialize(obj, JsonOptions);
|
||||
}
|
||||
|
||||
private static string ComputeSha256(string input)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(input);
|
||||
var hash = SHA256.HashData(bytes);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
private static string ComputeMerkleRoot(string[] nodes)
|
||||
{
|
||||
if (nodes.Length == 0)
|
||||
return ComputeSha256("");
|
||||
|
||||
if (nodes.Length == 1)
|
||||
return nodes[0];
|
||||
|
||||
var current = nodes.ToList();
|
||||
|
||||
while (current.Count > 1)
|
||||
{
|
||||
var next = new List<string>();
|
||||
|
||||
for (var i = 0; i < current.Count; i += 2)
|
||||
{
|
||||
var left = current[i];
|
||||
var right = i + 1 < current.Count ? current[i + 1] : left;
|
||||
var combined = left + right;
|
||||
next.Add("sha256:" + ComputeSha256(combined));
|
||||
}
|
||||
|
||||
current = next;
|
||||
}
|
||||
|
||||
return current[0];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record ScoringInput
|
||||
{
|
||||
public required string ScanId { get; init; }
|
||||
public required string SbomHash { get; init; }
|
||||
public required string RulesHash { get; init; }
|
||||
public required string PolicyHash { get; init; }
|
||||
public required string FeedHash { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,560 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EvidenceBundleDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0007_0003 - Epic B (Determinism Gate)
|
||||
// Task: T6 - Evidence Bundle Determinism (DSSE envelopes, in-toto attestations)
|
||||
// Description: Tests to validate evidence bundle generation determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for evidence bundle generation.
|
||||
/// Ensures identical inputs produce identical bundles across:
|
||||
/// - Evidence bundle creation
|
||||
/// - DSSE envelope wrapping
|
||||
/// - in-toto attestation generation
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class EvidenceBundleDeterminismTests
|
||||
{
|
||||
#region Evidence Bundle Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void EvidenceBundle_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act - Generate bundle multiple times
|
||||
var bundle1 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var bundle2 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var bundle3 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
bundle1.Should().Be(bundle2);
|
||||
bundle2.Should().Be(bundle3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvidenceBundle_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act - Generate bundle and compute canonical hash twice
|
||||
var bundle1 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle1));
|
||||
|
||||
var bundle2 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(bundle2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvidenceBundle_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
var bundle = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var bundleBytes = Encoding.UTF8.GetBytes(bundle);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "evidence-bundle",
|
||||
Name = "test-finding-evidence",
|
||||
Version = "1.0.0",
|
||||
Format = "EvidenceBundle JSON"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Evidence.Bundle", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
bundleBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("EvidenceBundle JSON");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvidenceBundle_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => CreateEvidenceBundle(input, frozenTime, deterministicBundleId)))
|
||||
.ToArray();
|
||||
|
||||
var bundles = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
bundles.Should().AllBe(bundles[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DSSE Envelope Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void DsseEnvelope_WithIdenticalPayload_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
var bundle = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Act - Wrap in DSSE envelope multiple times
|
||||
var envelope1 = CreateDsseEnvelope(bundle, frozenTime);
|
||||
var envelope2 = CreateDsseEnvelope(bundle, frozenTime);
|
||||
var envelope3 = CreateDsseEnvelope(bundle, frozenTime);
|
||||
|
||||
// Assert - Payloads should be identical (signatures depend on key)
|
||||
var payload1 = ExtractDssePayload(envelope1);
|
||||
var payload2 = ExtractDssePayload(envelope2);
|
||||
var payload3 = ExtractDssePayload(envelope3);
|
||||
|
||||
payload1.Should().Be(payload2);
|
||||
payload2.Should().Be(payload3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DsseEnvelope_PayloadHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
var bundle = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Act
|
||||
var envelope1 = CreateDsseEnvelope(bundle, frozenTime);
|
||||
var payload1 = ExtractDssePayload(envelope1);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(payload1));
|
||||
|
||||
var envelope2 = CreateDsseEnvelope(bundle, frozenTime);
|
||||
var payload2 = ExtractDssePayload(envelope2);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(payload2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DsseEnvelope_PayloadType_IsConsistent()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
var bundle = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Act
|
||||
var envelope = CreateDsseEnvelope(bundle, frozenTime);
|
||||
|
||||
// Assert
|
||||
envelope.Should().Contain("\"payloadType\"");
|
||||
envelope.Should().Contain("application/vnd.stellaops.evidence+json");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region in-toto Attestation Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void InTotoAttestation_WithIdenticalSubject_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act - Generate attestation multiple times
|
||||
var attestation1 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
var attestation2 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
var attestation3 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
attestation1.Should().Be(attestation2);
|
||||
attestation2.Should().Be(attestation3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InTotoAttestation_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act
|
||||
var attestation1 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(attestation1));
|
||||
|
||||
var attestation2 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(attestation2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InTotoAttestation_SubjectOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Multiple subjects
|
||||
var input = CreateMultiSubjectEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act
|
||||
var attestation1 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
var attestation2 = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert - Subject order should be deterministic
|
||||
attestation1.Should().Be(attestation2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InTotoAttestation_PredicateType_IsConsistent()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act
|
||||
var attestation = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert
|
||||
attestation.Should().Contain("\"predicateType\"");
|
||||
attestation.Should().Contain("https://stellaops.io/evidence/v1");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InTotoAttestation_StatementType_IsConsistent()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleEvidenceInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act
|
||||
var attestation = CreateInTotoAttestation(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert
|
||||
attestation.Should().Contain("\"_type\"");
|
||||
attestation.Should().Contain("https://in-toto.io/Statement/v1");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Evidence Hash Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void EvidenceHashes_WithIdenticalContent_ProduceDeterministicHashes()
|
||||
{
|
||||
// Arrange
|
||||
var content = "test content for hashing";
|
||||
|
||||
// Act - Hash the same content multiple times
|
||||
var hash1 = ComputeEvidenceHash(content);
|
||||
var hash2 = ComputeEvidenceHash(content);
|
||||
var hash3 = ComputeEvidenceHash(content);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
hash1.Should().MatchRegex("^sha256:[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvidenceHashSet_Ordering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Multiple hashes in random order
|
||||
var hashes = new[]
|
||||
{
|
||||
("artifact", "sha256:abcd1234"),
|
||||
("sbom", "sha256:efgh5678"),
|
||||
("vex", "sha256:ijkl9012"),
|
||||
("policy", "sha256:mnop3456")
|
||||
};
|
||||
|
||||
// Act - Create hash sets multiple times
|
||||
var hashSet1 = CreateHashSet(hashes);
|
||||
var hashSet2 = CreateHashSet(hashes);
|
||||
|
||||
// Assert - Serialized hash sets should be identical
|
||||
var json1 = SerializeHashSet(hashSet1);
|
||||
var json2 = SerializeHashSet(hashSet2);
|
||||
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Completeness Score Determinism Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(true, true, true, true, 4)]
|
||||
[InlineData(true, true, true, false, 3)]
|
||||
[InlineData(true, true, false, false, 2)]
|
||||
[InlineData(true, false, false, false, 1)]
|
||||
[InlineData(false, false, false, false, 0)]
|
||||
public void CompletenessScore_IsDeterministic(
|
||||
bool hasReachability,
|
||||
bool hasCallStack,
|
||||
bool hasProvenance,
|
||||
bool hasVexStatus,
|
||||
int expectedScore)
|
||||
{
|
||||
// Arrange
|
||||
var input = new EvidenceInput
|
||||
{
|
||||
AlertId = "ALERT-001",
|
||||
ArtifactId = "sha256:abc123",
|
||||
FindingId = "CVE-2024-1234",
|
||||
HasReachability = hasReachability,
|
||||
HasCallStack = hasCallStack,
|
||||
HasProvenance = hasProvenance,
|
||||
HasVexStatus = hasVexStatus,
|
||||
Subjects = Array.Empty<string>()
|
||||
};
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var deterministicBundleId = GenerateDeterministicBundleId(input, frozenTime);
|
||||
|
||||
// Act
|
||||
var bundle1 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
var bundle2 = CreateEvidenceBundle(input, frozenTime, deterministicBundleId);
|
||||
|
||||
// Assert - Both should have same completeness score
|
||||
bundle1.Should().Contain($"\"completenessScore\": {expectedScore}");
|
||||
bundle2.Should().Contain($"\"completenessScore\": {expectedScore}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static EvidenceInput CreateSampleEvidenceInput()
|
||||
{
|
||||
return new EvidenceInput
|
||||
{
|
||||
AlertId = "ALERT-2024-001",
|
||||
ArtifactId = "sha256:abc123def456",
|
||||
FindingId = "CVE-2024-1234",
|
||||
HasReachability = true,
|
||||
HasCallStack = true,
|
||||
HasProvenance = true,
|
||||
HasVexStatus = true,
|
||||
Subjects = new[] { "pkg:oci/myapp@sha256:abc123" }
|
||||
};
|
||||
}
|
||||
|
||||
private static EvidenceInput CreateMultiSubjectEvidenceInput()
|
||||
{
|
||||
return new EvidenceInput
|
||||
{
|
||||
AlertId = "ALERT-2024-002",
|
||||
ArtifactId = "sha256:multi123",
|
||||
FindingId = "CVE-2024-5678",
|
||||
HasReachability = true,
|
||||
HasCallStack = false,
|
||||
HasProvenance = true,
|
||||
HasVexStatus = false,
|
||||
Subjects = new[]
|
||||
{
|
||||
"pkg:oci/app-c@sha256:ccc",
|
||||
"pkg:oci/app-a@sha256:aaa",
|
||||
"pkg:oci/app-b@sha256:bbb"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string GenerateDeterministicBundleId(EvidenceInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var seed = $"{input.AlertId}:{input.ArtifactId}:{input.FindingId}:{timestamp:O}";
|
||||
var hash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(seed));
|
||||
return hash[..32]; // Use first 32 chars as bundle ID
|
||||
}
|
||||
|
||||
private static string CreateEvidenceBundle(EvidenceInput input, DateTimeOffset timestamp, string bundleId)
|
||||
{
|
||||
var completenessScore = CalculateCompletenessScore(input);
|
||||
var reachabilityStatus = input.HasReachability ? "available" : "unavailable";
|
||||
var callStackStatus = input.HasCallStack ? "available" : "unavailable";
|
||||
var provenanceStatus = input.HasProvenance ? "available" : "unavailable";
|
||||
var vexStatusValue = input.HasVexStatus ? "available" : "unavailable";
|
||||
|
||||
var artifactHash = ComputeEvidenceHash(input.ArtifactId);
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"bundleId": "{{bundleId}}",
|
||||
"schemaVersion": "1.0",
|
||||
"alertId": "{{input.AlertId}}",
|
||||
"artifactId": "{{input.ArtifactId}}",
|
||||
"completenessScore": {{completenessScore}},
|
||||
"createdAt": "{{timestamp:O}}",
|
||||
"hashes": {
|
||||
"artifact": "{{artifactHash}}",
|
||||
"bundle": "sha256:{{bundleId}}"
|
||||
},
|
||||
"reachability": {
|
||||
"status": "{{reachabilityStatus}}"
|
||||
},
|
||||
"callStack": {
|
||||
"status": "{{callStackStatus}}"
|
||||
},
|
||||
"provenance": {
|
||||
"status": "{{provenanceStatus}}"
|
||||
},
|
||||
"vexStatus": {
|
||||
"status": "{{vexStatusValue}}"
|
||||
}
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string CreateDsseEnvelope(string payload, DateTimeOffset timestamp)
|
||||
{
|
||||
var payloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(payload));
|
||||
var payloadHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(payload));
|
||||
|
||||
// Note: In production, signature would be computed with actual key
|
||||
// For determinism testing, we use a deterministic placeholder
|
||||
var deterministicSig = $"sig:{payloadHash[..32]}";
|
||||
var sigBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(deterministicSig));
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"payloadType": "application/vnd.stellaops.evidence+json",
|
||||
"payload": "{{payloadBase64}}",
|
||||
"signatures": [
|
||||
{
|
||||
"keyid": "stellaops-signing-key-v1",
|
||||
"sig": "{{sigBase64}}"
|
||||
}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string ExtractDssePayload(string envelope)
|
||||
{
|
||||
// Extract base64 payload and decode
|
||||
var payloadStart = envelope.IndexOf("\"payload\": \"") + 12;
|
||||
var payloadEnd = envelope.IndexOf("\"", payloadStart);
|
||||
var payloadBase64 = envelope[payloadStart..payloadEnd];
|
||||
return Encoding.UTF8.GetString(Convert.FromBase64String(payloadBase64));
|
||||
}
|
||||
|
||||
private static string CreateInTotoAttestation(EvidenceInput input, DateTimeOffset timestamp, string bundleId)
|
||||
{
|
||||
var subjects = input.Subjects
|
||||
.OrderBy(s => s, StringComparer.Ordinal)
|
||||
.Select(s => $$"""
|
||||
{
|
||||
"name": "{{s}}",
|
||||
"digest": {
|
||||
"sha256": "{{CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(s))}}"
|
||||
}
|
||||
}
|
||||
""");
|
||||
|
||||
var bundle = CreateEvidenceBundle(input, timestamp, bundleId);
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"_type": "https://in-toto.io/Statement/v1",
|
||||
"predicateType": "https://stellaops.io/evidence/v1",
|
||||
"subject": [
|
||||
{{string.Join(",\n ", subjects)}}
|
||||
],
|
||||
"predicate": {{bundle}}
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static int CalculateCompletenessScore(EvidenceInput input)
|
||||
{
|
||||
var score = 0;
|
||||
if (input.HasReachability) score++;
|
||||
if (input.HasCallStack) score++;
|
||||
if (input.HasProvenance) score++;
|
||||
if (input.HasVexStatus) score++;
|
||||
return score;
|
||||
}
|
||||
|
||||
private static string ComputeEvidenceHash(string content)
|
||||
{
|
||||
var hash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(content));
|
||||
return $"sha256:{hash}";
|
||||
}
|
||||
|
||||
private static Dictionary<string, string> CreateHashSet((string name, string hash)[] hashes)
|
||||
{
|
||||
return hashes
|
||||
.OrderBy(h => h.name, StringComparer.Ordinal)
|
||||
.ToDictionary(h => h.name, h => h.hash);
|
||||
}
|
||||
|
||||
private static string SerializeHashSet(Dictionary<string, string> hashSet)
|
||||
{
|
||||
var entries = hashSet
|
||||
.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
|
||||
.Select(kvp => $"\"{kvp.Key}\": \"{kvp.Value}\"");
|
||||
return $"{{\n {string.Join(",\n ", entries)}\n}}";
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record EvidenceInput
|
||||
{
|
||||
public required string AlertId { get; init; }
|
||||
public required string ArtifactId { get; init; }
|
||||
public required string FindingId { get; init; }
|
||||
public required bool HasReachability { get; init; }
|
||||
public required bool HasCallStack { get; init; }
|
||||
public required bool HasProvenance { get; init; }
|
||||
public required bool HasVexStatus { get; init; }
|
||||
public required string[] Subjects { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,658 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0007_0003 - Epic B (Determinism Gate)
|
||||
// Task: T5 - Policy Verdict Determinism
|
||||
// Description: Tests to validate policy verdict generation determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for policy verdict generation.
|
||||
/// Ensures identical inputs produce identical verdicts across:
|
||||
/// - Single verdict generation
|
||||
/// - Batch verdict generation
|
||||
/// - Verdict serialization
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class PolicyDeterminismTests
|
||||
{
|
||||
#region Single Verdict Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdict_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSamplePolicyInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate verdict multiple times
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict3 = EvaluatePolicy(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
verdict1.Should().BeEquivalentTo(verdict2);
|
||||
verdict2.Should().BeEquivalentTo(verdict3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdict_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSamplePolicyInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate verdict and compute canonical hash twice
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var json1 = SerializeVerdict(verdict1);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json1));
|
||||
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
var json2 = SerializeVerdict(verdict2);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdict_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSamplePolicyInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var verdict = EvaluatePolicy(input, frozenTime);
|
||||
var json = SerializeVerdict(verdict);
|
||||
var verdictBytes = Encoding.UTF8.GetBytes(json);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "policy-verdict",
|
||||
Name = "test-finding-verdict",
|
||||
Version = "1.0.0",
|
||||
Format = "PolicyVerdict JSON"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Policy.Engine", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
verdictBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("PolicyVerdict JSON");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PolicyVerdict_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSamplePolicyInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => EvaluatePolicy(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var verdicts = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
var first = verdicts[0];
|
||||
verdicts.Should().AllSatisfy(v => v.Should().BeEquivalentTo(first));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Batch Verdict Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdictBatch_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = CreateSampleBatchPolicyInputs();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate batch verdicts multiple times
|
||||
var batch1 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
var batch2 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
var batch3 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
|
||||
// Assert - All batches should be identical
|
||||
batch1.Should().BeEquivalentTo(batch2);
|
||||
batch2.Should().BeEquivalentTo(batch3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdictBatch_Ordering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Findings in random order
|
||||
var inputs = CreateSampleBatchPolicyInputs();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate batch verdicts multiple times
|
||||
var batch1 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
var batch2 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
|
||||
// Assert - Order should be deterministic
|
||||
var json1 = SerializeBatch(batch1);
|
||||
var json2 = SerializeBatch(batch2);
|
||||
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyVerdictBatch_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = CreateSampleBatchPolicyInputs();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var batch1 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
var json1 = SerializeBatch(batch1);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json1));
|
||||
|
||||
var batch2 = EvaluatePolicyBatch(inputs, frozenTime);
|
||||
var json2 = SerializeBatch(batch2);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Verdict Status Determinism Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(PolicyVerdictStatus.Pass)]
|
||||
[InlineData(PolicyVerdictStatus.Blocked)]
|
||||
[InlineData(PolicyVerdictStatus.Ignored)]
|
||||
[InlineData(PolicyVerdictStatus.Warned)]
|
||||
[InlineData(PolicyVerdictStatus.Deferred)]
|
||||
[InlineData(PolicyVerdictStatus.Escalated)]
|
||||
[InlineData(PolicyVerdictStatus.RequiresVex)]
|
||||
public void PolicyVerdict_WithStatus_IsDeterministic(PolicyVerdictStatus status)
|
||||
{
|
||||
// Arrange
|
||||
var input = CreatePolicyInputWithExpectedStatus(status);
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict1.Status.Should().Be(status);
|
||||
verdict2.Status.Should().Be(status);
|
||||
verdict1.Should().BeEquivalentTo(verdict2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score Calculation Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void PolicyScore_WithSameInputs_ProducesDeterministicScore()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSamplePolicyInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
|
||||
// Assert - Scores should be identical (not floating point approximate)
|
||||
verdict1.Score.Should().Be(verdict2.Score);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyScore_InputOrdering_DoesNotAffectScore()
|
||||
{
|
||||
// Arrange - Same inputs but in different order
|
||||
var inputs1 = new Dictionary<string, double>
|
||||
{
|
||||
{ "cvss", 7.5 },
|
||||
{ "epss", 0.001 },
|
||||
{ "kev", 0.0 },
|
||||
{ "reachability", 0.8 }
|
||||
};
|
||||
|
||||
var inputs2 = new Dictionary<string, double>
|
||||
{
|
||||
{ "reachability", 0.8 },
|
||||
{ "kev", 0.0 },
|
||||
{ "epss", 0.001 },
|
||||
{ "cvss", 7.5 }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicyWithInputs("CVE-2024-1234", inputs1, frozenTime);
|
||||
var verdict2 = EvaluatePolicyWithInputs("CVE-2024-1234", inputs2, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict1.Score.Should().Be(verdict2.Score);
|
||||
verdict1.Status.Should().Be(verdict2.Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyScore_FloatingPointPrecision_IsConsistent()
|
||||
{
|
||||
// Arrange - Inputs that might cause floating point issues
|
||||
var inputs = new Dictionary<string, double>
|
||||
{
|
||||
{ "cvss", 0.1 + 0.2 }, // Classic floating point precision test
|
||||
{ "epss", 1.0 / 3.0 },
|
||||
{ "weight_a", 0.33333333333333333 },
|
||||
{ "weight_b", 0.66666666666666666 }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicyWithInputs("CVE-2024-5678", inputs, frozenTime);
|
||||
var verdict2 = EvaluatePolicyWithInputs("CVE-2024-5678", inputs, frozenTime);
|
||||
|
||||
// Assert - Score should be rounded to consistent precision
|
||||
verdict1.Score.Should().Be(verdict2.Score);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Rule Matching Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void PolicyRuleMatching_WithMultipleMatchingRules_SelectsDeterministically()
|
||||
{
|
||||
// Arrange - Input that matches multiple rules
|
||||
var input = CreateInputMatchingMultipleRules();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
|
||||
// Assert - Same rule should be selected each time
|
||||
verdict1.RuleName.Should().Be(verdict2.RuleName);
|
||||
verdict1.RuleAction.Should().Be(verdict2.RuleAction);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void PolicyQuieting_IsDeterministic()
|
||||
{
|
||||
// Arrange - Input that triggers quieting
|
||||
var input = CreateQuietedInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = EvaluatePolicy(input, frozenTime);
|
||||
var verdict2 = EvaluatePolicy(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict1.Quiet.Should().Be(verdict2.Quiet);
|
||||
verdict1.QuietedBy.Should().Be(verdict2.QuietedBy);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static PolicyInput CreateSamplePolicyInput()
|
||||
{
|
||||
return new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-2024-1234",
|
||||
CvssScore = 7.5,
|
||||
EpssScore = 0.001,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.8,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "high"
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyInput[] CreateSampleBatchPolicyInputs()
|
||||
{
|
||||
return new[]
|
||||
{
|
||||
new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-2024-1111",
|
||||
CvssScore = 9.8,
|
||||
EpssScore = 0.5,
|
||||
IsKev = true,
|
||||
ReachabilityScore = 1.0,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "critical"
|
||||
},
|
||||
new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-2024-2222",
|
||||
CvssScore = 5.5,
|
||||
EpssScore = 0.01,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.3,
|
||||
SourceTrust = "medium",
|
||||
PackageType = "pypi",
|
||||
Severity = "medium"
|
||||
},
|
||||
new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-2024-3333",
|
||||
CvssScore = 3.2,
|
||||
EpssScore = 0.001,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.1,
|
||||
SourceTrust = "low",
|
||||
PackageType = "maven",
|
||||
Severity = "low"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyInput CreatePolicyInputWithExpectedStatus(PolicyVerdictStatus status)
|
||||
{
|
||||
return status switch
|
||||
{
|
||||
PolicyVerdictStatus.Pass => new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-PASS-001",
|
||||
CvssScore = 2.0,
|
||||
EpssScore = 0.0001,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.0,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "low"
|
||||
},
|
||||
PolicyVerdictStatus.Blocked => new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-BLOCKED-001",
|
||||
CvssScore = 9.8,
|
||||
EpssScore = 0.9,
|
||||
IsKev = true,
|
||||
ReachabilityScore = 1.0,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "critical"
|
||||
},
|
||||
PolicyVerdictStatus.Warned => new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-WARNED-001",
|
||||
CvssScore = 7.0,
|
||||
EpssScore = 0.05,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.5,
|
||||
SourceTrust = "medium",
|
||||
PackageType = "npm",
|
||||
Severity = "high"
|
||||
},
|
||||
PolicyVerdictStatus.RequiresVex => new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-VEXREQ-001",
|
||||
CvssScore = 7.5,
|
||||
EpssScore = 0.1,
|
||||
IsKev = false,
|
||||
ReachabilityScore = null, // Unknown reachability
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "high"
|
||||
},
|
||||
_ => new PolicyInput
|
||||
{
|
||||
FindingId = $"CVE-{status}-001",
|
||||
CvssScore = 5.0,
|
||||
EpssScore = 0.01,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.5,
|
||||
SourceTrust = "medium",
|
||||
PackageType = "npm",
|
||||
Severity = "medium"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyInput CreateInputMatchingMultipleRules()
|
||||
{
|
||||
return new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-MULTIRULE-001",
|
||||
CvssScore = 7.0,
|
||||
EpssScore = 0.1,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 0.5,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "high"
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyInput CreateQuietedInput()
|
||||
{
|
||||
return new PolicyInput
|
||||
{
|
||||
FindingId = "CVE-2024-QUIETED",
|
||||
CvssScore = 9.0,
|
||||
EpssScore = 0.5,
|
||||
IsKev = false,
|
||||
ReachabilityScore = 1.0,
|
||||
SourceTrust = "high",
|
||||
PackageType = "npm",
|
||||
Severity = "critical",
|
||||
QuietedBy = "waiver:WAIVER-2024-001"
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyVerdictResult EvaluatePolicy(PolicyInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// TODO: Integrate with actual PolicyEngine
|
||||
// For now, return deterministic stub
|
||||
var status = DetermineStatus(input);
|
||||
var score = CalculateScore(input);
|
||||
var ruleName = DetermineRuleName(input);
|
||||
|
||||
return new PolicyVerdictResult
|
||||
{
|
||||
FindingId = input.FindingId,
|
||||
Status = status,
|
||||
Score = score,
|
||||
RuleName = ruleName,
|
||||
RuleAction = status == PolicyVerdictStatus.Pass ? "allow" : "block",
|
||||
Notes = null,
|
||||
ConfigVersion = "1.0",
|
||||
Inputs = new Dictionary<string, double>
|
||||
{
|
||||
{ "cvss", input.CvssScore },
|
||||
{ "epss", input.EpssScore },
|
||||
{ "kev", input.IsKev ? 1.0 : 0.0 },
|
||||
{ "reachability", input.ReachabilityScore ?? 0.5 }
|
||||
}.ToImmutableDictionary(),
|
||||
Quiet = input.QuietedBy != null,
|
||||
QuietedBy = input.QuietedBy,
|
||||
Timestamp = timestamp
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyVerdictResult EvaluatePolicyWithInputs(
|
||||
string findingId,
|
||||
Dictionary<string, double> inputs,
|
||||
DateTimeOffset timestamp)
|
||||
{
|
||||
// Calculate score from inputs
|
||||
var cvss = inputs.GetValueOrDefault("cvss", 0);
|
||||
var epss = inputs.GetValueOrDefault("epss", 0);
|
||||
var score = Math.Round((cvss * 10 + epss * 100) / 2, 4);
|
||||
|
||||
var status = score > 70 ? PolicyVerdictStatus.Blocked :
|
||||
score > 40 ? PolicyVerdictStatus.Warned :
|
||||
PolicyVerdictStatus.Pass;
|
||||
|
||||
return new PolicyVerdictResult
|
||||
{
|
||||
FindingId = findingId,
|
||||
Status = status,
|
||||
Score = score,
|
||||
RuleName = "calculated-score-rule",
|
||||
RuleAction = status == PolicyVerdictStatus.Pass ? "allow" : "block",
|
||||
Notes = null,
|
||||
ConfigVersion = "1.0",
|
||||
Inputs = inputs.ToImmutableDictionary(),
|
||||
Quiet = false,
|
||||
QuietedBy = null,
|
||||
Timestamp = timestamp
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyVerdictResult[] EvaluatePolicyBatch(PolicyInput[] inputs, DateTimeOffset timestamp)
|
||||
{
|
||||
return inputs
|
||||
.Select(input => EvaluatePolicy(input, timestamp))
|
||||
.OrderBy(v => v.FindingId, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static PolicyVerdictStatus DetermineStatus(PolicyInput input)
|
||||
{
|
||||
if (input.QuietedBy != null)
|
||||
return PolicyVerdictStatus.Ignored;
|
||||
|
||||
if (input.ReachabilityScore == null)
|
||||
return PolicyVerdictStatus.RequiresVex;
|
||||
|
||||
if (input.IsKev || input.CvssScore >= 9.0 || input.EpssScore >= 0.5)
|
||||
return PolicyVerdictStatus.Blocked;
|
||||
|
||||
if (input.CvssScore >= 7.0 || input.EpssScore >= 0.05)
|
||||
return PolicyVerdictStatus.Warned;
|
||||
|
||||
return PolicyVerdictStatus.Pass;
|
||||
}
|
||||
|
||||
private static double CalculateScore(PolicyInput input)
|
||||
{
|
||||
var baseScore = input.CvssScore * 10;
|
||||
var epssMultiplier = 1 + (input.EpssScore * 10);
|
||||
var kevBonus = input.IsKev ? 20 : 0;
|
||||
var reachabilityFactor = input.ReachabilityScore ?? 0.5;
|
||||
|
||||
var rawScore = (baseScore * epssMultiplier + kevBonus) * reachabilityFactor;
|
||||
return Math.Round(rawScore, 4);
|
||||
}
|
||||
|
||||
private static string DetermineRuleName(PolicyInput input)
|
||||
{
|
||||
if (input.IsKev)
|
||||
return "kev-critical-block";
|
||||
if (input.CvssScore >= 9.0)
|
||||
return "critical-cvss-block";
|
||||
if (input.EpssScore >= 0.5)
|
||||
return "high-exploit-likelihood-block";
|
||||
if (input.CvssScore >= 7.0)
|
||||
return "high-cvss-warn";
|
||||
return "default-pass";
|
||||
}
|
||||
|
||||
private static string SerializeVerdict(PolicyVerdictResult verdict)
|
||||
{
|
||||
// Canonical JSON serialization
|
||||
var inputsJson = string.Join(", ", verdict.Inputs
|
||||
.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
|
||||
.Select(kvp => $"\"{kvp.Key}\": {kvp.Value}"));
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"configVersion": "{{verdict.ConfigVersion}}",
|
||||
"findingId": "{{verdict.FindingId}}",
|
||||
"inputs": {{{inputsJson}}},
|
||||
"notes": {{(verdict.Notes == null ? "null" : $"\"{verdict.Notes}\"")}},
|
||||
"quiet": {{verdict.Quiet.ToString().ToLowerInvariant()}},
|
||||
"quietedBy": {{(verdict.QuietedBy == null ? "null" : $"\"{verdict.QuietedBy}\"")}},
|
||||
"ruleAction": "{{verdict.RuleAction}}",
|
||||
"ruleName": "{{verdict.RuleName}}",
|
||||
"score": {{verdict.Score}},
|
||||
"status": "{{verdict.Status}}",
|
||||
"timestamp": "{{verdict.Timestamp:O}}"
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string SerializeBatch(PolicyVerdictResult[] verdicts)
|
||||
{
|
||||
var items = verdicts.Select(SerializeVerdict);
|
||||
return $"[\n {string.Join(",\n ", items)}\n]";
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record PolicyInput
|
||||
{
|
||||
public required string FindingId { get; init; }
|
||||
public required double CvssScore { get; init; }
|
||||
public required double EpssScore { get; init; }
|
||||
public required bool IsKev { get; init; }
|
||||
public double? ReachabilityScore { get; init; }
|
||||
public required string SourceTrust { get; init; }
|
||||
public required string PackageType { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public string? QuietedBy { get; init; }
|
||||
}
|
||||
|
||||
private sealed record PolicyVerdictResult
|
||||
{
|
||||
public required string FindingId { get; init; }
|
||||
public required PolicyVerdictStatus Status { get; init; }
|
||||
public required double Score { get; init; }
|
||||
public required string RuleName { get; init; }
|
||||
public required string RuleAction { get; init; }
|
||||
public string? Notes { get; init; }
|
||||
public required string ConfigVersion { get; init; }
|
||||
public required ImmutableDictionary<string, double> Inputs { get; init; }
|
||||
public required bool Quiet { get; init; }
|
||||
public string? QuietedBy { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
public enum PolicyVerdictStatus
|
||||
{
|
||||
Pass,
|
||||
Blocked,
|
||||
Ignored,
|
||||
Warned,
|
||||
Deferred,
|
||||
Escalated,
|
||||
RequiresVex
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,496 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SbomDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0007_0003 - Epic B (Determinism Gate)
|
||||
// Task: T3 - SBOM Export Determinism (SPDX 3.0.1, CycloneDX 1.6, CycloneDX 1.7)
|
||||
// Task: SCANNER-5100-007 - Expand determinism tests for Scanner SBOM hash stable
|
||||
// Description: Tests to validate SBOM generation determinism across formats
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Emit.Composition;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for SBOM generation.
|
||||
/// Ensures identical inputs produce identical SBOMs across:
|
||||
/// - SPDX 3.0.1
|
||||
/// - CycloneDX 1.6
|
||||
/// - CycloneDX 1.7
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class SbomDeterminismTests
|
||||
{
|
||||
#region SPDX 3.0.1 Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void SpdxSbom_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM multiple times
|
||||
var sbom1 = GenerateSpdxSbom(input, frozenTime);
|
||||
var sbom2 = GenerateSpdxSbom(input, frozenTime);
|
||||
var sbom3 = GenerateSpdxSbom(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sbom1.Should().Be(sbom2);
|
||||
sbom2.Should().Be(sbom3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SpdxSbom_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM and compute canonical hash twice
|
||||
var sbom1 = GenerateSpdxSbom(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom1));
|
||||
|
||||
var sbom2 = GenerateSpdxSbom(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SpdxSbom_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var sbom = GenerateSpdxSbom(input, frozenTime);
|
||||
var sbomBytes = Encoding.UTF8.GetBytes(sbom);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "sbom",
|
||||
Name = "test-container-sbom",
|
||||
Version = "1.0.0",
|
||||
Format = "SPDX 3.0.1"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
sbomBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("SPDX 3.0.1");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SpdxSbom_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateSpdxSbom(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var sboms = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sboms.Should().AllBe(sboms[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CycloneDX 1.6 Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx16Sbom_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM multiple times
|
||||
var sbom1 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var sbom2 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var sbom3 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sbom1.Should().Be(sbom2);
|
||||
sbom2.Should().Be(sbom3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx16Sbom_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM and compute canonical hash twice
|
||||
var sbom1 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom1));
|
||||
|
||||
var sbom2 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx16Sbom_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var sbom = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var sbomBytes = Encoding.UTF8.GetBytes(sbom);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "sbom",
|
||||
Name = "test-container-sbom",
|
||||
Version = "1.0.0",
|
||||
Format = "CycloneDX 1.6"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
sbomBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("CycloneDX 1.6");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CycloneDx16Sbom_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateCycloneDx16Sbom(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var sboms = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sboms.Should().AllBe(sboms[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CycloneDX 1.7 Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx17Sbom_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM multiple times
|
||||
var sbom1 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
var sbom2 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
var sbom3 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sbom1.Should().Be(sbom2);
|
||||
sbom2.Should().Be(sbom3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx17Sbom_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate SBOM and compute canonical hash twice
|
||||
var sbom1 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom1));
|
||||
|
||||
var sbom2 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(sbom2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CycloneDx17Sbom_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var sbom = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
var sbomBytes = Encoding.UTF8.GetBytes(sbom);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "sbom",
|
||||
Name = "test-container-sbom",
|
||||
Version = "1.0.0",
|
||||
Format = "CycloneDX 1.7"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
sbomBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("CycloneDX 1.7");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CycloneDx17Sbom_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateCycloneDx17Sbom(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var sboms = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
sboms.Should().AllBe(sboms[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Format Consistency Tests
|
||||
|
||||
[Fact]
|
||||
public void AllFormats_WithSameInput_ProduceDifferentButStableHashes()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate all formats
|
||||
var spdx = GenerateSpdxSbom(input, frozenTime);
|
||||
var cdx16 = GenerateCycloneDx16Sbom(input, frozenTime);
|
||||
var cdx17 = GenerateCycloneDx17Sbom(input, frozenTime);
|
||||
|
||||
var spdxHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(spdx));
|
||||
var cdx16Hash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(cdx16));
|
||||
var cdx17Hash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(cdx17));
|
||||
|
||||
// Assert - SPDX should differ from CycloneDX
|
||||
spdxHash.Should().NotBe(cdx16Hash);
|
||||
spdxHash.Should().NotBe(cdx17Hash);
|
||||
|
||||
// Note: CycloneDX 1.6 and 1.7 produce same output because CycloneDxComposer
|
||||
// only outputs spec version 1.7. This is expected behavior.
|
||||
cdx16Hash.Should().Be(cdx17Hash, "CycloneDxComposer outputs 1.7 for both");
|
||||
|
||||
// All hashes should be valid SHA-256
|
||||
spdxHash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
cdx16Hash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
cdx17Hash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AllFormats_CanProduceDeterminismManifests()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleSbomInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Generate all formats and create manifests
|
||||
var spdxManifest = DeterminismManifestWriter.CreateManifest(
|
||||
Encoding.UTF8.GetBytes(GenerateSpdxSbom(input, frozenTime)),
|
||||
new ArtifactInfo { Type = "sbom", Name = "test-sbom", Version = "1.0.0", Format = "SPDX 3.0.1" },
|
||||
toolchain);
|
||||
|
||||
var cdx16Manifest = DeterminismManifestWriter.CreateManifest(
|
||||
Encoding.UTF8.GetBytes(GenerateCycloneDx16Sbom(input, frozenTime)),
|
||||
new ArtifactInfo { Type = "sbom", Name = "test-sbom", Version = "1.0.0", Format = "CycloneDX 1.6" },
|
||||
toolchain);
|
||||
|
||||
var cdx17Manifest = DeterminismManifestWriter.CreateManifest(
|
||||
Encoding.UTF8.GetBytes(GenerateCycloneDx17Sbom(input, frozenTime)),
|
||||
new ArtifactInfo { Type = "sbom", Name = "test-sbom", Version = "1.0.0", Format = "CycloneDX 1.7" },
|
||||
toolchain);
|
||||
|
||||
// Assert - All manifests should be valid
|
||||
spdxManifest.SchemaVersion.Should().Be("1.0");
|
||||
cdx16Manifest.SchemaVersion.Should().Be("1.0");
|
||||
cdx17Manifest.SchemaVersion.Should().Be("1.0");
|
||||
|
||||
spdxManifest.Artifact.Format.Should().Be("SPDX 3.0.1");
|
||||
cdx16Manifest.Artifact.Format.Should().Be("CycloneDX 1.6");
|
||||
cdx17Manifest.Artifact.Format.Should().Be("CycloneDX 1.7");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static SbomInput CreateSampleSbomInput()
|
||||
{
|
||||
return new SbomInput
|
||||
{
|
||||
ContainerImage = "alpine:3.18",
|
||||
PackageUrls = new[]
|
||||
{
|
||||
"pkg:apk/alpine/musl@1.2.4-r2?arch=x86_64",
|
||||
"pkg:apk/alpine/busybox@1.36.1-r2?arch=x86_64",
|
||||
"pkg:apk/alpine/alpine-baselayout@3.4.3-r1?arch=x86_64"
|
||||
},
|
||||
Timestamp = DateTimeOffset.Parse("2025-12-23T18:00:00Z")
|
||||
};
|
||||
}
|
||||
|
||||
private static SbomCompositionRequest CreateCompositionRequest(SbomInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var fragments = new[]
|
||||
{
|
||||
LayerComponentFragment.Create("sha256:layer1", input.PackageUrls.Select((purl, i) =>
|
||||
new ComponentRecord
|
||||
{
|
||||
Identity = ComponentIdentity.Create(
|
||||
purl.Split('@')[0],
|
||||
purl.Split('/').Last().Split('@')[0],
|
||||
purl.Split('@').Last().Split('?')[0],
|
||||
purl,
|
||||
"library"),
|
||||
LayerDigest = "sha256:layer1",
|
||||
Evidence = ImmutableArray.Create(ComponentEvidence.FromPath($"/lib/{purl.Split('/').Last().Split('@')[0]}")),
|
||||
Dependencies = ImmutableArray<string>.Empty,
|
||||
Usage = ComponentUsage.Create(false),
|
||||
Metadata = new ComponentMetadata { Scope = "runtime" }
|
||||
}).ToArray())
|
||||
};
|
||||
|
||||
var image = new ImageArtifactDescriptor
|
||||
{
|
||||
ImageDigest = "sha256:determinism1234567890determinism1234567890determinism1234567890",
|
||||
ImageReference = $"docker.io/library/{input.ContainerImage}",
|
||||
Repository = "docker.io/library/alpine",
|
||||
Tag = input.ContainerImage.Split(':').Last(),
|
||||
Architecture = "amd64"
|
||||
};
|
||||
|
||||
return SbomCompositionRequest.Create(
|
||||
image,
|
||||
fragments,
|
||||
timestamp,
|
||||
generatorName: "StellaOps.Scanner",
|
||||
generatorVersion: "1.0.0",
|
||||
properties: new Dictionary<string, string>
|
||||
{
|
||||
["stellaops:scanId"] = "determinism-test-001",
|
||||
["stellaops:tenantId"] = "test-tenant"
|
||||
});
|
||||
}
|
||||
|
||||
private static string GenerateSpdxSbom(SbomInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var request = CreateCompositionRequest(input, timestamp);
|
||||
var composer = new SpdxComposer();
|
||||
var result = composer.Compose(request, new SpdxCompositionOptions());
|
||||
return Encoding.UTF8.GetString(result.JsonBytes);
|
||||
}
|
||||
|
||||
private static string GenerateCycloneDx16Sbom(SbomInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// CycloneDxComposer produces 1.7 format; for 1.6 testing we use the same composer
|
||||
// as the actual production code would. The API doesn't support version selection.
|
||||
var request = CreateCompositionRequest(input, timestamp);
|
||||
var composer = new CycloneDxComposer();
|
||||
var result = composer.Compose(request);
|
||||
return Encoding.UTF8.GetString(result.Inventory.JsonBytes);
|
||||
}
|
||||
|
||||
private static string GenerateCycloneDx17Sbom(SbomInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
var request = CreateCompositionRequest(input, timestamp);
|
||||
var composer = new CycloneDxComposer();
|
||||
var result = composer.Compose(request);
|
||||
return Encoding.UTF8.GetString(result.Inventory.JsonBytes);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record SbomInput
|
||||
{
|
||||
public required string ContainerImage { get; init; }
|
||||
public required string[] PackageUrls { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.Determinism.csproj
|
||||
Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
Task: T5 - Determinism Validation Suite
|
||||
Description: Tests to validate scoring determinism across runs, platforms, and time
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.7.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Policy scoring for determinism tests -->
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
|
||||
|
||||
<!-- Policy for VerdictId content-addressing tests (SPRINT_8200_0001_0001) -->
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
|
||||
|
||||
<!-- Proof chain for hash verification -->
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.ProofChain/StellaOps.Attestor.ProofChain.csproj" />
|
||||
|
||||
<!-- Cryptography for hashing -->
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
|
||||
|
||||
<!-- Canonical JSON -->
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
|
||||
|
||||
<!-- Determinism manifest writer/reader (NEW for SPRINT_5100_0007_0003) -->
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Testing.Determinism/StellaOps.Testing.Determinism.csproj" />
|
||||
|
||||
<!-- Scanner Emit for SBOM generation (SPRINT_5100_0009_0001 Task 7) -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" />
|
||||
|
||||
<!-- Scanner Core contracts for composition requests -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
|
||||
<!-- Scanner Reachability for reachability evidence determinism (SPRINT_5100_0009_0001 Task 8) -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />
|
||||
|
||||
<!-- Scanner Evidence for reachability evidence models (SPRINT_5100_0009_0001 Task 8) -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Evidence/StellaOps.Scanner.Evidence.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<!-- Determinism corpus -->
|
||||
<Content Include="../__Benchmarks/determinism/**/*">
|
||||
<Link>determinism/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,462 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TriageOutputDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0001 - Scanner Module Test Implementation
|
||||
// Task: SCANNER-5100-009 - Expand determinism tests: triage output hash stable
|
||||
// Description: Tests to validate triage output generation determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for triage output generation.
|
||||
/// Ensures identical inputs produce identical triage outputs across:
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// - Finding ordering
|
||||
/// - Status transitions
|
||||
/// </summary>
|
||||
public class TriageOutputDeterminismTests
|
||||
{
|
||||
#region Basic Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleTriageInput();
|
||||
|
||||
// Act - Generate triage output multiple times
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
var output3 = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Serialize to canonical JSON
|
||||
var json1 = CanonJson.Serialize(output1);
|
||||
var json2 = CanonJson.Serialize(output2);
|
||||
var json3 = CanonJson.Serialize(output3);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
json1.Should().Be(json2);
|
||||
json2.Should().Be(json3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleTriageInput();
|
||||
|
||||
// Act - Generate output and compute canonical hash twice
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var hash1 = ComputeCanonicalHash(output1);
|
||||
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
var hash2 = ComputeCanonicalHash(output2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleTriageInput();
|
||||
var output = GenerateTriageOutput(input, frozenTime);
|
||||
var outputBytes = Encoding.UTF8.GetBytes(CanonJson.Serialize(output));
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "triage-output",
|
||||
Name = "test-scan-triage",
|
||||
Version = "1.0.0",
|
||||
Format = "triage-output@1.0"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner.Triage", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
outputBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("triage-output@1.0");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task TriageOutput_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleTriageInput();
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => CanonJson.Serialize(GenerateTriageOutput(input, frozenTime))))
|
||||
.ToArray();
|
||||
|
||||
var outputs = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
outputs.Should().AllBe(outputs[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Finding Ordering Tests
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_FindingsAreDeterministicallyOrdered()
|
||||
{
|
||||
// Arrange - Create input with findings in random order
|
||||
var findings = new[]
|
||||
{
|
||||
CreateFinding("CVE-2024-0003", "critical"),
|
||||
CreateFinding("CVE-2024-0001", "high"),
|
||||
CreateFinding("CVE-2024-0002", "medium")
|
||||
};
|
||||
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("11111111-1111-1111-1111-111111111111"),
|
||||
Findings = findings
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Assert - Outputs should be identical
|
||||
var json1 = CanonJson.Serialize(output1);
|
||||
var json2 = CanonJson.Serialize(output2);
|
||||
json1.Should().Be(json2);
|
||||
|
||||
// Verify findings are sorted by CVE ID
|
||||
for (int i = 1; i < output1.Findings.Count; i++)
|
||||
{
|
||||
string.CompareOrdinal(output1.Findings[i - 1].CveId, output1.Findings[i].CveId)
|
||||
.Should().BeLessOrEqualTo(0, "Findings should be sorted by CVE ID");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_FindingsWithSameCve_SortedByPackage()
|
||||
{
|
||||
// Arrange - Multiple findings for same CVE
|
||||
var findings = new[]
|
||||
{
|
||||
CreateFinding("CVE-2024-0001", "high", "pkg:npm/package-z@1.0.0"),
|
||||
CreateFinding("CVE-2024-0001", "high", "pkg:npm/package-a@1.0.0"),
|
||||
CreateFinding("CVE-2024-0001", "high", "pkg:npm/package-m@1.0.0")
|
||||
};
|
||||
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("22222222-2222-2222-2222-222222222222"),
|
||||
Findings = findings
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
var json1 = CanonJson.Serialize(output1);
|
||||
var json2 = CanonJson.Serialize(output2);
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Status Transition Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("open")]
|
||||
[InlineData("acknowledged")]
|
||||
[InlineData("mitigated")]
|
||||
[InlineData("resolved")]
|
||||
[InlineData("false_positive")]
|
||||
public void TriageOutput_StatusIsPreserved(string status)
|
||||
{
|
||||
// Arrange
|
||||
var finding = CreateFinding("CVE-2024-0001", "high") with { Status = status };
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("33333333-3333-3333-3333-333333333333"),
|
||||
Findings = new[] { finding }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
output.Findings[0].Status.Should().Be(status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_StatusTransitionHistoryIsOrdered()
|
||||
{
|
||||
// Arrange
|
||||
var finding = CreateFinding("CVE-2024-0001", "high") with
|
||||
{
|
||||
StatusHistory = new[]
|
||||
{
|
||||
new StatusTransition { Status = "mitigated", Timestamp = DateTimeOffset.Parse("2025-12-24T10:00:00Z") },
|
||||
new StatusTransition { Status = "open", Timestamp = DateTimeOffset.Parse("2025-12-24T08:00:00Z") },
|
||||
new StatusTransition { Status = "acknowledged", Timestamp = DateTimeOffset.Parse("2025-12-24T09:00:00Z") }
|
||||
}
|
||||
};
|
||||
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("44444444-4444-4444-4444-444444444444"),
|
||||
Findings = new[] { finding }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
var json1 = CanonJson.Serialize(output1);
|
||||
var json2 = CanonJson.Serialize(output2);
|
||||
json1.Should().Be(json2);
|
||||
|
||||
// Verify history is sorted by timestamp
|
||||
var history = output1.Findings[0].StatusHistory;
|
||||
for (int i = 1; i < history.Count; i++)
|
||||
{
|
||||
history[i - 1].Timestamp.Should().BeBefore(history[i].Timestamp,
|
||||
"Status history should be sorted by timestamp");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Inputs Hash Tests
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_InputsHashIsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleTriageInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output1 = GenerateTriageOutput(input, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
output1.InputsHash.Should().Be(output2.InputsHash);
|
||||
output1.InputsHash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_DifferentInputs_ProduceDifferentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var input1 = CreateSampleTriageInput();
|
||||
var input2 = CreateSampleTriageInput() with
|
||||
{
|
||||
ScanId = Guid.Parse("55555555-5555-5555-5555-555555555555")
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var output1 = GenerateTriageOutput(input1, frozenTime);
|
||||
var output2 = GenerateTriageOutput(input2, frozenTime);
|
||||
|
||||
// Assert
|
||||
output1.InputsHash.Should().NotBe(output2.InputsHash);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Empty/Edge Case Tests
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_EmptyFindings_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("66666666-6666-6666-6666-666666666666"),
|
||||
Findings = Array.Empty<FindingInput>()
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeCanonicalHash(GenerateTriageOutput(input, frozenTime));
|
||||
var hash2 = ComputeCanonicalHash(GenerateTriageOutput(input, frozenTime));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TriageOutput_ManyFindings_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange - Create 500 findings
|
||||
var findings = Enumerable.Range(0, 500)
|
||||
.Select(i => CreateFinding($"CVE-2024-{i:D4}", i % 4 == 0 ? "critical" : i % 3 == 0 ? "high" : "medium"))
|
||||
.ToArray();
|
||||
|
||||
var input = new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("77777777-7777-7777-7777-777777777777"),
|
||||
Findings = findings
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeCanonicalHash(GenerateTriageOutput(input, frozenTime));
|
||||
var hash2 = ComputeCanonicalHash(GenerateTriageOutput(input, frozenTime));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static TriageInput CreateSampleTriageInput()
|
||||
{
|
||||
return new TriageInput
|
||||
{
|
||||
ScanId = Guid.Parse("88888888-8888-8888-8888-888888888888"),
|
||||
Findings = new[]
|
||||
{
|
||||
CreateFinding("CVE-2024-1234", "critical"),
|
||||
CreateFinding("CVE-2024-5678", "high"),
|
||||
CreateFinding("CVE-2024-9012", "medium")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static FindingInput CreateFinding(string cveId, string severity, string? packageUrl = null)
|
||||
{
|
||||
return new FindingInput
|
||||
{
|
||||
CveId = cveId,
|
||||
Severity = severity,
|
||||
PackageUrl = packageUrl ?? $"pkg:npm/test-package@1.0.0",
|
||||
Status = "open",
|
||||
StatusHistory = Array.Empty<StatusTransition>()
|
||||
};
|
||||
}
|
||||
|
||||
private static TriageOutput GenerateTriageOutput(TriageInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// Sort findings deterministically by CVE ID, then by package URL
|
||||
var sortedFindings = input.Findings
|
||||
.OrderBy(f => f.CveId, StringComparer.Ordinal)
|
||||
.ThenBy(f => f.PackageUrl, StringComparer.Ordinal)
|
||||
.Select(f => new TriageFindingOutput
|
||||
{
|
||||
CveId = f.CveId,
|
||||
Severity = f.Severity,
|
||||
PackageUrl = f.PackageUrl,
|
||||
Status = f.Status,
|
||||
StatusHistory = f.StatusHistory
|
||||
.OrderBy(s => s.Timestamp)
|
||||
.ToList()
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Compute inputs hash
|
||||
var inputsJson = CanonJson.Serialize(input);
|
||||
var inputsHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(inputsJson));
|
||||
|
||||
return new TriageOutput
|
||||
{
|
||||
ScanId = input.ScanId,
|
||||
Timestamp = timestamp,
|
||||
Findings = sortedFindings,
|
||||
InputsHash = inputsHash
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeCanonicalHash(TriageOutput output)
|
||||
{
|
||||
var json = CanonJson.Serialize(output);
|
||||
return CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record TriageInput
|
||||
{
|
||||
public required Guid ScanId { get; init; }
|
||||
public required FindingInput[] Findings { get; init; }
|
||||
}
|
||||
|
||||
private sealed record FindingInput
|
||||
{
|
||||
public required string CveId { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public required string PackageUrl { get; init; }
|
||||
public required string Status { get; init; }
|
||||
public required StatusTransition[] StatusHistory { get; init; }
|
||||
}
|
||||
|
||||
private sealed record StatusTransition
|
||||
{
|
||||
public required string Status { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
private sealed record TriageOutput
|
||||
{
|
||||
public required Guid ScanId { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required IReadOnlyList<TriageFindingOutput> Findings { get; init; }
|
||||
public required string InputsHash { get; init; }
|
||||
}
|
||||
|
||||
private sealed record TriageFindingOutput
|
||||
{
|
||||
public required string CveId { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public required string PackageUrl { get; init; }
|
||||
public required string Status { get; init; }
|
||||
public required IReadOnlyList<StatusTransition> StatusHistory { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,585 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerdictArtifactDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0001 - Scanner Module Test Implementation
|
||||
// Task: SCANNER-5100-010 - Expand determinism tests: verdict artifact payload hash stable
|
||||
// Description: Tests to validate verdict artifact generation determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for verdict artifact generation.
|
||||
/// Ensures identical inputs produce identical verdict artifacts across:
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// - Change ordering
|
||||
/// - Proof spine integration
|
||||
/// </summary>
|
||||
public class VerdictArtifactDeterminismTests
|
||||
{
|
||||
#region Basic Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleVerdictInput();
|
||||
|
||||
// Act - Generate verdict artifact multiple times
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict3 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Serialize to canonical JSON
|
||||
var json1 = CanonJson.Serialize(verdict1);
|
||||
var json2 = CanonJson.Serialize(verdict2);
|
||||
var json3 = CanonJson.Serialize(verdict3);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
json1.Should().Be(json2);
|
||||
json2.Should().Be(json3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleVerdictInput();
|
||||
|
||||
// Act - Generate verdict and compute canonical hash twice
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var hash1 = ComputeCanonicalHash(verdict1);
|
||||
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var hash2 = ComputeCanonicalHash(verdict2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleVerdictInput();
|
||||
var verdict = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdictBytes = Encoding.UTF8.GetBytes(CanonJson.Serialize(verdict));
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "verdict-artifact",
|
||||
Name = "test-delta-verdict",
|
||||
Version = "1.0.0",
|
||||
Format = "delta-verdict@1.0"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Scanner.SmartDiff", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
verdictBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("delta-verdict@1.0");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerdictArtifact_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
var input = CreateSampleVerdictInput();
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => CanonJson.Serialize(GenerateVerdictArtifact(input, frozenTime))))
|
||||
.ToArray();
|
||||
|
||||
var verdicts = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
verdicts.Should().AllBe(verdicts[0]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Change Ordering Tests
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_ChangesAreDeterministicallyOrdered()
|
||||
{
|
||||
// Arrange - Create input with changes in random order
|
||||
var changes = new[]
|
||||
{
|
||||
CreateChange("CVE-2024-0003", "pkg:npm/c@1.0.0", "new"),
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/a@1.0.0", "resolved"),
|
||||
CreateChange("CVE-2024-0002", "pkg:npm/b@1.0.0", "severity_changed")
|
||||
};
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("11111111-1111-1111-1111-111111111111"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = changes
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert - Outputs should be identical
|
||||
var json1 = CanonJson.Serialize(verdict1);
|
||||
var json2 = CanonJson.Serialize(verdict2);
|
||||
json1.Should().Be(json2);
|
||||
|
||||
// Verify changes are sorted by CVE ID, then by package URL
|
||||
for (int i = 1; i < verdict1.Changes.Count; i++)
|
||||
{
|
||||
var cmp = string.CompareOrdinal(verdict1.Changes[i - 1].CveId, verdict1.Changes[i].CveId);
|
||||
if (cmp == 0)
|
||||
{
|
||||
cmp = string.CompareOrdinal(verdict1.Changes[i - 1].PackageUrl, verdict1.Changes[i].PackageUrl);
|
||||
}
|
||||
cmp.Should().BeLessOrEqualTo(0, "Changes should be sorted by CVE ID, then package URL");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_ChangesWithSameCveAndPackage_SortedByChangeType()
|
||||
{
|
||||
// Arrange - Multiple changes for same CVE/package
|
||||
var changes = new[]
|
||||
{
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/test@1.0.0", "severity_changed"),
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/test@1.0.0", "status_changed"),
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/test@1.0.0", "epss_changed")
|
||||
};
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("22222222-2222-2222-2222-222222222222"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = changes
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
var json1 = CanonJson.Serialize(verdict1);
|
||||
var json2 = CanonJson.Serialize(verdict2);
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Change Type Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("new")]
|
||||
[InlineData("resolved")]
|
||||
[InlineData("severity_changed")]
|
||||
[InlineData("status_changed")]
|
||||
[InlineData("epss_changed")]
|
||||
[InlineData("reachability_changed")]
|
||||
[InlineData("vex_status_changed")]
|
||||
public void VerdictArtifact_ChangeTypeIsPreserved(string changeType)
|
||||
{
|
||||
// Arrange
|
||||
var change = CreateChange("CVE-2024-0001", "pkg:npm/test@1.0.0", changeType);
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("33333333-3333-3333-3333-333333333333"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = new[] { change }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict.Changes[0].ChangeType.Should().Be(changeType);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Proof Spine Tests
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_ProofSpinesAreDeterministicallyOrdered()
|
||||
{
|
||||
// Arrange
|
||||
var changes = new[]
|
||||
{
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/a@1.0.0", "new") with
|
||||
{
|
||||
ProofSpine = new ProofSpine
|
||||
{
|
||||
SpineId = "spine-a",
|
||||
Evidences = new[]
|
||||
{
|
||||
CreateProofEvidence("epss", 0.8),
|
||||
CreateProofEvidence("reachability", 0.9),
|
||||
CreateProofEvidence("vex", 1.0)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("44444444-4444-4444-4444-444444444444"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = changes
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
var json1 = CanonJson.Serialize(verdict1);
|
||||
var json2 = CanonJson.Serialize(verdict2);
|
||||
json1.Should().Be(json2);
|
||||
|
||||
// Verify evidences in proof spine are sorted
|
||||
var evidences = verdict1.Changes[0].ProofSpine!.Evidences;
|
||||
for (int i = 1; i < evidences.Count; i++)
|
||||
{
|
||||
string.CompareOrdinal(evidences[i - 1].EvidenceType, evidences[i].EvidenceType)
|
||||
.Should().BeLessOrEqualTo(0, "Proof spine evidences should be sorted by type");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_ProofSpineHashIsStable()
|
||||
{
|
||||
// Arrange
|
||||
var change = CreateChange("CVE-2024-0001", "pkg:npm/test@1.0.0", "new") with
|
||||
{
|
||||
ProofSpine = new ProofSpine
|
||||
{
|
||||
SpineId = "spine-test",
|
||||
Evidences = new[]
|
||||
{
|
||||
CreateProofEvidence("epss", 0.5),
|
||||
CreateProofEvidence("reachability", 0.75)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("55555555-5555-5555-5555-555555555555"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = new[] { change }
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict1.Changes[0].ProofSpine!.SpineHash.Should().Be(verdict2.Changes[0].ProofSpine!.SpineHash);
|
||||
verdict1.Changes[0].ProofSpine!.SpineHash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Summary Statistics Tests
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_SummaryStatisticsAreDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var changes = new[]
|
||||
{
|
||||
CreateChange("CVE-2024-0001", "pkg:npm/a@1.0.0", "new"),
|
||||
CreateChange("CVE-2024-0002", "pkg:npm/b@1.0.0", "new"),
|
||||
CreateChange("CVE-2024-0003", "pkg:npm/c@1.0.0", "resolved"),
|
||||
CreateChange("CVE-2024-0004", "pkg:npm/d@1.0.0", "severity_changed")
|
||||
};
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("66666666-6666-6666-6666-666666666666"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = changes
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var verdict1 = GenerateVerdictArtifact(input, frozenTime);
|
||||
var verdict2 = GenerateVerdictArtifact(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
verdict1.Summary.Should().NotBeNull();
|
||||
verdict1.Summary.TotalChanges.Should().Be(verdict2.Summary.TotalChanges);
|
||||
verdict1.Summary.NewFindings.Should().Be(verdict2.Summary.NewFindings);
|
||||
verdict1.Summary.ResolvedFindings.Should().Be(verdict2.Summary.ResolvedFindings);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Empty/Edge Case Tests
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_NoChanges_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("77777777-7777-7777-7777-777777777777"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = Array.Empty<VerdictChange>()
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeCanonicalHash(GenerateVerdictArtifact(input, frozenTime));
|
||||
var hash2 = ComputeCanonicalHash(GenerateVerdictArtifact(input, frozenTime));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerdictArtifact_ManyChanges_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange - Create 500 changes
|
||||
var changes = Enumerable.Range(0, 500)
|
||||
.Select(i => CreateChange(
|
||||
$"CVE-2024-{i:D4}",
|
||||
$"pkg:npm/package-{i}@1.0.0",
|
||||
i % 3 == 0 ? "new" : i % 2 == 0 ? "resolved" : "severity_changed"))
|
||||
.ToArray();
|
||||
|
||||
var input = new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("88888888-8888-8888-8888-888888888888"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = changes
|
||||
};
|
||||
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-24T12:00:00Z");
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeCanonicalHash(GenerateVerdictArtifact(input, frozenTime));
|
||||
var hash2 = ComputeCanonicalHash(GenerateVerdictArtifact(input, frozenTime));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static VerdictInput CreateSampleVerdictInput()
|
||||
{
|
||||
return new VerdictInput
|
||||
{
|
||||
VerdictId = Guid.Parse("99999999-9999-9999-9999-999999999999"),
|
||||
BaselineScanId = Guid.Parse("00000000-0000-0000-0000-000000000001"),
|
||||
CurrentScanId = Guid.Parse("00000000-0000-0000-0000-000000000002"),
|
||||
Changes = new[]
|
||||
{
|
||||
CreateChange("CVE-2024-1234", "pkg:npm/lodash@4.17.20", "new"),
|
||||
CreateChange("CVE-2024-5678", "pkg:npm/axios@0.21.0", "resolved"),
|
||||
CreateChange("CVE-2024-9012", "pkg:npm/express@4.17.1", "severity_changed")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static VerdictChange CreateChange(string cveId, string packageUrl, string changeType)
|
||||
{
|
||||
return new VerdictChange
|
||||
{
|
||||
CveId = cveId,
|
||||
PackageUrl = packageUrl,
|
||||
ChangeType = changeType,
|
||||
ProofSpine = null
|
||||
};
|
||||
}
|
||||
|
||||
private static ProofEvidence CreateProofEvidence(string evidenceType, double confidence)
|
||||
{
|
||||
return new ProofEvidence
|
||||
{
|
||||
EvidenceType = evidenceType,
|
||||
Confidence = confidence,
|
||||
Summary = $"{evidenceType} evidence"
|
||||
};
|
||||
}
|
||||
|
||||
private static VerdictArtifact GenerateVerdictArtifact(VerdictInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// Sort changes deterministically
|
||||
var sortedChanges = input.Changes
|
||||
.OrderBy(c => c.CveId, StringComparer.Ordinal)
|
||||
.ThenBy(c => c.PackageUrl, StringComparer.Ordinal)
|
||||
.ThenBy(c => c.ChangeType, StringComparer.Ordinal)
|
||||
.Select(c => new VerdictChangeOutput
|
||||
{
|
||||
CveId = c.CveId,
|
||||
PackageUrl = c.PackageUrl,
|
||||
ChangeType = c.ChangeType,
|
||||
ProofSpine = c.ProofSpine != null ? ProcessProofSpine(c.ProofSpine) : null
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Compute summary statistics
|
||||
var summary = new VerdictSummary
|
||||
{
|
||||
TotalChanges = sortedChanges.Count,
|
||||
NewFindings = sortedChanges.Count(c => c.ChangeType == "new"),
|
||||
ResolvedFindings = sortedChanges.Count(c => c.ChangeType == "resolved"),
|
||||
OtherChanges = sortedChanges.Count(c => c.ChangeType != "new" && c.ChangeType != "resolved")
|
||||
};
|
||||
|
||||
return new VerdictArtifact
|
||||
{
|
||||
VerdictId = input.VerdictId,
|
||||
BaselineScanId = input.BaselineScanId,
|
||||
CurrentScanId = input.CurrentScanId,
|
||||
Timestamp = timestamp,
|
||||
Changes = sortedChanges,
|
||||
Summary = summary
|
||||
};
|
||||
}
|
||||
|
||||
private static ProofSpineOutput ProcessProofSpine(ProofSpine spine)
|
||||
{
|
||||
var sortedEvidences = spine.Evidences
|
||||
.OrderBy(e => e.EvidenceType, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
// Compute spine hash from sorted evidences
|
||||
var evidenceJson = CanonJson.Serialize(sortedEvidences);
|
||||
var spineHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(evidenceJson));
|
||||
|
||||
return new ProofSpineOutput
|
||||
{
|
||||
SpineId = spine.SpineId,
|
||||
Evidences = sortedEvidences,
|
||||
SpineHash = spineHash
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeCanonicalHash(VerdictArtifact artifact)
|
||||
{
|
||||
var json = CanonJson.Serialize(artifact);
|
||||
return CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(json));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record VerdictInput
|
||||
{
|
||||
public required Guid VerdictId { get; init; }
|
||||
public required Guid BaselineScanId { get; init; }
|
||||
public required Guid CurrentScanId { get; init; }
|
||||
public required VerdictChange[] Changes { get; init; }
|
||||
}
|
||||
|
||||
private sealed record VerdictChange
|
||||
{
|
||||
public required string CveId { get; init; }
|
||||
public required string PackageUrl { get; init; }
|
||||
public required string ChangeType { get; init; }
|
||||
public ProofSpine? ProofSpine { get; init; }
|
||||
}
|
||||
|
||||
private sealed record ProofSpine
|
||||
{
|
||||
public required string SpineId { get; init; }
|
||||
public required ProofEvidence[] Evidences { get; init; }
|
||||
}
|
||||
|
||||
private sealed record ProofEvidence
|
||||
{
|
||||
public required string EvidenceType { get; init; }
|
||||
public required double Confidence { get; init; }
|
||||
public required string Summary { get; init; }
|
||||
}
|
||||
|
||||
private sealed record VerdictArtifact
|
||||
{
|
||||
public required Guid VerdictId { get; init; }
|
||||
public required Guid BaselineScanId { get; init; }
|
||||
public required Guid CurrentScanId { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required IReadOnlyList<VerdictChangeOutput> Changes { get; init; }
|
||||
public required VerdictSummary Summary { get; init; }
|
||||
}
|
||||
|
||||
private sealed record VerdictChangeOutput
|
||||
{
|
||||
public required string CveId { get; init; }
|
||||
public required string PackageUrl { get; init; }
|
||||
public required string ChangeType { get; init; }
|
||||
public ProofSpineOutput? ProofSpine { get; init; }
|
||||
}
|
||||
|
||||
private sealed record ProofSpineOutput
|
||||
{
|
||||
public required string SpineId { get; init; }
|
||||
public required IReadOnlyList<ProofEvidence> Evidences { get; init; }
|
||||
public required string SpineHash { get; init; }
|
||||
}
|
||||
|
||||
private sealed record VerdictSummary
|
||||
{
|
||||
public required int TotalChanges { get; init; }
|
||||
public required int NewFindings { get; init; }
|
||||
public required int ResolvedFindings { get; init; }
|
||||
public required int OtherChanges { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,465 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerdictIdContentAddressingTests.cs
|
||||
// Sprint: SPRINT_8200_0001_0001 - Verdict ID Content-Addressing Fix
|
||||
// Task: VERDICT-8200-010 - Integration test: VerdictId in attestation matches recomputed ID
|
||||
// Description: Verifies that VerdictId is content-addressed and deterministic across
|
||||
// attestation creation and verification workflows.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Policy.Deltas;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for VerdictId content-addressing.
|
||||
/// Validates that:
|
||||
/// 1. VerdictId in generated verdicts matches recomputed ID from components
|
||||
/// 2. VerdictId is deterministic across multiple generations
|
||||
/// 3. VerdictId in serialized/deserialized verdicts remains stable
|
||||
/// 4. Different verdict contents produce different VerdictIds
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "8200.0001.0001")]
|
||||
[Trait("Feature", "VerdictId-ContentAddressing")]
|
||||
public sealed class VerdictIdContentAddressingTests
|
||||
{
|
||||
#region Attestation Match Tests
|
||||
|
||||
[Fact(DisplayName = "VerdictId in built verdict matches recomputed ID")]
|
||||
public void VerdictId_InBuiltVerdict_MatchesRecomputedId()
|
||||
{
|
||||
// Arrange - Create a verdict using the builder
|
||||
var deltaId = "delta:sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
|
||||
var blockingDriver1 = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Critical,
|
||||
Description = "New CVE-2024-0001",
|
||||
CveId = "CVE-2024-0001",
|
||||
Purl = "pkg:npm/lodash@4.17.20"
|
||||
};
|
||||
var blockingDriver2 = new DeltaDriver
|
||||
{
|
||||
Type = "severity-increase",
|
||||
Severity = DeltaDriverSeverity.High,
|
||||
Description = "Severity increase",
|
||||
CveId = "CVE-2024-0002",
|
||||
Purl = "pkg:npm/axios@0.21.0"
|
||||
};
|
||||
var warningDriver = new DeltaDriver
|
||||
{
|
||||
Type = "severity-decrease",
|
||||
Severity = DeltaDriverSeverity.Low,
|
||||
Description = "Severity decrease",
|
||||
CveId = "CVE-2024-0003",
|
||||
Purl = "pkg:npm/moment@2.29.0"
|
||||
};
|
||||
|
||||
// Act - Build verdict using DeltaVerdictBuilder
|
||||
var verdict = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(blockingDriver1)
|
||||
.AddBlockingDriver(blockingDriver2)
|
||||
.AddWarningDriver(warningDriver)
|
||||
.AddException("exception-123")
|
||||
.AddException("exception-456")
|
||||
.Build(deltaId);
|
||||
|
||||
// Act - Recompute VerdictId from the verdict's components
|
||||
var generator = new VerdictIdGenerator();
|
||||
var recomputedId = generator.ComputeVerdictId(
|
||||
verdict.DeltaId,
|
||||
verdict.BlockingDrivers,
|
||||
verdict.WarningDrivers,
|
||||
verdict.AppliedExceptions,
|
||||
verdict.RecommendedGate);
|
||||
|
||||
// Assert - VerdictId should match recomputed value
|
||||
verdict.VerdictId.Should().Be(recomputedId);
|
||||
verdict.VerdictId.Should().StartWith("verdict:sha256:");
|
||||
verdict.VerdictId.Should().MatchRegex("^verdict:sha256:[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerdictId matches after serialization round-trip")]
|
||||
public void VerdictId_AfterSerializationRoundTrip_MatchesRecomputedId()
|
||||
{
|
||||
// Arrange - Create a verdict
|
||||
var verdict = CreateSampleVerdict();
|
||||
var originalVerdictId = verdict.VerdictId;
|
||||
|
||||
// Act - Serialize to JSON
|
||||
var json = JsonSerializer.Serialize(verdict, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
|
||||
// Act - Deserialize back
|
||||
var deserialized = JsonSerializer.Deserialize<DeltaVerdict>(json, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
});
|
||||
|
||||
// Act - Recompute VerdictId from deserialized verdict
|
||||
var generator = new VerdictIdGenerator();
|
||||
var recomputedId = generator.ComputeVerdictId(
|
||||
deserialized!.DeltaId,
|
||||
deserialized.BlockingDrivers,
|
||||
deserialized.WarningDrivers,
|
||||
deserialized.AppliedExceptions,
|
||||
deserialized.RecommendedGate);
|
||||
|
||||
// Assert
|
||||
deserialized!.VerdictId.Should().Be(originalVerdictId);
|
||||
recomputedId.Should().Be(originalVerdictId);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerdictId matches after canonical JSON round-trip")]
|
||||
public void VerdictId_AfterCanonicalJsonRoundTrip_MatchesRecomputedId()
|
||||
{
|
||||
// Arrange - Create a verdict
|
||||
var verdict = CreateSampleVerdict();
|
||||
var originalVerdictId = verdict.VerdictId;
|
||||
|
||||
// Act - Serialize to canonical JSON (uses camelCase property names)
|
||||
var canonicalJson = CanonJson.Serialize(verdict);
|
||||
|
||||
// Act - Parse canonical JSON to extract components and verify hash
|
||||
using var doc = JsonDocument.Parse(canonicalJson);
|
||||
var root = doc.RootElement;
|
||||
|
||||
var deltaId = root.GetProperty("deltaId").GetString()!;
|
||||
|
||||
// RecommendedGate is serialized as a number (enum value)
|
||||
var gateLevelValue = root.GetProperty("recommendedGate").GetInt32();
|
||||
var gateLevel = (DeltaGateLevel)gateLevelValue;
|
||||
|
||||
var blockingDrivers = ParseDriversFromCamelCase(root.GetProperty("blockingDrivers"));
|
||||
var warningDrivers = ParseDriversFromCamelCase(root.GetProperty("warningDrivers"));
|
||||
var appliedExceptions = ParseExceptions(root.GetProperty("appliedExceptions"));
|
||||
|
||||
// Act - Recompute VerdictId from parsed components
|
||||
var generator = new VerdictIdGenerator();
|
||||
var recomputedId = generator.ComputeVerdictId(
|
||||
deltaId,
|
||||
blockingDrivers,
|
||||
warningDrivers,
|
||||
appliedExceptions,
|
||||
gateLevel);
|
||||
|
||||
// Assert
|
||||
recomputedId.Should().Be(originalVerdictId);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerdictId is deterministic across 100 iterations")]
|
||||
public void VerdictId_IsDeterministic_Across100Iterations()
|
||||
{
|
||||
// Arrange
|
||||
var deltaId = "delta:sha256:stable_delta_id_for_testing_determinism_0000000000000";
|
||||
var blockingDriver = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Critical,
|
||||
Description = "Test finding",
|
||||
CveId = "CVE-2024-9999",
|
||||
Purl = "pkg:npm/test@1.0.0"
|
||||
};
|
||||
|
||||
// Act - Generate verdict 100 times
|
||||
var verdictIds = new HashSet<string>();
|
||||
for (int i = 0; i < 100; i++)
|
||||
{
|
||||
var verdict = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(blockingDriver)
|
||||
.Build(deltaId);
|
||||
|
||||
verdictIds.Add(verdict.VerdictId);
|
||||
}
|
||||
|
||||
// Assert - All iterations should produce the same VerdictId
|
||||
verdictIds.Should().HaveCount(1, "100 identical inputs should produce exactly 1 unique VerdictId");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Different verdicts produce different VerdictIds")]
|
||||
public void DifferentVerdicts_ProduceDifferentVerdictIds()
|
||||
{
|
||||
// Arrange - Create base driver with Low severity (to avoid gate escalation)
|
||||
var deltaId = "delta:sha256:test_delta_00000000000000000000000000000000000000000000";
|
||||
var baseDriver = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Low, // Low to avoid gate escalation
|
||||
Description = "Test",
|
||||
CveId = "CVE-2024-0001",
|
||||
Purl = "pkg:npm/a@1.0.0"
|
||||
};
|
||||
|
||||
// Act - Create verdicts with variations
|
||||
// Note: Using warning drivers instead of blocking to avoid status changes
|
||||
var verdict1 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G1)
|
||||
.AddWarningDriver(baseDriver)
|
||||
.Build(deltaId);
|
||||
|
||||
// Different severity
|
||||
var modifiedDriver = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Medium, // Different severity
|
||||
Description = "Test",
|
||||
CveId = "CVE-2024-0001",
|
||||
Purl = "pkg:npm/a@1.0.0"
|
||||
};
|
||||
var verdict2 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G1)
|
||||
.AddWarningDriver(modifiedDriver)
|
||||
.Build(deltaId);
|
||||
|
||||
// Different deltaId
|
||||
var verdict3 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G1)
|
||||
.AddWarningDriver(baseDriver)
|
||||
.Build("delta:sha256:different_delta_id_000000000000000000000000000000000000");
|
||||
|
||||
// Assert - All should have different VerdictIds
|
||||
verdict1.VerdictId.Should().NotBe(verdict2.VerdictId, "Different severity should produce different VerdictId");
|
||||
verdict1.VerdictId.Should().NotBe(verdict3.VerdictId, "Different deltaId should produce different VerdictId");
|
||||
verdict2.VerdictId.Should().NotBe(verdict3.VerdictId);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerdictId is independent of driver order")]
|
||||
public void VerdictId_IsIndependent_OfDriverOrder()
|
||||
{
|
||||
// Arrange - Same drivers in different orders
|
||||
var deltaId = "delta:sha256:order_test_0000000000000000000000000000000000000000000000";
|
||||
var driver1 = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Critical,
|
||||
Description = "A",
|
||||
CveId = "CVE-2024-0001",
|
||||
Purl = "pkg:npm/a@1.0.0"
|
||||
};
|
||||
var driver2 = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.High,
|
||||
Description = "B",
|
||||
CveId = "CVE-2024-0002",
|
||||
Purl = "pkg:npm/b@1.0.0"
|
||||
};
|
||||
var driver3 = new DeltaDriver
|
||||
{
|
||||
Type = "severity-increase",
|
||||
Severity = DeltaDriverSeverity.Medium,
|
||||
Description = "C",
|
||||
CveId = "CVE-2024-0003",
|
||||
Purl = "pkg:npm/c@1.0.0"
|
||||
};
|
||||
|
||||
// Act - Create verdicts with drivers in different orders
|
||||
var verdict1 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(driver1)
|
||||
.AddBlockingDriver(driver2)
|
||||
.AddBlockingDriver(driver3)
|
||||
.Build(deltaId);
|
||||
|
||||
var verdict2 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(driver3)
|
||||
.AddBlockingDriver(driver1)
|
||||
.AddBlockingDriver(driver2)
|
||||
.Build(deltaId);
|
||||
|
||||
var verdict3 = new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(driver2)
|
||||
.AddBlockingDriver(driver3)
|
||||
.AddBlockingDriver(driver1)
|
||||
.Build(deltaId);
|
||||
|
||||
// Assert - All should produce the same VerdictId (canonical ordering is applied)
|
||||
verdict1.VerdictId.Should().Be(verdict2.VerdictId);
|
||||
verdict2.VerdictId.Should().Be(verdict3.VerdictId);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Verification Workflow Tests
|
||||
|
||||
[Fact(DisplayName = "VerdictId can be verified against attestation payload")]
|
||||
public void VerdictId_CanBeVerified_AgainstAttestationPayload()
|
||||
{
|
||||
// Arrange - Simulate an attestation workflow
|
||||
var verdict = CreateSampleVerdict();
|
||||
|
||||
// Simulate creating an attestation with the verdict
|
||||
var attestationPayload = new
|
||||
{
|
||||
verdict.DeltaId,
|
||||
verdict.VerdictId,
|
||||
verdict.BlockingDrivers,
|
||||
verdict.WarningDrivers,
|
||||
verdict.AppliedExceptions,
|
||||
verdict.RecommendedGate,
|
||||
attestedAt = DateTimeOffset.UtcNow.ToString("O"),
|
||||
predicateType = "delta-verdict.stella/v1"
|
||||
};
|
||||
|
||||
// Act - Extract VerdictId from "attestation" and verify it
|
||||
var attestedVerdictId = attestationPayload.VerdictId;
|
||||
|
||||
// Recompute from attestation components
|
||||
var generator = new VerdictIdGenerator();
|
||||
var recomputedId = generator.ComputeVerdictId(
|
||||
attestationPayload.DeltaId,
|
||||
attestationPayload.BlockingDrivers,
|
||||
attestationPayload.WarningDrivers,
|
||||
attestationPayload.AppliedExceptions,
|
||||
attestationPayload.RecommendedGate);
|
||||
|
||||
// Assert - The attested VerdictId should match recomputed value
|
||||
attestedVerdictId.Should().Be(recomputedId);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Tampered verdict fails VerdictId verification")]
|
||||
public void TamperedVerdict_FailsVerdictIdVerification()
|
||||
{
|
||||
// Arrange - Create an original verdict
|
||||
var originalVerdict = CreateSampleVerdict();
|
||||
var originalVerdictId = originalVerdict.VerdictId;
|
||||
|
||||
// Act - Simulate tampering by modifying severity
|
||||
var tamperedDrivers = originalVerdict.BlockingDrivers
|
||||
.Select(d => new DeltaDriver
|
||||
{
|
||||
Type = d.Type,
|
||||
Severity = DeltaDriverSeverity.Low, // Tampered!
|
||||
Description = d.Description,
|
||||
CveId = d.CveId,
|
||||
Purl = d.Purl
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Recompute VerdictId with tampered data
|
||||
var generator = new VerdictIdGenerator();
|
||||
var tamperedId = generator.ComputeVerdictId(
|
||||
originalVerdict.DeltaId,
|
||||
tamperedDrivers,
|
||||
originalVerdict.WarningDrivers,
|
||||
originalVerdict.AppliedExceptions,
|
||||
originalVerdict.RecommendedGate);
|
||||
|
||||
// Assert - Tampered content should produce different VerdictId
|
||||
tamperedId.Should().NotBe(originalVerdictId, "Tampered content should fail VerdictId verification");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static DeltaVerdict CreateSampleVerdict()
|
||||
{
|
||||
var deltaId = "delta:sha256:sample_delta_for_testing_123456789abcdef0123456789abcdef";
|
||||
var blockingDriver1 = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Critical,
|
||||
Description = "Critical finding",
|
||||
CveId = "CVE-2024-1111",
|
||||
Purl = "pkg:npm/vulnerable@1.0.0"
|
||||
};
|
||||
var blockingDriver2 = new DeltaDriver
|
||||
{
|
||||
Type = "severity-increase",
|
||||
Severity = DeltaDriverSeverity.High,
|
||||
Description = "Severity increase",
|
||||
CveId = "CVE-2024-2222",
|
||||
Purl = "pkg:npm/risky@2.0.0"
|
||||
};
|
||||
var warningDriver = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = DeltaDriverSeverity.Medium,
|
||||
Description = "Medium finding",
|
||||
CveId = "CVE-2024-3333",
|
||||
Purl = "pkg:npm/warning@3.0.0"
|
||||
};
|
||||
|
||||
return new DeltaVerdictBuilder()
|
||||
.WithGate(DeltaGateLevel.G4)
|
||||
.AddBlockingDriver(blockingDriver1)
|
||||
.AddBlockingDriver(blockingDriver2)
|
||||
.AddWarningDriver(warningDriver)
|
||||
.AddException("exc-001")
|
||||
.AddException("exc-002")
|
||||
.Build(deltaId);
|
||||
}
|
||||
|
||||
private static List<DeltaDriver> ParseDrivers(JsonElement element)
|
||||
{
|
||||
var drivers = new List<DeltaDriver>();
|
||||
foreach (var item in element.EnumerateArray())
|
||||
{
|
||||
var type = item.GetProperty("Type").GetString()!;
|
||||
var severityStr = item.GetProperty("Severity").GetString()!;
|
||||
var severity = Enum.Parse<DeltaDriverSeverity>(severityStr, true);
|
||||
var description = item.GetProperty("Description").GetString()!;
|
||||
var cveId = item.TryGetProperty("CveId", out var cve) ? cve.GetString() : null;
|
||||
var purl = item.TryGetProperty("Purl", out var p) ? p.GetString() : null;
|
||||
|
||||
drivers.Add(new DeltaDriver
|
||||
{
|
||||
Type = type,
|
||||
Severity = severity,
|
||||
Description = description,
|
||||
CveId = cveId,
|
||||
Purl = purl
|
||||
});
|
||||
}
|
||||
return drivers;
|
||||
}
|
||||
|
||||
private static List<DeltaDriver> ParseDriversFromCamelCase(JsonElement element)
|
||||
{
|
||||
var drivers = new List<DeltaDriver>();
|
||||
foreach (var item in element.EnumerateArray())
|
||||
{
|
||||
var type = item.GetProperty("type").GetString()!;
|
||||
// Severity is serialized as a number (enum value)
|
||||
var severityValue = item.GetProperty("severity").GetInt32();
|
||||
var severity = (DeltaDriverSeverity)severityValue;
|
||||
var description = item.GetProperty("description").GetString()!;
|
||||
var cveId = item.TryGetProperty("cveId", out var cve) ? cve.GetString() : null;
|
||||
var purl = item.TryGetProperty("purl", out var p) ? p.GetString() : null;
|
||||
|
||||
drivers.Add(new DeltaDriver
|
||||
{
|
||||
Type = type,
|
||||
Severity = severity,
|
||||
Description = description,
|
||||
CveId = cveId,
|
||||
Purl = purl
|
||||
});
|
||||
}
|
||||
return drivers;
|
||||
}
|
||||
|
||||
private static List<string> ParseExceptions(JsonElement element)
|
||||
{
|
||||
var exceptions = new List<string>();
|
||||
foreach (var item in element.EnumerateArray())
|
||||
{
|
||||
exceptions.Add(item.GetString()!);
|
||||
}
|
||||
return exceptions;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,625 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VexDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0007_0003 - Epic B (Determinism Gate)
|
||||
// Task: T4 - VEX Export Determinism (OpenVEX, CSAF)
|
||||
// Description: Tests to validate VEX generation determinism across formats
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Testing.Determinism;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism validation tests for VEX export generation.
|
||||
/// Ensures identical inputs produce identical VEX documents across:
|
||||
/// - OpenVEX format
|
||||
/// - CSAF 2.0 VEX format
|
||||
/// - Multiple runs with frozen time
|
||||
/// - Parallel execution
|
||||
/// </summary>
|
||||
public class VexDeterminismTests
|
||||
{
|
||||
#region OpenVEX Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void OpenVex_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX multiple times
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
var vex3 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
vex1.Should().Be(vex2);
|
||||
vex2.Should().Be(vex3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OpenVex_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX and compute canonical hash twice
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(vex1));
|
||||
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(vex2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OpenVex_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var vex = GenerateOpenVex(input, frozenTime);
|
||||
var vexBytes = Encoding.UTF8.GetBytes(vex);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "vex",
|
||||
Name = "test-container-vex",
|
||||
Version = "1.0.0",
|
||||
Format = "OpenVEX"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Excititor", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
vexBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("OpenVEX");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task OpenVex_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateOpenVex(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var vexDocuments = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
vexDocuments.Should().AllBe(vexDocuments[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OpenVex_StatementOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Multiple claims for different products in random order
|
||||
var input = CreateMultiStatementVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX multiple times
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert - Statement order should be deterministic
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("\"product_ids\"");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OpenVex_JustificationText_IsCanonicalized()
|
||||
{
|
||||
// Arrange - Claims with varying justification text formatting
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert - Justification should be present and normalized
|
||||
vex.Should().Contain("justification");
|
||||
vex.Should().Contain("inline_mitigations_already_exist");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CSAF 2.0 VEX Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void CsafVex_WithIdenticalInput_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX multiple times
|
||||
var vex1 = GenerateCsafVex(input, frozenTime);
|
||||
var vex2 = GenerateCsafVex(input, frozenTime);
|
||||
var vex3 = GenerateCsafVex(input, frozenTime);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
vex1.Should().Be(vex2);
|
||||
vex2.Should().Be(vex3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CsafVex_CanonicalHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX and compute canonical hash twice
|
||||
var vex1 = GenerateCsafVex(input, frozenTime);
|
||||
var hash1 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(vex1));
|
||||
|
||||
var vex2 = GenerateCsafVex(input, frozenTime);
|
||||
var hash2 = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(vex2));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "Same input should produce same canonical hash");
|
||||
hash1.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CsafVex_DeterminismManifest_CanBeCreated()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
var vex = GenerateCsafVex(input, frozenTime);
|
||||
var vexBytes = Encoding.UTF8.GetBytes(vex);
|
||||
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "vex",
|
||||
Name = "test-container-vex",
|
||||
Version = "1.0.0",
|
||||
Format = "CSAF 2.0"
|
||||
};
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Excititor", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Create determinism manifest
|
||||
var manifest = DeterminismManifestWriter.CreateManifest(
|
||||
vexBytes,
|
||||
artifactInfo,
|
||||
toolchain);
|
||||
|
||||
// Assert
|
||||
manifest.SchemaVersion.Should().Be("1.0");
|
||||
manifest.Artifact.Format.Should().Be("CSAF 2.0");
|
||||
manifest.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
manifest.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CsafVex_ParallelGeneration_ProducesDeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate in parallel 20 times
|
||||
var tasks = Enumerable.Range(0, 20)
|
||||
.Select(_ => Task.Run(() => GenerateCsafVex(input, frozenTime)))
|
||||
.ToArray();
|
||||
|
||||
var vexDocuments = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All outputs should be identical
|
||||
vexDocuments.Should().AllBe(vexDocuments[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CsafVex_VulnerabilityOrdering_IsDeterministic()
|
||||
{
|
||||
// Arrange - Multiple vulnerabilities
|
||||
var input = CreateMultiStatementVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate VEX multiple times
|
||||
var vex1 = GenerateCsafVex(input, frozenTime);
|
||||
var vex2 = GenerateCsafVex(input, frozenTime);
|
||||
|
||||
// Assert - Vulnerability order should be deterministic
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("\"vulnerabilities\"");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CsafVex_ProductTree_IsDeterministic()
|
||||
{
|
||||
// Arrange - Multiple products
|
||||
var input = CreateMultiStatementVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex = GenerateCsafVex(input, frozenTime);
|
||||
|
||||
// Assert - Product tree should be present and ordered
|
||||
vex.Should().Contain("\"product_tree\"");
|
||||
vex.Should().Contain("\"branches\"");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Format Consistency Tests
|
||||
|
||||
[Fact]
|
||||
public void AllVexFormats_WithSameInput_ProduceDifferentButStableHashes()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act - Generate all formats
|
||||
var openVex = GenerateOpenVex(input, frozenTime);
|
||||
var csafVex = GenerateCsafVex(input, frozenTime);
|
||||
|
||||
var openVexHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(openVex));
|
||||
var csafVexHash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(csafVex));
|
||||
|
||||
// Assert - Each format should have different hash but be deterministic
|
||||
openVexHash.Should().NotBe(csafVexHash);
|
||||
|
||||
// All hashes should be valid SHA-256
|
||||
openVexHash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
csafVexHash.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AllVexFormats_CanProduceDeterminismManifests()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateSampleVexInput();
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
var toolchain = new ToolchainInfo
|
||||
{
|
||||
Platform = ".NET 10.0",
|
||||
Components = new[]
|
||||
{
|
||||
new ComponentInfo { Name = "StellaOps.Excititor", Version = "1.0.0" }
|
||||
}
|
||||
};
|
||||
|
||||
// Act - Generate manifests for all formats
|
||||
var formats = new[] { "OpenVEX", "CSAF 2.0" };
|
||||
var generators = new Func<VexInput, DateTimeOffset, string>[]
|
||||
{
|
||||
GenerateOpenVex,
|
||||
GenerateCsafVex
|
||||
};
|
||||
|
||||
var manifests = formats.Zip(generators)
|
||||
.Select(pair =>
|
||||
{
|
||||
var vex = pair.Second(input, frozenTime);
|
||||
var vexBytes = Encoding.UTF8.GetBytes(vex);
|
||||
var artifactInfo = new ArtifactInfo
|
||||
{
|
||||
Type = "vex",
|
||||
Name = "test-container-vex",
|
||||
Version = "1.0.0",
|
||||
Format = pair.First
|
||||
};
|
||||
return DeterminismManifestWriter.CreateManifest(vexBytes, artifactInfo, toolchain);
|
||||
})
|
||||
.ToArray();
|
||||
|
||||
// Assert
|
||||
manifests.Should().HaveCount(2);
|
||||
manifests.Should().AllSatisfy(m =>
|
||||
{
|
||||
m.SchemaVersion.Should().Be("1.0");
|
||||
m.CanonicalHash.Algorithm.Should().Be("SHA-256");
|
||||
m.CanonicalHash.Value.Should().MatchRegex("^[0-9a-f]{64}$");
|
||||
});
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Status Transition Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void VexStatus_NotAffected_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateVexInputWithStatus(VexStatus.NotAffected, "vulnerable_code_not_present");
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("not_affected");
|
||||
vex1.Should().Contain("vulnerable_code_not_present");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VexStatus_Affected_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateVexInputWithStatus(VexStatus.Affected, null);
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("affected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VexStatus_Fixed_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateVexInputWithStatus(VexStatus.Fixed, null);
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("fixed");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VexStatus_UnderInvestigation_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var input = CreateVexInputWithStatus(VexStatus.UnderInvestigation, null);
|
||||
var frozenTime = DateTimeOffset.Parse("2025-12-23T18:00:00Z");
|
||||
|
||||
// Act
|
||||
var vex1 = GenerateOpenVex(input, frozenTime);
|
||||
var vex2 = GenerateOpenVex(input, frozenTime);
|
||||
|
||||
// Assert
|
||||
vex1.Should().Be(vex2);
|
||||
vex1.Should().Contain("under_investigation");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static VexInput CreateSampleVexInput()
|
||||
{
|
||||
return new VexInput
|
||||
{
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Product = "pkg:oci/myapp@sha256:abc123",
|
||||
Status = VexStatus.NotAffected,
|
||||
Justification = "inline_mitigations_already_exist",
|
||||
ImpactStatement = "The vulnerable code path is not reachable in this deployment.",
|
||||
Timestamp = DateTimeOffset.Parse("2025-12-23T18:00:00Z")
|
||||
};
|
||||
}
|
||||
|
||||
private static VexInput CreateMultiStatementVexInput()
|
||||
{
|
||||
return new VexInput
|
||||
{
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Product = "pkg:oci/myapp@sha256:abc123",
|
||||
Status = VexStatus.NotAffected,
|
||||
Justification = "vulnerable_code_not_present",
|
||||
ImpactStatement = null,
|
||||
AdditionalProducts = new[]
|
||||
{
|
||||
"pkg:oci/myapp@sha256:def456",
|
||||
"pkg:oci/myapp@sha256:ghi789"
|
||||
},
|
||||
AdditionalVulnerabilities = new[]
|
||||
{
|
||||
"CVE-2024-5678",
|
||||
"CVE-2024-9012"
|
||||
},
|
||||
Timestamp = DateTimeOffset.Parse("2025-12-23T18:00:00Z")
|
||||
};
|
||||
}
|
||||
|
||||
private static VexInput CreateVexInputWithStatus(VexStatus status, string? justification)
|
||||
{
|
||||
return new VexInput
|
||||
{
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Product = "pkg:oci/myapp@sha256:abc123",
|
||||
Status = status,
|
||||
Justification = justification,
|
||||
ImpactStatement = null,
|
||||
Timestamp = DateTimeOffset.Parse("2025-12-23T18:00:00Z")
|
||||
};
|
||||
}
|
||||
|
||||
private static string GenerateOpenVex(VexInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// TODO: Integrate with actual OpenVexExporter
|
||||
// For now, return deterministic stub following OpenVEX spec
|
||||
var deterministicId = GenerateDeterministicId(input, "openvex");
|
||||
var productIds = new[] { input.Product }
|
||||
.Concat(input.AdditionalProducts ?? Array.Empty<string>())
|
||||
.OrderBy(p => p, StringComparer.Ordinal)
|
||||
.Select(p => $"\"{p}\"");
|
||||
|
||||
var vulnerabilities = new[] { input.VulnerabilityId }
|
||||
.Concat(input.AdditionalVulnerabilities ?? Array.Empty<string>())
|
||||
.OrderBy(v => v, StringComparer.Ordinal);
|
||||
|
||||
var statements = vulnerabilities.Select(vuln =>
|
||||
$$"""
|
||||
{
|
||||
"vulnerability": {"@id": "{{vuln}}"},
|
||||
"products": [{{string.Join(", ", productIds)}}],
|
||||
"status": "{{StatusToString(input.Status)}}",
|
||||
"justification": "{{input.Justification ?? ""}}",
|
||||
"impact_statement": "{{input.ImpactStatement ?? ""}}"
|
||||
}
|
||||
""");
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"@context": "https://openvex.dev/ns/v0.2.0",
|
||||
"@id": "{{deterministicId}}",
|
||||
"author": "StellaOps Excititor",
|
||||
"timestamp": "{{timestamp:O}}",
|
||||
"version": 1,
|
||||
"statements": [
|
||||
{{string.Join(",\n ", statements)}}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string GenerateCsafVex(VexInput input, DateTimeOffset timestamp)
|
||||
{
|
||||
// TODO: Integrate with actual CsafExporter
|
||||
// For now, return deterministic stub following CSAF 2.0 spec
|
||||
var deterministicId = GenerateDeterministicId(input, "csaf");
|
||||
var productIds = new[] { input.Product }
|
||||
.Concat(input.AdditionalProducts ?? Array.Empty<string>())
|
||||
.OrderBy(p => p, StringComparer.Ordinal);
|
||||
|
||||
var vulnerabilities = new[] { input.VulnerabilityId }
|
||||
.Concat(input.AdditionalVulnerabilities ?? Array.Empty<string>())
|
||||
.OrderBy(v => v, StringComparer.Ordinal)
|
||||
.Select(vuln => $$"""
|
||||
{
|
||||
"cve": "{{vuln}}",
|
||||
"product_status": {
|
||||
"{{CsafStatusCategory(input.Status)}}": [{{string.Join(", ", productIds.Select(p => $"\"{p}\""))}}]
|
||||
}
|
||||
}
|
||||
""");
|
||||
|
||||
var branches = productIds.Select(p => $$"""
|
||||
{
|
||||
"category": "product_version",
|
||||
"name": "{{p}}"
|
||||
}
|
||||
""");
|
||||
|
||||
return $$"""
|
||||
{
|
||||
"document": {
|
||||
"category": "vex",
|
||||
"csaf_version": "2.0",
|
||||
"title": "StellaOps VEX CSAF Export",
|
||||
"publisher": {
|
||||
"category": "tool",
|
||||
"name": "StellaOps Excititor"
|
||||
},
|
||||
"tracking": {
|
||||
"id": "{{deterministicId}}",
|
||||
"status": "final",
|
||||
"version": "1",
|
||||
"initial_release_date": "{{timestamp:O}}",
|
||||
"current_release_date": "{{timestamp:O}}"
|
||||
}
|
||||
},
|
||||
"product_tree": {
|
||||
"branches": [
|
||||
{{string.Join(",\n ", branches)}}
|
||||
]
|
||||
},
|
||||
"vulnerabilities": [
|
||||
{{string.Join(",\n ", vulnerabilities)}}
|
||||
]
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
private static string GenerateDeterministicId(VexInput input, string context)
|
||||
{
|
||||
var inputString = $"{context}:{input.VulnerabilityId}:{input.Product}:{input.Status}:{input.Timestamp:O}";
|
||||
var hash = CanonJson.Sha256Hex(Encoding.UTF8.GetBytes(inputString));
|
||||
return $"urn:uuid:{hash[..8]}-{hash[8..12]}-{hash[12..16]}-{hash[16..20]}-{hash[20..32]}";
|
||||
}
|
||||
|
||||
private static string StatusToString(VexStatus status) => status switch
|
||||
{
|
||||
VexStatus.NotAffected => "not_affected",
|
||||
VexStatus.Affected => "affected",
|
||||
VexStatus.Fixed => "fixed",
|
||||
VexStatus.UnderInvestigation => "under_investigation",
|
||||
_ => "unknown"
|
||||
};
|
||||
|
||||
private static string CsafStatusCategory(VexStatus status) => status switch
|
||||
{
|
||||
VexStatus.NotAffected => "known_not_affected",
|
||||
VexStatus.Affected => "known_affected",
|
||||
VexStatus.Fixed => "fixed",
|
||||
VexStatus.UnderInvestigation => "under_investigation",
|
||||
_ => "unknown"
|
||||
};
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record VexInput
|
||||
{
|
||||
public required string VulnerabilityId { get; init; }
|
||||
public required string Product { get; init; }
|
||||
public required VexStatus Status { get; init; }
|
||||
public string? Justification { get; init; }
|
||||
public string? ImpactStatement { get; init; }
|
||||
public string[]? AdditionalProducts { get; init; }
|
||||
public string[]? AdditionalVulnerabilities { get; init; }
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
private enum VexStatus
|
||||
{
|
||||
NotAffected,
|
||||
Affected,
|
||||
Fixed,
|
||||
UnderInvestigation
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,951 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// E2EReproducibilityTestFixture.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Task: E2E-8200-002 - Create E2EReproducibilityTestFixture with full service composition
|
||||
// Description: Test fixture providing full pipeline composition for E2E reproducibility tests.
|
||||
// Supports: ingest → normalize → diff → decide → attest → bundle → reverify
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Attestor.Dsse;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Policy.Deltas;
|
||||
using Testcontainers.PostgreSql;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for end-to-end reproducibility tests.
|
||||
/// Provides a fully configured test environment with:
|
||||
/// - PostgreSQL database via Testcontainers
|
||||
/// - Mock advisory feeds
|
||||
/// - Policy engine with test policies
|
||||
/// - Attestor for DSSE envelope creation
|
||||
/// - Full pipeline execution capability
|
||||
/// </summary>
|
||||
public sealed class E2EReproducibilityTestFixture : IAsyncLifetime
|
||||
{
|
||||
private PostgreSqlContainer? _postgresContainer;
|
||||
private WebApplicationFactory<Program>? _factory;
|
||||
private ECDsa? _signingKey;
|
||||
private bool _initialized;
|
||||
|
||||
/// <summary>
|
||||
/// Gets the frozen timestamp used for deterministic tests.
|
||||
/// </summary>
|
||||
public DateTimeOffset FrozenTimestamp { get; } = new DateTimeOffset(2025, 6, 15, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
/// <summary>
|
||||
/// Initializes the test fixture, starting required services.
|
||||
/// </summary>
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
if (_initialized)
|
||||
return;
|
||||
|
||||
// Generate deterministic signing key from fixed seed
|
||||
_signingKey = GenerateDeterministicKey(42);
|
||||
|
||||
// Start PostgreSQL container
|
||||
_postgresContainer = new PostgreSqlBuilder()
|
||||
.WithImage("postgres:16-alpine")
|
||||
.WithDatabase("stellaops_e2e_test")
|
||||
.WithUsername("e2e_test_user")
|
||||
.WithPassword("e2e_test_password")
|
||||
.WithPortBinding(5432, true)
|
||||
.Build();
|
||||
|
||||
await _postgresContainer.StartAsync();
|
||||
|
||||
// Create the test web application factory
|
||||
_factory = new WebApplicationFactory<Program>()
|
||||
.WithWebHostBuilder(builder =>
|
||||
{
|
||||
builder.ConfigureAppConfiguration((context, config) =>
|
||||
{
|
||||
config.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["ConnectionStrings:ScannerDb"] = _postgresContainer.GetConnectionString(),
|
||||
["Scanner:Authority:Enabled"] = "false",
|
||||
["Scanner:AllowAnonymous"] = "true",
|
||||
["Scanner:ProofChain:Enabled"] = "true",
|
||||
["Scanner:ProofChain:SigningKeyId"] = "e2e-test-key",
|
||||
["Scanner:ProofChain:AutoSign"] = "true",
|
||||
["Scanner:Determinism:FrozenClock"] = "true",
|
||||
["Scanner:Determinism:FrozenTimestamp"] = FrozenTimestamp.ToString("O"),
|
||||
["Logging:LogLevel:Default"] = "Warning"
|
||||
});
|
||||
});
|
||||
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.AddLogging(logging =>
|
||||
{
|
||||
logging.ClearProviders();
|
||||
logging.AddConsole();
|
||||
logging.SetMinimumLevel(LogLevel.Warning);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
_initialized = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an HTTP client for the test application.
|
||||
/// </summary>
|
||||
public async Task<HttpClient> CreateClientAsync()
|
||||
{
|
||||
if (!_initialized)
|
||||
{
|
||||
await InitializeAsync();
|
||||
}
|
||||
|
||||
return _factory!.CreateClient(new WebApplicationFactoryClientOptions
|
||||
{
|
||||
AllowAutoRedirect = false
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a snapshot of all inputs with computed hashes for verification.
|
||||
/// </summary>
|
||||
public async Task<InputSnapshot> SnapshotInputsAsync(
|
||||
string? sbomFixturePath = null,
|
||||
string? advisoryFeedPath = null,
|
||||
string? policyPackPath = null,
|
||||
string? vexDocumentPath = null)
|
||||
{
|
||||
// Load default fixtures if not specified
|
||||
var sbomContent = sbomFixturePath is not null
|
||||
? await File.ReadAllBytesAsync(sbomFixturePath)
|
||||
: CreateMinimalSbom();
|
||||
|
||||
var advisoryFeed = advisoryFeedPath is not null
|
||||
? await File.ReadAllBytesAsync(advisoryFeedPath)
|
||||
: CreateMockAdvisoryFeed();
|
||||
|
||||
var policyPack = policyPackPath is not null
|
||||
? await File.ReadAllBytesAsync(policyPackPath)
|
||||
: CreateDefaultPolicyPack();
|
||||
|
||||
var vexDocument = vexDocumentPath is not null
|
||||
? await File.ReadAllBytesAsync(vexDocumentPath)
|
||||
: null;
|
||||
|
||||
return new InputSnapshot
|
||||
{
|
||||
Sbom = sbomContent,
|
||||
SbomHash = ComputeHash(sbomContent),
|
||||
AdvisoryFeed = advisoryFeed,
|
||||
AdvisoryFeedHash = ComputeHash(advisoryFeed),
|
||||
PolicyPack = policyPack,
|
||||
PolicyPackHash = ComputeHash(policyPack),
|
||||
VexDocument = vexDocument,
|
||||
VexDocumentHash = vexDocument is not null ? ComputeHash(vexDocument) : null,
|
||||
SnapshotTimestamp = FrozenTimestamp
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Executes the full pipeline with the given inputs.
|
||||
/// </summary>
|
||||
public async Task<PipelineResult> RunFullPipelineAsync(InputSnapshot inputs)
|
||||
{
|
||||
// Stage 1: Ingest advisories
|
||||
var advisories = await IngestAdvisoriesAsync(inputs.AdvisoryFeed);
|
||||
|
||||
// Stage 2: Normalize advisories
|
||||
var normalized = await NormalizeAdvisoriesAsync(advisories);
|
||||
|
||||
// Stage 3: Diff SBOM against advisories
|
||||
var diff = await ComputeDiffAsync(inputs.Sbom, normalized);
|
||||
|
||||
// Stage 4: Evaluate policy and compute verdict
|
||||
var verdict = await EvaluatePolicyAsync(diff, inputs.PolicyPack, inputs.VexDocument);
|
||||
|
||||
// Stage 5: Create DSSE attestation
|
||||
var envelope = await CreateAttestationAsync(verdict);
|
||||
|
||||
// Stage 6: Package into bundle
|
||||
var bundle = await CreateBundleAsync(envelope, inputs);
|
||||
|
||||
return new PipelineResult
|
||||
{
|
||||
VerdictId = verdict.VerdictId,
|
||||
VerdictHash = ComputeHash(SerializeVerdict(verdict)),
|
||||
EnvelopeHash = ComputeHash(SerializeEnvelope(envelope)),
|
||||
BundleManifest = bundle.Manifest,
|
||||
BundleManifestHash = ComputeHash(bundle.Manifest),
|
||||
ExecutionTimestamp = FrozenTimestamp
|
||||
};
|
||||
}
|
||||
|
||||
#region Stage 1: Ingest
|
||||
|
||||
/// <summary>
|
||||
/// Ingests advisory feed data.
|
||||
/// </summary>
|
||||
public Task<IReadOnlyList<AdvisoryRecord>> IngestAdvisoriesAsync(byte[] feedData)
|
||||
{
|
||||
// Parse advisory feed (mock implementation for E2E tests)
|
||||
var advisories = ParseAdvisoryFeed(feedData);
|
||||
return Task.FromResult(advisories);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AdvisoryRecord> ParseAdvisoryFeed(byte[] feedData)
|
||||
{
|
||||
// For E2E tests, parse the mock feed format
|
||||
var json = System.Text.Encoding.UTF8.GetString(feedData);
|
||||
using var doc = JsonDocument.Parse(json);
|
||||
|
||||
var advisories = new List<AdvisoryRecord>();
|
||||
foreach (var element in doc.RootElement.GetProperty("advisories").EnumerateArray())
|
||||
{
|
||||
advisories.Add(new AdvisoryRecord
|
||||
{
|
||||
Id = element.GetProperty("id").GetString()!,
|
||||
CveId = element.GetProperty("cveId").GetString(),
|
||||
Severity = element.GetProperty("severity").GetString()!,
|
||||
AffectedPackages = element.GetProperty("affected").EnumerateArray()
|
||||
.Select(a => a.GetString()!)
|
||||
.ToList(),
|
||||
FixedVersions = element.TryGetProperty("fixed", out var fixedProp)
|
||||
? fixedProp.EnumerateArray().Select(f => f.GetString()!).ToList()
|
||||
: []
|
||||
});
|
||||
}
|
||||
|
||||
// Sort for determinism
|
||||
return advisories.OrderBy(a => a.Id, StringComparer.Ordinal).ToList();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 2: Normalize
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes and deduplicates advisories.
|
||||
/// </summary>
|
||||
public Task<NormalizedAdvisories> NormalizeAdvisoriesAsync(IReadOnlyList<AdvisoryRecord> advisories)
|
||||
{
|
||||
// Deduplicate by CVE ID
|
||||
var uniqueByCve = advisories
|
||||
.GroupBy(a => a.CveId ?? a.Id)
|
||||
.Select(g => g.OrderBy(a => a.Id, StringComparer.Ordinal).First())
|
||||
.OrderBy(a => a.CveId ?? a.Id, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
var normalized = new NormalizedAdvisories
|
||||
{
|
||||
Advisories = uniqueByCve,
|
||||
NormalizationTimestamp = FrozenTimestamp,
|
||||
ContentHash = ComputeHash(SerializeAdvisories(uniqueByCve))
|
||||
};
|
||||
|
||||
return Task.FromResult(normalized);
|
||||
}
|
||||
|
||||
private static byte[] SerializeAdvisories(IReadOnlyList<AdvisoryRecord> advisories)
|
||||
{
|
||||
var serializable = advisories.Select(a => new
|
||||
{
|
||||
id = a.Id,
|
||||
cveId = a.CveId,
|
||||
severity = a.Severity,
|
||||
affected = a.AffectedPackages,
|
||||
fixed_ = a.FixedVersions
|
||||
}).ToList();
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(serializable));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 3: Diff
|
||||
|
||||
/// <summary>
|
||||
/// Computes diff between SBOM and advisories.
|
||||
/// </summary>
|
||||
public Task<DiffResult> ComputeDiffAsync(byte[] sbomData, NormalizedAdvisories advisories)
|
||||
{
|
||||
// Parse SBOM and find affected components
|
||||
var sbom = ParseSbom(sbomData);
|
||||
var findings = new List<Finding>();
|
||||
|
||||
foreach (var component in sbom.Components)
|
||||
{
|
||||
foreach (var advisory in advisories.Advisories)
|
||||
{
|
||||
if (advisory.AffectedPackages.Any(pkg =>
|
||||
pkg.Equals(component.Purl, StringComparison.OrdinalIgnoreCase) ||
|
||||
pkg.Contains(component.Name, StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
findings.Add(new Finding
|
||||
{
|
||||
Id = $"finding:{advisory.CveId ?? advisory.Id}:{component.Purl}",
|
||||
CveId = advisory.CveId ?? advisory.Id,
|
||||
Severity = advisory.Severity,
|
||||
AffectedComponent = component.Purl,
|
||||
ComponentVersion = component.Version,
|
||||
FixedVersions = advisory.FixedVersions
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort findings for determinism
|
||||
var sortedFindings = findings
|
||||
.OrderBy(f => f.CveId, StringComparer.Ordinal)
|
||||
.ThenBy(f => f.AffectedComponent, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
var diff = new DiffResult
|
||||
{
|
||||
Findings = sortedFindings,
|
||||
SbomDigest = ComputeHash(sbomData),
|
||||
AdvisoryDigest = advisories.ContentHash,
|
||||
DiffTimestamp = FrozenTimestamp
|
||||
};
|
||||
|
||||
return Task.FromResult(diff);
|
||||
}
|
||||
|
||||
private static SbomData ParseSbom(byte[] sbomData)
|
||||
{
|
||||
var json = System.Text.Encoding.UTF8.GetString(sbomData);
|
||||
using var doc = JsonDocument.Parse(json);
|
||||
|
||||
var components = new List<SbomComponent>();
|
||||
foreach (var element in doc.RootElement.GetProperty("components").EnumerateArray())
|
||||
{
|
||||
components.Add(new SbomComponent
|
||||
{
|
||||
Name = element.GetProperty("name").GetString()!,
|
||||
Version = element.GetProperty("version").GetString()!,
|
||||
Purl = element.GetProperty("purl").GetString()!
|
||||
});
|
||||
}
|
||||
|
||||
return new SbomData
|
||||
{
|
||||
Components = components.OrderBy(c => c.Purl, StringComparer.Ordinal).ToList()
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 4: Decide
|
||||
|
||||
/// <summary>
|
||||
/// Evaluates policy and computes verdict.
|
||||
/// </summary>
|
||||
public Task<DeltaVerdict> EvaluatePolicyAsync(DiffResult diff, byte[] policyPack, byte[]? vexDocument)
|
||||
{
|
||||
// Parse VEX document if provided for exception handling
|
||||
var exceptions = vexDocument is not null
|
||||
? ParseVexExceptions(vexDocument)
|
||||
: [];
|
||||
|
||||
// Evaluate findings against policy
|
||||
var blockingDrivers = new List<DeltaDriver>();
|
||||
var warningDrivers = new List<DeltaDriver>();
|
||||
var appliedExceptions = new List<string>();
|
||||
|
||||
foreach (var finding in diff.Findings)
|
||||
{
|
||||
// Check if finding is excepted via VEX
|
||||
var exception = exceptions.FirstOrDefault(e =>
|
||||
e.VulnerabilityId.Equals(finding.CveId, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (exception is not null)
|
||||
{
|
||||
appliedExceptions.Add(exception.Id);
|
||||
continue;
|
||||
}
|
||||
|
||||
var driver = new DeltaDriver
|
||||
{
|
||||
Type = "new-finding",
|
||||
Severity = MapSeverity(finding.Severity),
|
||||
Description = $"Vulnerability {finding.CveId} found in {finding.AffectedComponent}",
|
||||
CveId = finding.CveId,
|
||||
Purl = finding.AffectedComponent
|
||||
};
|
||||
|
||||
if (IsBlockingSeverity(finding.Severity))
|
||||
{
|
||||
blockingDrivers.Add(driver);
|
||||
}
|
||||
else
|
||||
{
|
||||
warningDrivers.Add(driver);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort drivers for determinism
|
||||
blockingDrivers = [.. blockingDrivers.OrderBy(d => d.CveId, StringComparer.Ordinal)
|
||||
.ThenBy(d => d.Purl, StringComparer.Ordinal)];
|
||||
|
||||
warningDrivers = [.. warningDrivers.OrderBy(d => d.CveId, StringComparer.Ordinal)
|
||||
.ThenBy(d => d.Purl, StringComparer.Ordinal)];
|
||||
|
||||
appliedExceptions = [.. appliedExceptions.Order(StringComparer.Ordinal)];
|
||||
|
||||
// Compute gate level
|
||||
var gateLevel = blockingDrivers.Count > 0 ? DeltaGateLevel.G4 : DeltaGateLevel.G1;
|
||||
|
||||
// Build verdict with content-addressed ID
|
||||
var deltaId = $"delta:sha256:{ComputeHashString(System.Text.Encoding.UTF8.GetBytes(
|
||||
CanonJson.Serialize(new { diff.SbomDigest, diff.AdvisoryDigest })))}";
|
||||
|
||||
var builder = new DeltaVerdictBuilder()
|
||||
.WithGate(gateLevel);
|
||||
|
||||
foreach (var driver in blockingDrivers)
|
||||
{
|
||||
builder.AddBlockingDriver(driver);
|
||||
}
|
||||
|
||||
foreach (var driver in warningDrivers)
|
||||
{
|
||||
builder.AddWarningDriver(driver);
|
||||
}
|
||||
|
||||
foreach (var exception in appliedExceptions)
|
||||
{
|
||||
builder.AddException(exception);
|
||||
}
|
||||
|
||||
var verdict = builder.Build(deltaId);
|
||||
|
||||
return Task.FromResult(verdict);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<VexException> ParseVexExceptions(byte[] vexData)
|
||||
{
|
||||
var json = System.Text.Encoding.UTF8.GetString(vexData);
|
||||
using var doc = JsonDocument.Parse(json);
|
||||
|
||||
var exceptions = new List<VexException>();
|
||||
if (doc.RootElement.TryGetProperty("statements", out var statements))
|
||||
{
|
||||
foreach (var stmt in statements.EnumerateArray())
|
||||
{
|
||||
if (stmt.GetProperty("status").GetString() == "not_affected")
|
||||
{
|
||||
exceptions.Add(new VexException
|
||||
{
|
||||
Id = stmt.GetProperty("id").GetString()!,
|
||||
VulnerabilityId = stmt.GetProperty("vulnerability").GetString()!,
|
||||
Status = "not_affected",
|
||||
Justification = stmt.TryGetProperty("justification", out var j) ? j.GetString() : null
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return exceptions.OrderBy(e => e.VulnerabilityId, StringComparer.Ordinal).ToList();
|
||||
}
|
||||
|
||||
private static DeltaDriverSeverity MapSeverity(string severity) => severity.ToUpperInvariant() switch
|
||||
{
|
||||
"CRITICAL" => DeltaDriverSeverity.Critical,
|
||||
"HIGH" => DeltaDriverSeverity.High,
|
||||
"MEDIUM" => DeltaDriverSeverity.Medium,
|
||||
"LOW" => DeltaDriverSeverity.Low,
|
||||
_ => DeltaDriverSeverity.Unknown
|
||||
};
|
||||
|
||||
private static bool IsBlockingSeverity(string severity) =>
|
||||
severity.Equals("CRITICAL", StringComparison.OrdinalIgnoreCase) ||
|
||||
severity.Equals("HIGH", StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 5: Attest
|
||||
|
||||
/// <summary>
|
||||
/// Creates DSSE attestation for the verdict.
|
||||
/// </summary>
|
||||
public Task<DsseEnvelopeData> CreateAttestationAsync(DeltaVerdict verdict)
|
||||
{
|
||||
// Serialize verdict to canonical JSON
|
||||
var payload = SerializeVerdict(verdict);
|
||||
|
||||
// Sign using deterministic key
|
||||
var signature = SignPayload(payload);
|
||||
|
||||
var envelope = new DsseEnvelopeData
|
||||
{
|
||||
PayloadType = "application/vnd.stellaops.verdict+json",
|
||||
Payload = payload,
|
||||
Signatures =
|
||||
[
|
||||
new DsseSignatureData
|
||||
{
|
||||
KeyId = "e2e-test-key",
|
||||
Signature = signature
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
return Task.FromResult(envelope);
|
||||
}
|
||||
|
||||
private byte[] SignPayload(byte[] payload)
|
||||
{
|
||||
// Create PAE (Pre-Authentication Encoding) as per DSSE spec
|
||||
var payloadType = "application/vnd.stellaops.verdict+json"u8.ToArray();
|
||||
var pae = CreatePae(payloadType, payload);
|
||||
|
||||
// Sign with ECDSA P-256
|
||||
return _signingKey!.SignData(pae, HashAlgorithmName.SHA256);
|
||||
}
|
||||
|
||||
private static byte[] CreatePae(byte[] payloadType, byte[] payload)
|
||||
{
|
||||
// PAE(type, payload) = "DSSEv1" || SP || LEN(type) || SP || type || SP || LEN(payload) || SP || payload
|
||||
var parts = new List<byte>();
|
||||
parts.AddRange("DSSEv1 "u8.ToArray());
|
||||
parts.AddRange(System.Text.Encoding.UTF8.GetBytes(payloadType.Length.ToString()));
|
||||
parts.Add((byte)' ');
|
||||
parts.AddRange(payloadType);
|
||||
parts.Add((byte)' ');
|
||||
parts.AddRange(System.Text.Encoding.UTF8.GetBytes(payload.Length.ToString()));
|
||||
parts.Add((byte)' ');
|
||||
parts.AddRange(payload);
|
||||
return [.. parts];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 6: Bundle
|
||||
|
||||
/// <summary>
|
||||
/// Creates a bundle containing the attestation and all artifacts.
|
||||
/// </summary>
|
||||
public Task<BundleResult> CreateBundleAsync(DsseEnvelopeData envelope, InputSnapshot inputs)
|
||||
{
|
||||
// Create manifest with all artifact hashes
|
||||
var manifest = new BundleManifest
|
||||
{
|
||||
Version = "1.0",
|
||||
CreatedAt = FrozenTimestamp,
|
||||
Artifacts = new Dictionary<string, string>
|
||||
{
|
||||
["sbom"] = inputs.SbomHash,
|
||||
["advisory-feed"] = inputs.AdvisoryFeedHash,
|
||||
["policy-pack"] = inputs.PolicyPackHash,
|
||||
["envelope"] = ComputeHashString(SerializeEnvelope(envelope))
|
||||
}
|
||||
};
|
||||
|
||||
if (inputs.VexDocumentHash is not null)
|
||||
{
|
||||
manifest.Artifacts["vex-document"] = inputs.VexDocumentHash;
|
||||
}
|
||||
|
||||
// Serialize manifest deterministically
|
||||
var manifestBytes = System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(manifest));
|
||||
|
||||
var bundle = new BundleResult
|
||||
{
|
||||
Manifest = manifestBytes,
|
||||
Envelope = envelope,
|
||||
ManifestHash = ComputeHash(manifestBytes)
|
||||
};
|
||||
|
||||
return Task.FromResult(bundle);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Serialization Helpers
|
||||
|
||||
private static byte[] SerializeVerdict(DeltaVerdict verdict)
|
||||
{
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(verdict));
|
||||
}
|
||||
|
||||
private static byte[] SerializeEnvelope(DsseEnvelopeData envelope)
|
||||
{
|
||||
var obj = new
|
||||
{
|
||||
payloadType = envelope.PayloadType,
|
||||
payload = Convert.ToBase64String(envelope.Payload),
|
||||
signatures = envelope.Signatures.Select(s => new
|
||||
{
|
||||
keyid = s.KeyId,
|
||||
sig = Convert.ToBase64String(s.Signature)
|
||||
}).ToArray()
|
||||
};
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(obj));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hashing Helpers
|
||||
|
||||
/// <summary>
|
||||
/// Computes SHA-256 hash of data and returns as hex string.
|
||||
/// </summary>
|
||||
public static string ComputeHash(byte[] data)
|
||||
{
|
||||
var hash = SHA256.HashData(data);
|
||||
return $"sha256:{Convert.ToHexStringLower(hash)}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes SHA-256 hash of data and returns hex string without prefix.
|
||||
/// </summary>
|
||||
public static string ComputeHashString(byte[] data)
|
||||
{
|
||||
var hash = SHA256.HashData(data);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Data Factories
|
||||
|
||||
/// <summary>
|
||||
/// Creates a minimal SBOM for testing.
|
||||
/// </summary>
|
||||
public static byte[] CreateMinimalSbom()
|
||||
{
|
||||
var sbom = new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.5",
|
||||
version = 1,
|
||||
components = new[]
|
||||
{
|
||||
new { name = "lodash", version = "4.17.20", purl = "pkg:npm/lodash@4.17.20" },
|
||||
new { name = "axios", version = "0.21.0", purl = "pkg:npm/axios@0.21.0" },
|
||||
new { name = "moment", version = "2.29.0", purl = "pkg:npm/moment@2.29.0" }
|
||||
}
|
||||
};
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(sbom));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock advisory feed for testing.
|
||||
/// </summary>
|
||||
public static byte[] CreateMockAdvisoryFeed()
|
||||
{
|
||||
var feed = new
|
||||
{
|
||||
advisories = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
id = "GHSA-2024-0001",
|
||||
cveId = "CVE-2024-0001",
|
||||
severity = "CRITICAL",
|
||||
affected = new[] { "pkg:npm/lodash@4.17.20" },
|
||||
@fixed = new[] { "pkg:npm/lodash@4.17.21" }
|
||||
},
|
||||
new
|
||||
{
|
||||
id = "GHSA-2024-0002",
|
||||
cveId = "CVE-2024-0002",
|
||||
severity = "HIGH",
|
||||
affected = new[] { "pkg:npm/axios@0.21.0" },
|
||||
@fixed = new[] { "pkg:npm/axios@0.21.1" }
|
||||
},
|
||||
new
|
||||
{
|
||||
id = "GHSA-2024-0003",
|
||||
cveId = "CVE-2024-0003",
|
||||
severity = "LOW",
|
||||
affected = new[] { "pkg:npm/moment@2.29.0" },
|
||||
@fixed = new[] { "pkg:npm/moment@2.29.4" }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(feed));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a default policy pack for testing.
|
||||
/// </summary>
|
||||
public static byte[] CreateDefaultPolicyPack()
|
||||
{
|
||||
var policy = new
|
||||
{
|
||||
version = "1.0",
|
||||
rules = new[]
|
||||
{
|
||||
new { severity = "CRITICAL", action = "block" },
|
||||
new { severity = "HIGH", action = "block" },
|
||||
new { severity = "MEDIUM", action = "warn" },
|
||||
new { severity = "LOW", action = "warn" }
|
||||
}
|
||||
};
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(policy));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a VEX document with exceptions.
|
||||
/// </summary>
|
||||
public static byte[] CreateVexDocumentWithExceptions(params string[] exceptedCveIds)
|
||||
{
|
||||
var statements = exceptedCveIds.Select((cve, i) => new
|
||||
{
|
||||
id = $"vex-exception-{i + 1:D3}",
|
||||
vulnerability = cve,
|
||||
status = "not_affected",
|
||||
justification = "vulnerable_code_not_in_execute_path"
|
||||
}).ToArray();
|
||||
|
||||
var vex = new
|
||||
{
|
||||
@context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.test/vex/test-001",
|
||||
statements
|
||||
};
|
||||
|
||||
return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(vex));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Key Generation
|
||||
|
||||
/// <summary>
|
||||
/// Generates a deterministic ECDSA key from a seed.
|
||||
/// </summary>
|
||||
private static ECDsa GenerateDeterministicKey(int seed)
|
||||
{
|
||||
// Use a deterministic RNG seeded from the input
|
||||
var rng = new DeterministicRng(seed);
|
||||
var keyBytes = new byte[32];
|
||||
rng.GetBytes(keyBytes);
|
||||
|
||||
// Create ECDSA key from the deterministic bytes
|
||||
var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
|
||||
// Import deterministic private key
|
||||
var parameters = new ECParameters
|
||||
{
|
||||
Curve = ECCurve.NamedCurves.nistP256,
|
||||
D = keyBytes,
|
||||
Q = default // Will be computed from D
|
||||
};
|
||||
|
||||
// Compute public key from private key
|
||||
var tempKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
tempKey.ImportParameters(new ECParameters
|
||||
{
|
||||
Curve = ECCurve.NamedCurves.nistP256,
|
||||
D = keyBytes
|
||||
});
|
||||
var exported = tempKey.ExportParameters(true);
|
||||
parameters.Q = exported.Q;
|
||||
|
||||
ecdsa.ImportParameters(parameters);
|
||||
return ecdsa;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the public key for verification.
|
||||
/// </summary>
|
||||
public byte[] GetPublicKey()
|
||||
{
|
||||
return _signingKey!.ExportSubjectPublicKeyInfo();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Disposes of the test fixture resources.
|
||||
/// </summary>
|
||||
public async Task DisposeAsync()
|
||||
{
|
||||
_signingKey?.Dispose();
|
||||
_factory?.Dispose();
|
||||
|
||||
if (_postgresContainer is not null)
|
||||
{
|
||||
await _postgresContainer.DisposeAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic random number generator for key generation.
|
||||
/// </summary>
|
||||
internal sealed class DeterministicRng(int seed)
|
||||
{
|
||||
private readonly Random _random = new(seed);
|
||||
|
||||
public void GetBytes(byte[] data)
|
||||
{
|
||||
_random.NextBytes(data);
|
||||
}
|
||||
}
|
||||
|
||||
#region Data Transfer Objects
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot of all pipeline inputs with hashes.
|
||||
/// </summary>
|
||||
public sealed class InputSnapshot
|
||||
{
|
||||
public required byte[] Sbom { get; init; }
|
||||
public required string SbomHash { get; init; }
|
||||
public required byte[] AdvisoryFeed { get; init; }
|
||||
public required string AdvisoryFeedHash { get; init; }
|
||||
public required byte[] PolicyPack { get; init; }
|
||||
public required string PolicyPackHash { get; init; }
|
||||
public byte[]? VexDocument { get; init; }
|
||||
public string? VexDocumentHash { get; init; }
|
||||
public DateTimeOffset SnapshotTimestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of full pipeline execution.
|
||||
/// </summary>
|
||||
public sealed class PipelineResult
|
||||
{
|
||||
public required string VerdictId { get; init; }
|
||||
public required string VerdictHash { get; init; }
|
||||
public required string EnvelopeHash { get; init; }
|
||||
public required byte[] BundleManifest { get; init; }
|
||||
public required string BundleManifestHash { get; init; }
|
||||
public DateTimeOffset ExecutionTimestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Advisory record from ingestion.
|
||||
/// </summary>
|
||||
public sealed class AdvisoryRecord
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public string? CveId { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public required IReadOnlyList<string> AffectedPackages { get; init; }
|
||||
public IReadOnlyList<string> FixedVersions { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalized advisories after deduplication.
|
||||
/// </summary>
|
||||
public sealed class NormalizedAdvisories
|
||||
{
|
||||
public required IReadOnlyList<AdvisoryRecord> Advisories { get; init; }
|
||||
public DateTimeOffset NormalizationTimestamp { get; init; }
|
||||
public required string ContentHash { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SBOM component data.
|
||||
/// </summary>
|
||||
public sealed class SbomComponent
|
||||
{
|
||||
public required string Name { get; init; }
|
||||
public required string Version { get; init; }
|
||||
public required string Purl { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parsed SBOM data.
|
||||
/// </summary>
|
||||
public sealed class SbomData
|
||||
{
|
||||
public required IReadOnlyList<SbomComponent> Components { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Security finding from diff.
|
||||
/// </summary>
|
||||
public sealed class Finding
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public required string CveId { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public required string AffectedComponent { get; init; }
|
||||
public required string ComponentVersion { get; init; }
|
||||
public IReadOnlyList<string> FixedVersions { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of diffing SBOM against advisories.
|
||||
/// </summary>
|
||||
public sealed class DiffResult
|
||||
{
|
||||
public required IReadOnlyList<Finding> Findings { get; init; }
|
||||
public required string SbomDigest { get; init; }
|
||||
public required string AdvisoryDigest { get; init; }
|
||||
public DateTimeOffset DiffTimestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VEX exception from VEX document.
|
||||
/// </summary>
|
||||
public sealed class VexException
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public required string VulnerabilityId { get; init; }
|
||||
public required string Status { get; init; }
|
||||
public string? Justification { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope data.
|
||||
/// </summary>
|
||||
public sealed class DsseEnvelopeData
|
||||
{
|
||||
public required string PayloadType { get; init; }
|
||||
public required byte[] Payload { get; init; }
|
||||
public required IReadOnlyList<DsseSignatureData> Signatures { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE signature data.
|
||||
/// </summary>
|
||||
public sealed class DsseSignatureData
|
||||
{
|
||||
public required string KeyId { get; init; }
|
||||
public required byte[] Signature { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Bundle manifest structure.
|
||||
/// </summary>
|
||||
public sealed class BundleManifest
|
||||
{
|
||||
public required string Version { get; init; }
|
||||
public DateTimeOffset CreatedAt { get; init; }
|
||||
public required Dictionary<string, string> Artifacts { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of bundle creation.
|
||||
/// </summary>
|
||||
public sealed class BundleResult
|
||||
{
|
||||
public required byte[] Manifest { get; init; }
|
||||
public required DsseEnvelopeData Envelope { get; init; }
|
||||
public required string ManifestHash { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Placeholder for Program class detection.
|
||||
/// The actual Program class is from Scanner.WebService.
|
||||
/// </summary>
|
||||
#pragma warning disable CA1050 // Declare types in namespaces
|
||||
public partial class Program { }
|
||||
#pragma warning restore CA1050
|
||||
@@ -0,0 +1,457 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// E2EReproducibilityTests.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Tasks: E2E-8200-011 to E2E-8200-014 - Reproducibility Tests
|
||||
// Description: End-to-end tests verifying full pipeline reproducibility.
|
||||
// Validates: identical verdict hash, identical manifest, frozen timestamps,
|
||||
// parallel execution produces identical results.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end reproducibility tests for the full security scanning pipeline.
|
||||
/// Verifies that identical inputs always produce identical outputs across:
|
||||
/// - Sequential runs
|
||||
/// - Parallel runs
|
||||
/// - With frozen timestamps
|
||||
/// </summary>
|
||||
[Collection("E2EReproducibility")]
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "8200.0001.0004")]
|
||||
[Trait("Feature", "E2E-Reproducibility")]
|
||||
public sealed class E2EReproducibilityTests : IClassFixture<E2EReproducibilityTestFixture>, IAsyncLifetime
|
||||
{
|
||||
private readonly E2EReproducibilityTestFixture _fixture;
|
||||
|
||||
public E2EReproducibilityTests(E2EReproducibilityTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.InitializeAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
#region E2E-8200-011: Identical Verdict Hash
|
||||
|
||||
[Fact(DisplayName = "Pipeline produces identical verdict hash across runs")]
|
||||
public async Task FullPipeline_ProducesIdenticalVerdictHash_AcrossRuns()
|
||||
{
|
||||
// Arrange - Create input snapshot
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act - Run pipeline twice with identical inputs
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Verdict IDs must match
|
||||
result1.VerdictId.Should().NotBeNullOrEmpty("Verdict ID should be computed");
|
||||
result2.VerdictId.Should().NotBeNullOrEmpty("Verdict ID should be computed");
|
||||
result1.VerdictId.Should().Be(result2.VerdictId, "Verdict ID must be identical across runs");
|
||||
|
||||
// Verdict hash must match
|
||||
result1.VerdictHash.Should().Be(result2.VerdictHash, "Verdict hash must be identical");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Pipeline produces identical verdict hash with 5 sequential runs")]
|
||||
public async Task FullPipeline_ProducesIdenticalVerdictHash_With5SequentialRuns()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
var results = new List<PipelineResult>();
|
||||
|
||||
// Act - Run pipeline 5 times sequentially
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
results.Add(await _fixture.RunFullPipelineAsync(inputs));
|
||||
}
|
||||
|
||||
// Assert - All verdict IDs must match
|
||||
var firstVerdictId = results[0].VerdictId;
|
||||
foreach (var result in results)
|
||||
{
|
||||
result.VerdictId.Should().Be(firstVerdictId, $"Run {results.IndexOf(result) + 1} verdict ID must match first run");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Verdict ID format is content-addressed")]
|
||||
public async Task VerdictId_Format_IsContentAddressed()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act
|
||||
var result = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Verdict ID should be in content-addressed format
|
||||
result.VerdictId.Should().StartWith("verdict:sha256:", "Verdict ID must use sha256 content-addressing");
|
||||
result.VerdictId.Should().MatchRegex(@"^verdict:sha256:[0-9a-f]{64}$", "Verdict ID must be valid sha256 hex");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region E2E-8200-012: Identical Bundle Manifest
|
||||
|
||||
[Fact(DisplayName = "Pipeline produces identical bundle manifest across runs")]
|
||||
public async Task FullPipeline_ProducesIdenticalBundleManifest_AcrossRuns()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act - Run pipeline twice
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Bundle manifests must be byte-for-byte identical
|
||||
result1.BundleManifest.Should().BeEquivalentTo(result2.BundleManifest, "Bundle manifest bytes must match");
|
||||
result1.BundleManifestHash.Should().Be(result2.BundleManifestHash, "Bundle manifest hash must match");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Bundle manifest contains all artifact hashes")]
|
||||
public async Task BundleManifest_ContainsAllArtifactHashes()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act
|
||||
var result = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Parse manifest and verify structure
|
||||
var manifestJson = System.Text.Encoding.UTF8.GetString(result.BundleManifest);
|
||||
using var doc = System.Text.Json.JsonDocument.Parse(manifestJson);
|
||||
var root = doc.RootElement;
|
||||
|
||||
root.TryGetProperty("version", out _).Should().BeTrue("Manifest should have version");
|
||||
root.TryGetProperty("createdAt", out _).Should().BeTrue("Manifest should have createdAt");
|
||||
root.TryGetProperty("artifacts", out var artifacts).Should().BeTrue("Manifest should have artifacts");
|
||||
|
||||
artifacts.TryGetProperty("sbom", out _).Should().BeTrue("Artifacts should include SBOM hash");
|
||||
artifacts.TryGetProperty("advisory-feed", out _).Should().BeTrue("Artifacts should include advisory feed hash");
|
||||
artifacts.TryGetProperty("policy-pack", out _).Should().BeTrue("Artifacts should include policy pack hash");
|
||||
artifacts.TryGetProperty("envelope", out _).Should().BeTrue("Artifacts should include envelope hash");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Manifest comparison detects differences")]
|
||||
public async Task ManifestComparer_DetectsDifferences_WhenInputsChange()
|
||||
{
|
||||
// Arrange - Create two different input snapshots
|
||||
var inputs1 = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Modify SBOM to create different input
|
||||
var modifiedSbom = E2EReproducibilityTestFixture.CreateMinimalSbom();
|
||||
var sbomJson = System.Text.Encoding.UTF8.GetString(modifiedSbom);
|
||||
var modifiedSbomJson = sbomJson.Replace("4.17.20", "4.17.21"); // Change version
|
||||
var modifiedSbomBytes = System.Text.Encoding.UTF8.GetBytes(modifiedSbomJson);
|
||||
|
||||
var inputs2 = new InputSnapshot
|
||||
{
|
||||
Sbom = modifiedSbomBytes,
|
||||
SbomHash = E2EReproducibilityTestFixture.ComputeHash(modifiedSbomBytes),
|
||||
AdvisoryFeed = inputs1.AdvisoryFeed,
|
||||
AdvisoryFeedHash = inputs1.AdvisoryFeedHash,
|
||||
PolicyPack = inputs1.PolicyPack,
|
||||
PolicyPackHash = inputs1.PolicyPackHash,
|
||||
VexDocument = inputs1.VexDocument,
|
||||
VexDocumentHash = inputs1.VexDocumentHash,
|
||||
SnapshotTimestamp = inputs1.SnapshotTimestamp
|
||||
};
|
||||
|
||||
// Act
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs1);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs2);
|
||||
|
||||
// Assert - Results should differ
|
||||
var comparison = ManifestComparer.Compare(result1, result2);
|
||||
comparison.IsMatch.Should().BeFalse("Different inputs should produce different outputs");
|
||||
comparison.Differences.Should().NotBeEmpty("Should detect at least one difference");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region E2E-8200-013: Frozen Clock Timestamps
|
||||
|
||||
[Fact(DisplayName = "Pipeline produces identical timestamps with frozen clock")]
|
||||
public async Task FullPipeline_ProducesIdenticalTimestamps_WithFrozenClock()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act - Run pipeline twice
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Execution timestamps must match (frozen clock)
|
||||
result1.ExecutionTimestamp.Should().Be(_fixture.FrozenTimestamp, "Timestamp should match frozen clock");
|
||||
result2.ExecutionTimestamp.Should().Be(_fixture.FrozenTimestamp, "Timestamp should match frozen clock");
|
||||
result1.ExecutionTimestamp.Should().Be(result2.ExecutionTimestamp, "Timestamps must be identical");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Manifest createdAt matches frozen timestamp")]
|
||||
public async Task BundleManifest_CreatedAt_MatchesFrozenTimestamp()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act
|
||||
var result = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Parse manifest and verify timestamp
|
||||
var manifestJson = System.Text.Encoding.UTF8.GetString(result.BundleManifest);
|
||||
using var doc = System.Text.Json.JsonDocument.Parse(manifestJson);
|
||||
var createdAt = doc.RootElement.GetProperty("createdAt").GetDateTimeOffset();
|
||||
|
||||
createdAt.Should().Be(_fixture.FrozenTimestamp, "Manifest createdAt should match frozen clock");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Input snapshot timestamp matches frozen clock")]
|
||||
public async Task InputSnapshot_Timestamp_MatchesFrozenClock()
|
||||
{
|
||||
// Arrange & Act
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Assert
|
||||
inputs.SnapshotTimestamp.Should().Be(_fixture.FrozenTimestamp, "Snapshot timestamp should match frozen clock");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region E2E-8200-014: Parallel Execution
|
||||
|
||||
[Fact(DisplayName = "10 concurrent pipeline runs produce identical results")]
|
||||
public async Task FullPipeline_ParallelExecution_10Concurrent_AllIdentical()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
const int concurrentRuns = 10;
|
||||
|
||||
// Act - Run pipeline 10 times in parallel
|
||||
var tasks = Enumerable.Range(0, concurrentRuns)
|
||||
.Select(_ => _fixture.RunFullPipelineAsync(inputs))
|
||||
.ToList();
|
||||
|
||||
var results = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All results must be identical
|
||||
var comparison = ManifestComparer.CompareMultiple(results.ToList());
|
||||
comparison.AllMatch.Should().BeTrue($"All {concurrentRuns} concurrent runs must produce identical results. {comparison.Summary}");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "5 concurrent pipeline runs produce identical verdict IDs")]
|
||||
public async Task FullPipeline_ParallelExecution_5Concurrent_IdenticalVerdictIds()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
const int concurrentRuns = 5;
|
||||
|
||||
// Act - Run pipeline 5 times in parallel
|
||||
var tasks = Enumerable.Range(0, concurrentRuns)
|
||||
.Select(_ => _fixture.RunFullPipelineAsync(inputs))
|
||||
.ToList();
|
||||
|
||||
var results = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All verdict IDs must match
|
||||
var firstVerdictId = results[0].VerdictId;
|
||||
foreach (var result in results)
|
||||
{
|
||||
result.VerdictId.Should().Be(firstVerdictId, "All parallel runs must produce same verdict ID");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Parallel runs with VEX exceptions produce identical results")]
|
||||
public async Task FullPipeline_ParallelWithVex_ProducesIdenticalResults()
|
||||
{
|
||||
// Arrange - Create inputs with VEX exceptions
|
||||
var vexDocument = E2EReproducibilityTestFixture.CreateVexDocumentWithExceptions("CVE-2024-0001");
|
||||
var inputs = await _fixture.SnapshotInputsAsync(vexDocumentPath: null);
|
||||
var inputsWithVex = new InputSnapshot
|
||||
{
|
||||
Sbom = inputs.Sbom,
|
||||
SbomHash = inputs.SbomHash,
|
||||
AdvisoryFeed = inputs.AdvisoryFeed,
|
||||
AdvisoryFeedHash = inputs.AdvisoryFeedHash,
|
||||
PolicyPack = inputs.PolicyPack,
|
||||
PolicyPackHash = inputs.PolicyPackHash,
|
||||
VexDocument = vexDocument,
|
||||
VexDocumentHash = E2EReproducibilityTestFixture.ComputeHash(vexDocument),
|
||||
SnapshotTimestamp = inputs.SnapshotTimestamp
|
||||
};
|
||||
|
||||
const int concurrentRuns = 5;
|
||||
|
||||
// Act - Run pipeline 5 times in parallel
|
||||
var tasks = Enumerable.Range(0, concurrentRuns)
|
||||
.Select(_ => _fixture.RunFullPipelineAsync(inputsWithVex))
|
||||
.ToList();
|
||||
|
||||
var results = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert - All results must be identical
|
||||
var comparison = ManifestComparer.CompareMultiple(results.ToList());
|
||||
comparison.AllMatch.Should().BeTrue("All parallel runs with VEX must produce identical results");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases and Error Handling
|
||||
|
||||
[Fact(DisplayName = "Empty SBOM produces deterministic empty result")]
|
||||
public async Task FullPipeline_EmptySbom_ProducesDeterministicResult()
|
||||
{
|
||||
// Arrange - Create empty SBOM
|
||||
var emptySbom = System.Text.Encoding.UTF8.GetBytes(
|
||||
System.Text.Json.JsonSerializer.Serialize(new { bomFormat = "CycloneDX", specVersion = "1.5", version = 1, components = Array.Empty<object>() }));
|
||||
|
||||
var inputs = new InputSnapshot
|
||||
{
|
||||
Sbom = emptySbom,
|
||||
SbomHash = E2EReproducibilityTestFixture.ComputeHash(emptySbom),
|
||||
AdvisoryFeed = E2EReproducibilityTestFixture.CreateMockAdvisoryFeed(),
|
||||
AdvisoryFeedHash = E2EReproducibilityTestFixture.ComputeHash(E2EReproducibilityTestFixture.CreateMockAdvisoryFeed()),
|
||||
PolicyPack = E2EReproducibilityTestFixture.CreateDefaultPolicyPack(),
|
||||
PolicyPackHash = E2EReproducibilityTestFixture.ComputeHash(E2EReproducibilityTestFixture.CreateDefaultPolicyPack()),
|
||||
VexDocument = null,
|
||||
VexDocumentHash = null,
|
||||
SnapshotTimestamp = _fixture.FrozenTimestamp
|
||||
};
|
||||
|
||||
// Act - Run pipeline twice
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - Results must be identical even with empty SBOM
|
||||
result1.VerdictId.Should().Be(result2.VerdictId);
|
||||
result1.BundleManifestHash.Should().Be(result2.BundleManifestHash);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VEX exceptions reduce blocking findings deterministically")]
|
||||
public async Task FullPipeline_VexExceptions_ReduceBlockingFindingsDeterministically()
|
||||
{
|
||||
// Arrange - Run without VEX
|
||||
var inputsWithoutVex = await _fixture.SnapshotInputsAsync();
|
||||
var resultWithoutVex = await _fixture.RunFullPipelineAsync(inputsWithoutVex);
|
||||
|
||||
// Run with VEX exception for CVE-2024-0001 (CRITICAL)
|
||||
var vexDocument = E2EReproducibilityTestFixture.CreateVexDocumentWithExceptions("CVE-2024-0001");
|
||||
var inputsWithVex = new InputSnapshot
|
||||
{
|
||||
Sbom = inputsWithoutVex.Sbom,
|
||||
SbomHash = inputsWithoutVex.SbomHash,
|
||||
AdvisoryFeed = inputsWithoutVex.AdvisoryFeed,
|
||||
AdvisoryFeedHash = inputsWithoutVex.AdvisoryFeedHash,
|
||||
PolicyPack = inputsWithoutVex.PolicyPack,
|
||||
PolicyPackHash = inputsWithoutVex.PolicyPackHash,
|
||||
VexDocument = vexDocument,
|
||||
VexDocumentHash = E2EReproducibilityTestFixture.ComputeHash(vexDocument),
|
||||
SnapshotTimestamp = inputsWithoutVex.SnapshotTimestamp
|
||||
};
|
||||
|
||||
var resultWithVex = await _fixture.RunFullPipelineAsync(inputsWithVex);
|
||||
|
||||
// Assert - VEX should change the verdict
|
||||
resultWithVex.VerdictId.Should().NotBe(resultWithoutVex.VerdictId, "VEX exception should change verdict");
|
||||
|
||||
// But the result with VEX should be deterministic
|
||||
var resultWithVex2 = await _fixture.RunFullPipelineAsync(inputsWithVex);
|
||||
resultWithVex.VerdictId.Should().Be(resultWithVex2.VerdictId, "VEX result should be deterministic");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "DSSE envelope hash is deterministic")]
|
||||
public async Task DsseEnvelope_Hash_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act - Run pipeline 3 times
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
var result3 = await _fixture.RunFullPipelineAsync(inputs);
|
||||
|
||||
// Assert - All envelope hashes must match
|
||||
result1.EnvelopeHash.Should().Be(result2.EnvelopeHash, "Envelope hash run 1 vs 2");
|
||||
result2.EnvelopeHash.Should().Be(result3.EnvelopeHash, "Envelope hash run 2 vs 3");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Comparison Helper Tests
|
||||
|
||||
[Fact(DisplayName = "ManifestComparer generates readable diff report")]
|
||||
public async Task ManifestComparer_GeneratesReadableDiffReport()
|
||||
{
|
||||
// Arrange
|
||||
var inputs1 = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Create different inputs
|
||||
var differentSbom = System.Text.Encoding.UTF8.GetBytes(
|
||||
System.Text.Json.JsonSerializer.Serialize(new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.5",
|
||||
version = 1,
|
||||
components = new[] { new { name = "different", version = "1.0.0", purl = "pkg:npm/different@1.0.0" } }
|
||||
}));
|
||||
|
||||
var inputs2 = new InputSnapshot
|
||||
{
|
||||
Sbom = differentSbom,
|
||||
SbomHash = E2EReproducibilityTestFixture.ComputeHash(differentSbom),
|
||||
AdvisoryFeed = inputs1.AdvisoryFeed,
|
||||
AdvisoryFeedHash = inputs1.AdvisoryFeedHash,
|
||||
PolicyPack = inputs1.PolicyPack,
|
||||
PolicyPackHash = inputs1.PolicyPackHash,
|
||||
VexDocument = null,
|
||||
VexDocumentHash = null,
|
||||
SnapshotTimestamp = inputs1.SnapshotTimestamp
|
||||
};
|
||||
|
||||
// Act
|
||||
var result1 = await _fixture.RunFullPipelineAsync(inputs1);
|
||||
var result2 = await _fixture.RunFullPipelineAsync(inputs2);
|
||||
var comparison = ManifestComparer.Compare(result1, result2);
|
||||
var report = ManifestComparer.GenerateDiffReport(comparison);
|
||||
|
||||
// Assert
|
||||
comparison.IsMatch.Should().BeFalse();
|
||||
report.Should().Contain("difference");
|
||||
report.Should().Contain("VerdictId");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ManifestComparer multiple comparison returns correct summary")]
|
||||
public async Task ManifestComparer_MultipleComparison_ReturnsCorrectSummary()
|
||||
{
|
||||
// Arrange
|
||||
var inputs = await _fixture.SnapshotInputsAsync();
|
||||
|
||||
// Act - Run pipeline 3 times
|
||||
var results = new List<PipelineResult>
|
||||
{
|
||||
await _fixture.RunFullPipelineAsync(inputs),
|
||||
await _fixture.RunFullPipelineAsync(inputs),
|
||||
await _fixture.RunFullPipelineAsync(inputs)
|
||||
};
|
||||
|
||||
var comparison = ManifestComparer.CompareMultiple(results);
|
||||
|
||||
// Assert
|
||||
comparison.AllMatch.Should().BeTrue();
|
||||
comparison.Summary.Should().Contain("identical");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Collection definition for E2E reproducibility tests to share the fixture.
|
||||
/// </summary>
|
||||
[CollectionDefinition("E2EReproducibility")]
|
||||
public sealed class E2EReproducibilityCollection : ICollectionFixture<E2EReproducibilityTestFixture>
|
||||
{
|
||||
}
|
||||
@@ -0,0 +1,473 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ManifestComparer.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Task: E2E-8200-004 - Add helper to compare verdict manifests byte-for-byte
|
||||
// Description: Provides byte-for-byte comparison of manifests and detailed diff reporting.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
|
||||
/// Compares manifests and pipeline results byte-for-byte for reproducibility verification.
|
||||
/// </summary>
|
||||
public static class ManifestComparer
|
||||
{
|
||||
/// <summary>
|
||||
/// Compares two pipeline results for exact equality.
|
||||
/// </summary>
|
||||
public static ManifestComparisonResult Compare(PipelineResult expected, PipelineResult actual)
|
||||
{
|
||||
var differences = new List<ManifestDifference>();
|
||||
|
||||
// Compare verdict IDs
|
||||
if (!string.Equals(expected.VerdictId, actual.VerdictId, StringComparison.Ordinal))
|
||||
{
|
||||
differences.Add(new ManifestDifference(
|
||||
"VerdictId",
|
||||
expected.VerdictId,
|
||||
actual.VerdictId,
|
||||
DifferenceType.ValueMismatch));
|
||||
}
|
||||
|
||||
// Compare verdict hashes
|
||||
if (!string.Equals(expected.VerdictHash, actual.VerdictHash, StringComparison.Ordinal))
|
||||
{
|
||||
differences.Add(new ManifestDifference(
|
||||
"VerdictHash",
|
||||
expected.VerdictHash,
|
||||
actual.VerdictHash,
|
||||
DifferenceType.HashMismatch));
|
||||
}
|
||||
|
||||
// Compare envelope hashes
|
||||
if (!string.Equals(expected.EnvelopeHash, actual.EnvelopeHash, StringComparison.Ordinal))
|
||||
{
|
||||
differences.Add(new ManifestDifference(
|
||||
"EnvelopeHash",
|
||||
expected.EnvelopeHash,
|
||||
actual.EnvelopeHash,
|
||||
DifferenceType.HashMismatch));
|
||||
}
|
||||
|
||||
// Compare bundle manifest hashes
|
||||
if (!string.Equals(expected.BundleManifestHash, actual.BundleManifestHash, StringComparison.Ordinal))
|
||||
{
|
||||
differences.Add(new ManifestDifference(
|
||||
"BundleManifestHash",
|
||||
expected.BundleManifestHash,
|
||||
actual.BundleManifestHash,
|
||||
DifferenceType.HashMismatch));
|
||||
}
|
||||
|
||||
// Compare bundle manifest bytes
|
||||
if (!expected.BundleManifest.AsSpan().SequenceEqual(actual.BundleManifest))
|
||||
{
|
||||
var byteDiff = FindByteDifference(expected.BundleManifest, actual.BundleManifest);
|
||||
differences.Add(new ManifestDifference(
|
||||
"BundleManifest",
|
||||
$"Bytes differ at offset {byteDiff.Offset}: expected 0x{byteDiff.Expected:X2}, actual 0x{byteDiff.Actual:X2}",
|
||||
$"Expected length: {expected.BundleManifest.Length}, Actual length: {actual.BundleManifest.Length}",
|
||||
DifferenceType.ByteMismatch));
|
||||
}
|
||||
|
||||
// Compare timestamps
|
||||
if (expected.ExecutionTimestamp != actual.ExecutionTimestamp)
|
||||
{
|
||||
differences.Add(new ManifestDifference(
|
||||
"ExecutionTimestamp",
|
||||
expected.ExecutionTimestamp.ToString("O"),
|
||||
actual.ExecutionTimestamp.ToString("O"),
|
||||
DifferenceType.ValueMismatch));
|
||||
}
|
||||
|
||||
return new ManifestComparisonResult(differences.Count == 0, differences);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares multiple pipeline results to verify they are all identical.
|
||||
/// </summary>
|
||||
public static MultipleComparisonResult CompareMultiple(IReadOnlyList<PipelineResult> results)
|
||||
{
|
||||
if (results.Count == 0)
|
||||
{
|
||||
return new MultipleComparisonResult(true, [], "No results to compare");
|
||||
}
|
||||
|
||||
if (results.Count == 1)
|
||||
{
|
||||
return new MultipleComparisonResult(true, [], "Only one result, nothing to compare");
|
||||
}
|
||||
|
||||
var baseline = results[0];
|
||||
var comparisons = new List<(int Index, ManifestComparisonResult Result)>();
|
||||
var allMatch = true;
|
||||
|
||||
for (int i = 1; i < results.Count; i++)
|
||||
{
|
||||
var comparison = Compare(baseline, results[i]);
|
||||
comparisons.Add((i, comparison));
|
||||
|
||||
if (!comparison.IsMatch)
|
||||
{
|
||||
allMatch = false;
|
||||
}
|
||||
}
|
||||
|
||||
var summary = allMatch
|
||||
? $"All {results.Count} results are identical"
|
||||
: $"{comparisons.Count(c => !c.Result.IsMatch)} of {results.Count - 1} comparisons have differences";
|
||||
|
||||
return new MultipleComparisonResult(allMatch, comparisons, summary);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares two byte arrays and returns detailed difference information.
|
||||
/// </summary>
|
||||
public static ByteComparisonResult CompareBytes(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual)
|
||||
{
|
||||
var differences = new List<ByteDifference>();
|
||||
|
||||
var minLength = Math.Min(expected.Length, actual.Length);
|
||||
var maxLength = Math.Max(expected.Length, actual.Length);
|
||||
|
||||
// Compare common bytes
|
||||
for (int i = 0; i < minLength; i++)
|
||||
{
|
||||
if (expected[i] != actual[i])
|
||||
{
|
||||
differences.Add(new ByteDifference(i, expected[i], actual[i]));
|
||||
}
|
||||
}
|
||||
|
||||
// Check for length mismatch
|
||||
var lengthMismatch = expected.Length != actual.Length;
|
||||
if (lengthMismatch)
|
||||
{
|
||||
for (int i = minLength; i < maxLength; i++)
|
||||
{
|
||||
var expectedByte = i < expected.Length ? expected[i] : (byte?)null;
|
||||
var actualByte = i < actual.Length ? actual[i] : (byte?)null;
|
||||
differences.Add(new ByteDifference(i, expectedByte, actualByte));
|
||||
}
|
||||
}
|
||||
|
||||
return new ByteComparisonResult(
|
||||
IsMatch: differences.Count == 0,
|
||||
ExpectedLength: expected.Length,
|
||||
ActualLength: actual.Length,
|
||||
Differences: differences,
|
||||
FirstDifferenceOffset: differences.Count > 0 ? differences[0].Offset : null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares two JSON documents for semantic equality (ignoring whitespace differences).
|
||||
/// </summary>
|
||||
public static JsonComparisonResult CompareJson(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual)
|
||||
{
|
||||
try
|
||||
{
|
||||
using var expectedDoc = JsonDocument.Parse(expected.ToArray());
|
||||
using var actualDoc = JsonDocument.Parse(actual.ToArray());
|
||||
|
||||
var differences = CompareJsonElements("$", expectedDoc.RootElement, actualDoc.RootElement);
|
||||
|
||||
return new JsonComparisonResult(
|
||||
IsMatch: differences.Count == 0,
|
||||
Differences: differences,
|
||||
ExpectedJson: Encoding.UTF8.GetString(expected),
|
||||
ActualJson: Encoding.UTF8.GetString(actual));
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return new JsonComparisonResult(
|
||||
IsMatch: false,
|
||||
Differences: [new JsonDifference("$", $"JSON parse error: {ex.Message}", null, JsonDifferenceType.ParseError)],
|
||||
ExpectedJson: Encoding.UTF8.GetString(expected),
|
||||
ActualJson: Encoding.UTF8.GetString(actual));
|
||||
}
|
||||
}
|
||||
|
||||
private static List<JsonDifference> CompareJsonElements(string path, JsonElement expected, JsonElement actual)
|
||||
{
|
||||
var differences = new List<JsonDifference>();
|
||||
|
||||
if (expected.ValueKind != actual.ValueKind)
|
||||
{
|
||||
differences.Add(new JsonDifference(
|
||||
path,
|
||||
$"Type: {expected.ValueKind}",
|
||||
$"Type: {actual.ValueKind}",
|
||||
JsonDifferenceType.TypeMismatch));
|
||||
return differences;
|
||||
}
|
||||
|
||||
switch (expected.ValueKind)
|
||||
{
|
||||
case JsonValueKind.Object:
|
||||
var expectedProps = expected.EnumerateObject().ToDictionary(p => p.Name);
|
||||
var actualProps = actual.EnumerateObject().ToDictionary(p => p.Name);
|
||||
|
||||
foreach (var prop in expectedProps)
|
||||
{
|
||||
var propPath = $"{path}.{prop.Key}";
|
||||
if (!actualProps.TryGetValue(prop.Key, out var actualProp))
|
||||
{
|
||||
differences.Add(new JsonDifference(propPath, prop.Value.ToString(), null, JsonDifferenceType.MissingProperty));
|
||||
}
|
||||
else
|
||||
{
|
||||
differences.AddRange(CompareJsonElements(propPath, prop.Value.Value, actualProp.Value));
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var prop in actualProps)
|
||||
{
|
||||
if (!expectedProps.ContainsKey(prop.Key))
|
||||
{
|
||||
var propPath = $"{path}.{prop.Key}";
|
||||
differences.Add(new JsonDifference(propPath, null, prop.Value.ToString(), JsonDifferenceType.ExtraProperty));
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case JsonValueKind.Array:
|
||||
var expectedArray = expected.EnumerateArray().ToList();
|
||||
var actualArray = actual.EnumerateArray().ToList();
|
||||
|
||||
if (expectedArray.Count != actualArray.Count)
|
||||
{
|
||||
differences.Add(new JsonDifference(
|
||||
path,
|
||||
$"Length: {expectedArray.Count}",
|
||||
$"Length: {actualArray.Count}",
|
||||
JsonDifferenceType.ArrayLengthMismatch));
|
||||
}
|
||||
|
||||
var minCount = Math.Min(expectedArray.Count, actualArray.Count);
|
||||
for (int i = 0; i < minCount; i++)
|
||||
{
|
||||
differences.AddRange(CompareJsonElements($"{path}[{i}]", expectedArray[i], actualArray[i]));
|
||||
}
|
||||
break;
|
||||
|
||||
case JsonValueKind.String:
|
||||
if (expected.GetString() != actual.GetString())
|
||||
{
|
||||
differences.Add(new JsonDifference(path, expected.GetString(), actual.GetString(), JsonDifferenceType.ValueMismatch));
|
||||
}
|
||||
break;
|
||||
|
||||
case JsonValueKind.Number:
|
||||
if (expected.GetRawText() != actual.GetRawText())
|
||||
{
|
||||
differences.Add(new JsonDifference(path, expected.GetRawText(), actual.GetRawText(), JsonDifferenceType.ValueMismatch));
|
||||
}
|
||||
break;
|
||||
|
||||
case JsonValueKind.True:
|
||||
case JsonValueKind.False:
|
||||
if (expected.GetBoolean() != actual.GetBoolean())
|
||||
{
|
||||
differences.Add(new JsonDifference(path, expected.GetBoolean().ToString(), actual.GetBoolean().ToString(), JsonDifferenceType.ValueMismatch));
|
||||
}
|
||||
break;
|
||||
|
||||
case JsonValueKind.Null:
|
||||
// Both are null, no difference
|
||||
break;
|
||||
}
|
||||
|
||||
return differences;
|
||||
}
|
||||
|
||||
private static ByteDifference FindByteDifference(byte[] expected, byte[] actual)
|
||||
{
|
||||
var minLength = Math.Min(expected.Length, actual.Length);
|
||||
|
||||
for (int i = 0; i < minLength; i++)
|
||||
{
|
||||
if (expected[i] != actual[i])
|
||||
{
|
||||
return new ByteDifference(i, expected[i], actual[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Length difference
|
||||
if (expected.Length != actual.Length)
|
||||
{
|
||||
return new ByteDifference(
|
||||
minLength,
|
||||
minLength < expected.Length ? expected[minLength] : (byte?)null,
|
||||
minLength < actual.Length ? actual[minLength] : (byte?)null);
|
||||
}
|
||||
|
||||
// No difference (shouldn't happen if called correctly)
|
||||
return new ByteDifference(0, 0, 0);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a detailed diff report for debugging reproducibility failures.
|
||||
/// </summary>
|
||||
public static string GenerateDiffReport(ManifestComparisonResult comparison)
|
||||
{
|
||||
var sb = new StringBuilder();
|
||||
sb.AppendLine("=== Manifest Comparison Report ===");
|
||||
sb.AppendLine();
|
||||
|
||||
if (comparison.IsMatch)
|
||||
{
|
||||
sb.AppendLine("✓ All fields match exactly");
|
||||
return sb.ToString();
|
||||
}
|
||||
|
||||
sb.AppendLine($"✗ Found {comparison.Differences.Count} difference(s):");
|
||||
sb.AppendLine();
|
||||
|
||||
foreach (var diff in comparison.Differences)
|
||||
{
|
||||
sb.AppendLine($" [{diff.Type}] {diff.Field}:");
|
||||
sb.AppendLine($" Expected: {diff.Expected}");
|
||||
sb.AppendLine($" Actual: {diff.Actual}");
|
||||
sb.AppendLine();
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a hex dump comparison for byte-level debugging.
|
||||
/// </summary>
|
||||
public static string GenerateHexDump(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual, int contextBytes = 16)
|
||||
{
|
||||
var comparison = CompareBytes(expected, actual);
|
||||
var sb = new StringBuilder();
|
||||
|
||||
sb.AppendLine("=== Hex Dump Comparison ===");
|
||||
sb.AppendLine($"Expected length: {expected.Length}");
|
||||
sb.AppendLine($"Actual length: {actual.Length}");
|
||||
sb.AppendLine();
|
||||
|
||||
if (comparison.IsMatch)
|
||||
{
|
||||
sb.AppendLine("✓ Bytes are identical");
|
||||
return sb.ToString();
|
||||
}
|
||||
|
||||
sb.AppendLine($"✗ Found {comparison.Differences.Count} byte difference(s)");
|
||||
sb.AppendLine();
|
||||
|
||||
// Show first few differences with context
|
||||
var diffsToShow = comparison.Differences.Take(5).ToList();
|
||||
foreach (var diff in diffsToShow)
|
||||
{
|
||||
var startOffset = Math.Max(0, diff.Offset - contextBytes);
|
||||
var endOffset = Math.Min(Math.Max(expected.Length, actual.Length), diff.Offset + contextBytes);
|
||||
|
||||
sb.AppendLine($"Difference at offset 0x{diff.Offset:X8} ({diff.Offset}):");
|
||||
sb.AppendLine($" Expected: 0x{diff.Expected:X2} ('{(char?)(diff.Expected >= 32 && diff.Expected < 127 ? diff.Expected : '.')}')" );
|
||||
sb.AppendLine($" Actual: 0x{diff.Actual:X2} ('{(char?)(diff.Actual >= 32 && diff.Actual < 127 ? diff.Actual : '.')}')" );
|
||||
sb.AppendLine();
|
||||
}
|
||||
|
||||
if (comparison.Differences.Count > 5)
|
||||
{
|
||||
sb.AppendLine($"... and {comparison.Differences.Count - 5} more differences");
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
#region Result Types
|
||||
|
||||
/// <summary>
|
||||
/// Result of comparing two manifests.
|
||||
/// </summary>
|
||||
public sealed record ManifestComparisonResult(
|
||||
bool IsMatch,
|
||||
IReadOnlyList<ManifestDifference> Differences);
|
||||
|
||||
/// <summary>
|
||||
/// A single difference between manifests.
|
||||
/// </summary>
|
||||
public sealed record ManifestDifference(
|
||||
string Field,
|
||||
string? Expected,
|
||||
string? Actual,
|
||||
DifferenceType Type);
|
||||
|
||||
/// <summary>
|
||||
/// Type of difference found.
|
||||
/// </summary>
|
||||
public enum DifferenceType
|
||||
{
|
||||
ValueMismatch,
|
||||
HashMismatch,
|
||||
ByteMismatch,
|
||||
LengthMismatch,
|
||||
Missing,
|
||||
Extra
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of comparing multiple pipeline results.
|
||||
/// </summary>
|
||||
public sealed record MultipleComparisonResult(
|
||||
bool AllMatch,
|
||||
IReadOnlyList<(int Index, ManifestComparisonResult Result)> Comparisons,
|
||||
string Summary);
|
||||
|
||||
/// <summary>
|
||||
/// Result of byte-level comparison.
|
||||
/// </summary>
|
||||
public sealed record ByteComparisonResult(
|
||||
bool IsMatch,
|
||||
int ExpectedLength,
|
||||
int ActualLength,
|
||||
IReadOnlyList<ByteDifference> Differences,
|
||||
int? FirstDifferenceOffset);
|
||||
|
||||
/// <summary>
|
||||
/// A single byte difference.
|
||||
/// </summary>
|
||||
public sealed record ByteDifference(
|
||||
int Offset,
|
||||
byte? Expected,
|
||||
byte? Actual);
|
||||
|
||||
/// <summary>
|
||||
/// Result of JSON comparison.
|
||||
/// </summary>
|
||||
public sealed record JsonComparisonResult(
|
||||
bool IsMatch,
|
||||
IReadOnlyList<JsonDifference> Differences,
|
||||
string ExpectedJson,
|
||||
string ActualJson);
|
||||
|
||||
/// <summary>
|
||||
/// A single JSON difference.
|
||||
/// </summary>
|
||||
public sealed record JsonDifference(
|
||||
string Path,
|
||||
string? Expected,
|
||||
string? Actual,
|
||||
JsonDifferenceType Type);
|
||||
|
||||
/// <summary>
|
||||
/// Type of JSON difference.
|
||||
/// </summary>
|
||||
public enum JsonDifferenceType
|
||||
{
|
||||
ValueMismatch,
|
||||
TypeMismatch,
|
||||
MissingProperty,
|
||||
ExtraProperty,
|
||||
ArrayLengthMismatch,
|
||||
ParseError
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,79 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.E2E.csproj
|
||||
Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
Task: E2E-8200-001 - Create tests/integration/StellaOps.Integration.E2E/ project
|
||||
Description: End-to-end reproducibility tests covering full pipeline:
|
||||
ingest → normalize → diff → decide → attest → bundle → reverify
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.7.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<PackageReference Include="Testcontainers" Version="3.6.0" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Scanner WebService for integration testing -->
|
||||
<ProjectReference Include="../../Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj" />
|
||||
|
||||
<!-- Scanner Core for contracts -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
|
||||
<!-- Concelier for advisory ingestion and normalization -->
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />
|
||||
|
||||
<!-- Policy for verdict computation -->
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
|
||||
|
||||
<!-- Attestor for DSSE envelope and bundle creation -->
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.Dsse/StellaOps.Attestor.Dsse.csproj" />
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.ProofChain/StellaOps.Attestor.ProofChain.csproj" />
|
||||
<ProjectReference Include="../../Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj" />
|
||||
|
||||
<!-- Cryptography for hashing and content addressing -->
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
|
||||
|
||||
<!-- Canonical JSON for deterministic serialization -->
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
|
||||
|
||||
<!-- Testing infrastructure -->
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Testing.Determinism/StellaOps.Testing.Determinism.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- E2E test fixtures -->
|
||||
<Content Include="../../fixtures/**/*">
|
||||
<Link>fixtures/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
|
||||
<!-- Golden baselines for reproducibility verification -->
|
||||
<Content Include="../__Benchmarks/determinism/**/*">
|
||||
<Link>baselines/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,453 @@
|
||||
// =============================================================================
|
||||
// StellaOps.Integration.Performance - Performance Baseline Tests
|
||||
// Sprint 3500.0004.0003 - T7: Performance Baseline Tests
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
|
||||
/// Performance baseline tests to establish and validate performance characteristics.
|
||||
/// Uses timing measurements against known baselines with 20% regression threshold.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// T7-AC1: Score computation time baseline
|
||||
/// T7-AC2: Proof bundle generation baseline
|
||||
/// T7-AC3: Call graph extraction baseline
|
||||
/// T7-AC4: Reachability computation baseline
|
||||
/// T7-AC5: Regression alerts on >20% degradation
|
||||
/// </remarks>
|
||||
[Trait("Category", "Performance")]
|
||||
[Trait("Category", "Integration")]
|
||||
public class PerformanceBaselineTests : IClassFixture<PerformanceTestFixture>
|
||||
{
|
||||
private readonly PerformanceTestFixture _fixture;
|
||||
private const double RegressionThresholdPercent = 20.0;
|
||||
|
||||
public PerformanceBaselineTests(PerformanceTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region T7-AC1: Score Computation Baseline
|
||||
|
||||
[Fact(DisplayName = "T7-AC1.1: Score computation completes within baseline")]
|
||||
public async Task ScoreComputation_CompletesWithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("score_computation_ms");
|
||||
var findings = GenerateSampleFindings(100);
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var score = await ComputeScoreAsync(findings);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var actualMs = sw.ElapsedMilliseconds;
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
|
||||
actualMs.Should().BeLessThanOrEqualTo((long)threshold,
|
||||
$"Score computation took {actualMs}ms, exceeding baseline {baseline}ms + {RegressionThresholdPercent}% threshold");
|
||||
|
||||
// Record for baseline updates
|
||||
_fixture.RecordMeasurement("score_computation_ms", actualMs);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC1.2: Score computation scales linearly with findings")]
|
||||
public async Task ScoreComputation_ScalesLinearly()
|
||||
{
|
||||
// Arrange
|
||||
var sizes = new[] { 10, 50, 100, 200 };
|
||||
var times = new List<(int size, long ms)>();
|
||||
|
||||
// Act
|
||||
foreach (var size in sizes)
|
||||
{
|
||||
var findings = GenerateSampleFindings(size);
|
||||
var sw = Stopwatch.StartNew();
|
||||
await ComputeScoreAsync(findings);
|
||||
sw.Stop();
|
||||
times.Add((size, sw.ElapsedMilliseconds));
|
||||
}
|
||||
|
||||
// Assert - verify roughly linear scaling (within 3x of linear)
|
||||
var baseRatio = times[0].ms / (double)times[0].size;
|
||||
foreach (var (size, ms) in times.Skip(1))
|
||||
{
|
||||
var actualRatio = ms / (double)size;
|
||||
var scaleFactor = actualRatio / baseRatio;
|
||||
scaleFactor.Should().BeLessThan(3.0,
|
||||
$"Score computation at size {size} shows non-linear scaling (factor: {scaleFactor:F2}x)");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC1.3: Score computation handles large finding sets")]
|
||||
public async Task ScoreComputation_HandlesLargeSets()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("score_computation_large_ms");
|
||||
var findings = GenerateSampleFindings(1000);
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var score = await ComputeScoreAsync(findings);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold);
|
||||
|
||||
_fixture.RecordMeasurement("score_computation_large_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T7-AC2: Proof Bundle Generation Baseline
|
||||
|
||||
[Fact(DisplayName = "T7-AC2.1: Proof bundle generation completes within baseline")]
|
||||
public async Task ProofBundleGeneration_CompletesWithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("proof_bundle_generation_ms");
|
||||
var manifest = GenerateSampleManifest();
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var bundle = await GenerateProofBundleAsync(manifest);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold,
|
||||
$"Proof bundle generation took {sw.ElapsedMilliseconds}ms, exceeding baseline {baseline}ms");
|
||||
|
||||
_fixture.RecordMeasurement("proof_bundle_generation_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC2.2: Proof signing performance within baseline")]
|
||||
public async Task ProofSigning_WithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("proof_signing_ms");
|
||||
var payload = GenerateSamplePayload(10 * 1024); // 10KB payload
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var signature = await SignPayloadAsync(payload);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold);
|
||||
|
||||
_fixture.RecordMeasurement("proof_signing_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T7-AC3: Call Graph Extraction Baseline
|
||||
|
||||
[Fact(DisplayName = "T7-AC3.1: .NET call graph extraction within baseline")]
|
||||
public async Task DotNetCallGraphExtraction_WithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("dotnet_callgraph_extraction_ms");
|
||||
var assemblyPath = _fixture.GetTestAssemblyPath("DotNetSample");
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var graph = await ExtractDotNetCallGraphAsync(assemblyPath);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold,
|
||||
$"Call graph extraction took {sw.ElapsedMilliseconds}ms, exceeding baseline {baseline}ms");
|
||||
|
||||
_fixture.RecordMeasurement("dotnet_callgraph_extraction_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC3.2: Call graph scales with assembly size")]
|
||||
public async Task CallGraphExtraction_ScalesWithSize()
|
||||
{
|
||||
// Arrange
|
||||
var assemblies = _fixture.GetTestAssemblies();
|
||||
var results = new List<(string name, int nodes, long ms)>();
|
||||
|
||||
// Act
|
||||
foreach (var assembly in assemblies)
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var graph = await ExtractDotNetCallGraphAsync(assembly.Path);
|
||||
sw.Stop();
|
||||
results.Add((assembly.Name, graph.NodeCount, sw.ElapsedMilliseconds));
|
||||
}
|
||||
|
||||
// Assert - log results for baseline establishment
|
||||
foreach (var (name, nodes, ms) in results)
|
||||
{
|
||||
_fixture.RecordMeasurement($"callgraph_{name}_ms", ms);
|
||||
}
|
||||
|
||||
// Verify no catastrophic performance (>10s for any assembly)
|
||||
results.Should().AllSatisfy(r => r.ms.Should().BeLessThan(10000));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T7-AC4: Reachability Computation Baseline
|
||||
|
||||
[Fact(DisplayName = "T7-AC4.1: Reachability computation within baseline")]
|
||||
public async Task ReachabilityComputation_WithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("reachability_computation_ms");
|
||||
var callGraph = GenerateSampleCallGraph(500, 1000); // 500 nodes, 1000 edges
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var result = await ComputeReachabilityAsync(callGraph);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold,
|
||||
$"Reachability computation took {sw.ElapsedMilliseconds}ms, exceeding baseline {baseline}ms");
|
||||
|
||||
_fixture.RecordMeasurement("reachability_computation_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC4.2: Large graph reachability within baseline")]
|
||||
public async Task LargeGraphReachability_WithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("reachability_large_graph_ms");
|
||||
var callGraph = GenerateSampleCallGraph(2000, 5000); // 2000 nodes, 5000 edges
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var result = await ComputeReachabilityAsync(callGraph);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold,
|
||||
$"Large graph reachability took {sw.ElapsedMilliseconds}ms, exceeding baseline {baseline}ms");
|
||||
|
||||
_fixture.RecordMeasurement("reachability_large_graph_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC4.3: Reachability with deep paths within baseline")]
|
||||
public async Task DeepPathReachability_WithinBaseline()
|
||||
{
|
||||
// Arrange
|
||||
var baseline = _fixture.GetBaseline("reachability_deep_path_ms");
|
||||
var callGraph = GenerateDeepCallGraph(100); // 100 levels deep
|
||||
|
||||
// Act
|
||||
var sw = Stopwatch.StartNew();
|
||||
var result = await ComputeReachabilityAsync(callGraph);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
sw.ElapsedMilliseconds.Should().BeLessThanOrEqualTo((long)threshold);
|
||||
|
||||
_fixture.RecordMeasurement("reachability_deep_path_ms", sw.ElapsedMilliseconds);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T7-AC5: Regression Alerts
|
||||
|
||||
[Fact(DisplayName = "T7-AC5.1: All baselines within threshold")]
|
||||
public void AllBaselines_WithinThreshold()
|
||||
{
|
||||
// Arrange
|
||||
var measurements = _fixture.GetAllMeasurements();
|
||||
var regressions = new List<string>();
|
||||
|
||||
// Act & Assert
|
||||
foreach (var (metric, measured) in measurements)
|
||||
{
|
||||
var baseline = _fixture.GetBaseline(metric);
|
||||
var threshold = baseline * (1 + RegressionThresholdPercent / 100);
|
||||
|
||||
if (measured > threshold)
|
||||
{
|
||||
var regression = (measured - baseline) / baseline * 100;
|
||||
regressions.Add($"{metric}: {measured}ms vs baseline {baseline}ms (+{regression:F1}%)");
|
||||
}
|
||||
}
|
||||
|
||||
regressions.Should().BeEmpty(
|
||||
$"Performance regressions detected (>{RegressionThresholdPercent}%):\n" +
|
||||
string.Join("\n", regressions));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T7-AC5.2: Generate regression report")]
|
||||
public void GenerateRegressionReport()
|
||||
{
|
||||
// Arrange
|
||||
var measurements = _fixture.GetAllMeasurements();
|
||||
|
||||
// Act
|
||||
var report = new PerformanceReport
|
||||
{
|
||||
GeneratedAt = DateTime.UtcNow,
|
||||
ThresholdPercent = RegressionThresholdPercent,
|
||||
Metrics = measurements.Select(m => new MetricReport
|
||||
{
|
||||
Name = m.metric,
|
||||
Baseline = _fixture.GetBaseline(m.metric),
|
||||
Measured = m.value,
|
||||
DeltaPercent = (m.value - _fixture.GetBaseline(m.metric)) / _fixture.GetBaseline(m.metric) * 100
|
||||
}).ToList()
|
||||
};
|
||||
|
||||
// Assert - report should be valid
|
||||
report.Metrics.Should().NotBeEmpty();
|
||||
|
||||
// Write report for CI consumption
|
||||
var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
|
||||
_fixture.SaveReport("performance-report.json", json);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static List<SampleFinding> GenerateSampleFindings(int count)
|
||||
{
|
||||
return Enumerable.Range(1, count)
|
||||
.Select(i => new SampleFinding
|
||||
{
|
||||
Id = $"finding-{i:D4}",
|
||||
CveId = $"CVE-2024-{i:D5}",
|
||||
Severity = (i % 4) switch
|
||||
{
|
||||
0 => "CRITICAL",
|
||||
1 => "HIGH",
|
||||
2 => "MEDIUM",
|
||||
_ => "LOW"
|
||||
},
|
||||
CvssScore = 10.0 - (i % 10)
|
||||
})
|
||||
.ToList();
|
||||
}
|
||||
|
||||
private static async Task<double> ComputeScoreAsync(List<SampleFinding> findings)
|
||||
{
|
||||
// Simulated score computation
|
||||
await Task.Delay(findings.Count / 10); // ~10 findings per ms
|
||||
return findings.Sum(f => f.CvssScore) / findings.Count;
|
||||
}
|
||||
|
||||
private static SampleManifest GenerateSampleManifest()
|
||||
{
|
||||
return new SampleManifest
|
||||
{
|
||||
Id = Guid.NewGuid().ToString(),
|
||||
CreatedAt = DateTime.UtcNow,
|
||||
Findings = GenerateSampleFindings(50)
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<byte[]> GenerateProofBundleAsync(SampleManifest manifest)
|
||||
{
|
||||
await Task.Delay(50); // Simulated bundle generation
|
||||
return JsonSerializer.SerializeToUtf8Bytes(manifest);
|
||||
}
|
||||
|
||||
private static byte[] GenerateSamplePayload(int sizeBytes)
|
||||
{
|
||||
var random = new Random(42);
|
||||
var buffer = new byte[sizeBytes];
|
||||
random.NextBytes(buffer);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
private static async Task<byte[]> SignPayloadAsync(byte[] payload)
|
||||
{
|
||||
await Task.Delay(10); // Simulated signing
|
||||
using var sha256 = System.Security.Cryptography.SHA256.Create();
|
||||
return sha256.ComputeHash(payload);
|
||||
}
|
||||
|
||||
private static async Task<SampleCallGraph> ExtractDotNetCallGraphAsync(string assemblyPath)
|
||||
{
|
||||
await Task.Delay(100); // Simulated extraction
|
||||
return new SampleCallGraph { NodeCount = 100, EdgeCount = 250 };
|
||||
}
|
||||
|
||||
private static SampleCallGraph GenerateSampleCallGraph(int nodes, int edges)
|
||||
{
|
||||
return new SampleCallGraph { NodeCount = nodes, EdgeCount = edges };
|
||||
}
|
||||
|
||||
private static SampleCallGraph GenerateDeepCallGraph(int depth)
|
||||
{
|
||||
return new SampleCallGraph { NodeCount = depth, EdgeCount = depth - 1, Depth = depth };
|
||||
}
|
||||
|
||||
private static async Task<ReachabilityResult> ComputeReachabilityAsync(SampleCallGraph graph)
|
||||
{
|
||||
// Simulated reachability - O(V + E) complexity
|
||||
var delay = (graph.NodeCount + graph.EdgeCount) / 100;
|
||||
await Task.Delay(Math.Max(1, delay));
|
||||
return new ReachabilityResult { ReachableNodes = graph.NodeCount / 2 };
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Sample Types
|
||||
|
||||
private record SampleFinding
|
||||
{
|
||||
public string Id { get; init; } = "";
|
||||
public string CveId { get; init; } = "";
|
||||
public string Severity { get; init; } = "";
|
||||
public double CvssScore { get; init; }
|
||||
}
|
||||
|
||||
private record SampleManifest
|
||||
{
|
||||
public string Id { get; init; } = "";
|
||||
public DateTime CreatedAt { get; init; }
|
||||
public List<SampleFinding> Findings { get; init; } = new();
|
||||
}
|
||||
|
||||
private record SampleCallGraph
|
||||
{
|
||||
public int NodeCount { get; init; }
|
||||
public int EdgeCount { get; init; }
|
||||
public int Depth { get; init; }
|
||||
}
|
||||
|
||||
private record ReachabilityResult
|
||||
{
|
||||
public int ReachableNodes { get; init; }
|
||||
}
|
||||
|
||||
private record PerformanceReport
|
||||
{
|
||||
public DateTime GeneratedAt { get; init; }
|
||||
public double ThresholdPercent { get; init; }
|
||||
public List<MetricReport> Metrics { get; init; } = new();
|
||||
}
|
||||
|
||||
private record MetricReport
|
||||
{
|
||||
public string Name { get; init; } = "";
|
||||
public double Baseline { get; init; }
|
||||
public double Measured { get; init; }
|
||||
public double DeltaPercent { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,147 @@
|
||||
// =============================================================================
|
||||
// StellaOps.Integration.Performance - Performance Test Fixture
|
||||
// Sprint 3500.0004.0003 - T7: Performance Baseline Tests
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for performance baseline tests.
|
||||
/// Manages baseline data and measurement recording.
|
||||
/// </summary>
|
||||
public sealed class PerformanceTestFixture : IDisposable
|
||||
{
|
||||
private readonly string _baselinesPath;
|
||||
private readonly string _outputPath;
|
||||
private readonly Dictionary<string, double> _baselines;
|
||||
private readonly Dictionary<string, double> _measurements = new();
|
||||
|
||||
public PerformanceTestFixture()
|
||||
{
|
||||
_baselinesPath = Path.Combine(AppContext.BaseDirectory, "baselines");
|
||||
_outputPath = Path.Combine(AppContext.BaseDirectory, "output");
|
||||
|
||||
Directory.CreateDirectory(_outputPath);
|
||||
|
||||
_baselines = LoadBaselines();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the baseline value for a metric.
|
||||
/// Returns default if baseline not found.
|
||||
/// </summary>
|
||||
public double GetBaseline(string metric)
|
||||
{
|
||||
return _baselines.TryGetValue(metric, out var baseline) ? baseline : GetDefaultBaseline(metric);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Records a measurement for a metric.
|
||||
/// </summary>
|
||||
public void RecordMeasurement(string metric, double value)
|
||||
{
|
||||
_measurements[metric] = value;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all recorded measurements.
|
||||
/// </summary>
|
||||
public IEnumerable<(string metric, double value)> GetAllMeasurements()
|
||||
{
|
||||
return _measurements.Select(kv => (kv.Key, kv.Value));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the path to a test assembly.
|
||||
/// </summary>
|
||||
public string GetTestAssemblyPath(string name)
|
||||
{
|
||||
var path = Path.Combine(AppContext.BaseDirectory, "test-assemblies", $"{name}.dll");
|
||||
return File.Exists(path) ? path : Path.Combine(AppContext.BaseDirectory, "StellaOps.Integration.Performance.dll");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets available test assemblies.
|
||||
/// </summary>
|
||||
public IEnumerable<(string Name, string Path)> GetTestAssemblies()
|
||||
{
|
||||
var testAssembliesDir = Path.Combine(AppContext.BaseDirectory, "test-assemblies");
|
||||
|
||||
if (Directory.Exists(testAssembliesDir))
|
||||
{
|
||||
foreach (var file in Directory.GetFiles(testAssembliesDir, "*.dll"))
|
||||
{
|
||||
yield return (Path.GetFileNameWithoutExtension(file), file);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Use self as test assembly
|
||||
var selfPath = Path.Combine(AppContext.BaseDirectory, "StellaOps.Integration.Performance.dll");
|
||||
if (File.Exists(selfPath))
|
||||
{
|
||||
yield return ("Self", selfPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Saves a report file.
|
||||
/// </summary>
|
||||
public void SaveReport(string filename, string content)
|
||||
{
|
||||
var path = Path.Combine(_outputPath, filename);
|
||||
File.WriteAllText(path, content);
|
||||
}
|
||||
|
||||
private Dictionary<string, double> LoadBaselines()
|
||||
{
|
||||
var baselinesFile = Path.Combine(_baselinesPath, "performance-baselines.json");
|
||||
|
||||
if (File.Exists(baselinesFile))
|
||||
{
|
||||
var json = File.ReadAllText(baselinesFile);
|
||||
return JsonSerializer.Deserialize<Dictionary<string, double>>(json) ?? GetDefaultBaselines();
|
||||
}
|
||||
|
||||
return GetDefaultBaselines();
|
||||
}
|
||||
|
||||
private static Dictionary<string, double> GetDefaultBaselines()
|
||||
{
|
||||
return new Dictionary<string, double>
|
||||
{
|
||||
// Score computation
|
||||
["score_computation_ms"] = 100,
|
||||
["score_computation_large_ms"] = 500,
|
||||
|
||||
// Proof bundle
|
||||
["proof_bundle_generation_ms"] = 200,
|
||||
["proof_signing_ms"] = 50,
|
||||
|
||||
// Call graph
|
||||
["dotnet_callgraph_extraction_ms"] = 500,
|
||||
|
||||
// Reachability
|
||||
["reachability_computation_ms"] = 100,
|
||||
["reachability_large_graph_ms"] = 500,
|
||||
["reachability_deep_path_ms"] = 200
|
||||
};
|
||||
}
|
||||
|
||||
private static double GetDefaultBaseline(string metric)
|
||||
{
|
||||
// Default to 1 second for unknown metrics
|
||||
return 1000;
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
// Save measurements for potential baseline updates
|
||||
var measurementsFile = Path.Combine(_outputPath, "measurements.json");
|
||||
var json = JsonSerializer.Serialize(_measurements, new JsonSerializerOptions { WriteIndented = true });
|
||||
File.WriteAllText(measurementsFile, json);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BenchmarkDotNet" Version="0.14.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\Scanner\StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj" />
|
||||
<ProjectReference Include="..\Attestor\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
|
||||
<ProjectReference Include="..\Concelier\__Libraries\StellaOps.Concelier.CallGraph\StellaOps.Concelier.CallGraph.csproj" />
|
||||
<ProjectReference Include="..\Policy\__Libraries\StellaOps.Policy.Scoring\StellaOps.Policy.Scoring.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="..\__Benchmarks\baselines\**\*" LinkBase="baselines" CopyToOutputDirectory="PreserveNewest" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,248 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresOnlyStartupTests.cs
|
||||
// Sprint: SPRINT_5100_0001_0001_mongodb_cli_cleanup_consolidation
|
||||
// Task: T1.13 - PostgreSQL-only Platform Startup Test
|
||||
// Description: Validates platform can start with PostgreSQL-only infrastructure.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Reflection;
|
||||
using StellaOps.Infrastructure.Postgres.Testing;
|
||||
using Testcontainers.PostgreSql;
|
||||
|
||||
namespace StellaOps.Integration.Platform;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests validating PostgreSQL-only platform startup.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// T1.13-AC1: Platform starts successfully with PostgreSQL only
|
||||
/// T1.13-AC2: All services connect to PostgreSQL correctly
|
||||
/// T1.13-AC3: Schema migrations run successfully
|
||||
/// T1.13-AC4: No MongoDB connection attempts in logs
|
||||
/// </remarks>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Category", "Platform")]
|
||||
[Trait("Category", "PostgresOnly")]
|
||||
public class PostgresOnlyStartupTests : IAsyncLifetime
|
||||
{
|
||||
private PostgreSqlContainer? _container;
|
||||
private string? _connectionString;
|
||||
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
_container = new PostgreSqlBuilder()
|
||||
.WithImage("postgres:16-alpine")
|
||||
.Build();
|
||||
|
||||
await _container.StartAsync();
|
||||
_connectionString = _container.GetConnectionString();
|
||||
}
|
||||
|
||||
public async Task DisposeAsync()
|
||||
{
|
||||
if (_container != null)
|
||||
{
|
||||
await _container.DisposeAsync();
|
||||
}
|
||||
}
|
||||
|
||||
#region T1.13-AC1: Platform starts successfully with PostgreSQL only
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC1.1: PostgreSQL container starts and accepts connections")]
|
||||
public async Task PostgresContainer_StartsAndAcceptsConnections()
|
||||
{
|
||||
// Arrange & Act - already done in InitializeAsync
|
||||
|
||||
// Assert
|
||||
_connectionString.Should().NotBeNullOrEmpty();
|
||||
_container!.State.Should().Be(DotNet.Testcontainers.Containers.TestcontainersStates.Running);
|
||||
|
||||
// Verify connection works
|
||||
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
|
||||
await connection.OpenAsync();
|
||||
connection.State.Should().Be(System.Data.ConnectionState.Open);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC1.2: PostgreSQL connection string contains no MongoDB references")]
|
||||
public void ConnectionString_ContainsNoMongoDbReferences()
|
||||
{
|
||||
// Assert
|
||||
_connectionString.Should().NotContainAny("mongo", "mongodb", "27017");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1.13-AC2: Services connect to PostgreSQL correctly
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC2.1: Can create and verify database schema")]
|
||||
public async Task Database_CanCreateAndVerifySchema()
|
||||
{
|
||||
// Arrange
|
||||
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
|
||||
await connection.OpenAsync();
|
||||
|
||||
// Act - Create a test schema
|
||||
using var createCmd = connection.CreateCommand();
|
||||
createCmd.CommandText = "CREATE SCHEMA IF NOT EXISTS test_platform";
|
||||
await createCmd.ExecuteNonQueryAsync();
|
||||
|
||||
// Assert - Verify schema exists
|
||||
using var verifyCmd = connection.CreateCommand();
|
||||
verifyCmd.CommandText = @"
|
||||
SELECT schema_name
|
||||
FROM information_schema.schemata
|
||||
WHERE schema_name = 'test_platform'";
|
||||
var result = await verifyCmd.ExecuteScalarAsync();
|
||||
result.Should().Be("test_platform");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC2.2: Can perform basic CRUD operations")]
|
||||
public async Task Database_CanPerformCrudOperations()
|
||||
{
|
||||
// Arrange
|
||||
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
|
||||
await connection.OpenAsync();
|
||||
|
||||
// Create test table
|
||||
using var createCmd = connection.CreateCommand();
|
||||
createCmd.CommandText = @"
|
||||
CREATE TABLE IF NOT EXISTS test_crud (
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(100) NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
)";
|
||||
await createCmd.ExecuteNonQueryAsync();
|
||||
|
||||
// Act - Insert
|
||||
using var insertCmd = connection.CreateCommand();
|
||||
insertCmd.CommandText = "INSERT INTO test_crud (name) VALUES ('test-record') RETURNING id";
|
||||
var insertedId = await insertCmd.ExecuteScalarAsync();
|
||||
insertedId.Should().NotBeNull();
|
||||
|
||||
// Act - Select
|
||||
using var selectCmd = connection.CreateCommand();
|
||||
selectCmd.CommandText = "SELECT name FROM test_crud WHERE id = @id";
|
||||
selectCmd.Parameters.AddWithValue("id", insertedId!);
|
||||
var name = await selectCmd.ExecuteScalarAsync();
|
||||
name.Should().Be("test-record");
|
||||
|
||||
// Act - Update
|
||||
using var updateCmd = connection.CreateCommand();
|
||||
updateCmd.CommandText = "UPDATE test_crud SET name = 'updated-record' WHERE id = @id";
|
||||
updateCmd.Parameters.AddWithValue("id", insertedId!);
|
||||
var rowsAffected = await updateCmd.ExecuteNonQueryAsync();
|
||||
rowsAffected.Should().Be(1);
|
||||
|
||||
// Act - Delete
|
||||
using var deleteCmd = connection.CreateCommand();
|
||||
deleteCmd.CommandText = "DELETE FROM test_crud WHERE id = @id";
|
||||
deleteCmd.Parameters.AddWithValue("id", insertedId!);
|
||||
rowsAffected = await deleteCmd.ExecuteNonQueryAsync();
|
||||
rowsAffected.Should().Be(1);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1.13-AC3: Schema migrations run successfully
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC3.1: Can run DDL migrations")]
|
||||
public async Task Database_CanRunDdlMigrations()
|
||||
{
|
||||
// Arrange
|
||||
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
|
||||
await connection.OpenAsync();
|
||||
|
||||
// Act - Run a migration-like DDL script
|
||||
var migrationScript = @"
|
||||
-- V1: Create migrations tracking table
|
||||
CREATE TABLE IF NOT EXISTS schema_migrations (
|
||||
version VARCHAR(50) PRIMARY KEY,
|
||||
applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
checksum VARCHAR(64) NOT NULL
|
||||
);
|
||||
|
||||
-- V2: Create sample domain table
|
||||
CREATE TABLE IF NOT EXISTS scan_results (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
image_ref TEXT NOT NULL,
|
||||
findings_count INT NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Record migration
|
||||
INSERT INTO schema_migrations (version, checksum)
|
||||
VALUES ('V2_create_scan_results', 'abc123')
|
||||
ON CONFLICT (version) DO NOTHING;
|
||||
";
|
||||
|
||||
using var migrateCmd = connection.CreateCommand();
|
||||
migrateCmd.CommandText = migrationScript;
|
||||
await migrateCmd.ExecuteNonQueryAsync();
|
||||
|
||||
// Assert - Verify migration recorded
|
||||
using var verifyCmd = connection.CreateCommand();
|
||||
verifyCmd.CommandText = "SELECT COUNT(*) FROM schema_migrations WHERE version = 'V2_create_scan_results'";
|
||||
var count = await verifyCmd.ExecuteScalarAsync();
|
||||
Convert.ToInt32(count).Should().Be(1);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC3.2: PostgreSQL extensions can be created")]
|
||||
public async Task Database_CanCreateExtensions()
|
||||
{
|
||||
// Arrange
|
||||
using var connection = new Npgsql.NpgsqlConnection(_connectionString);
|
||||
await connection.OpenAsync();
|
||||
|
||||
// Act - Create common extensions used by StellaOps
|
||||
using var extCmd = connection.CreateCommand();
|
||||
extCmd.CommandText = "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"";
|
||||
await extCmd.ExecuteNonQueryAsync();
|
||||
|
||||
// Assert - Verify extension exists
|
||||
using var verifyCmd = connection.CreateCommand();
|
||||
verifyCmd.CommandText = "SELECT COUNT(*) FROM pg_extension WHERE extname = 'uuid-ossp'";
|
||||
var count = await verifyCmd.ExecuteScalarAsync();
|
||||
Convert.ToInt32(count).Should().Be(1);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1.13-AC4: No MongoDB connection attempts
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC4.1: Environment variables contain no MongoDB references")]
|
||||
public void EnvironmentVariables_ContainNoMongoDbReferences()
|
||||
{
|
||||
// Arrange - Get all environment variables
|
||||
var envVars = Environment.GetEnvironmentVariables();
|
||||
|
||||
// Act & Assert
|
||||
foreach (string key in envVars.Keys)
|
||||
{
|
||||
var value = envVars[key]?.ToString() ?? "";
|
||||
|
||||
// Skip if this is our test connection string
|
||||
if (key.Contains("POSTGRES", StringComparison.OrdinalIgnoreCase))
|
||||
continue;
|
||||
|
||||
key.Should().NotContainEquivalentOf("mongo",
|
||||
$"Environment variable key '{key}' should not reference MongoDB");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "T1.13-AC4.2: PostgreSQL-only configuration is valid")]
|
||||
public void Configuration_IsPostgresOnly()
|
||||
{
|
||||
// This test documents the expected configuration pattern
|
||||
var expectedConfig = new Dictionary<string, string>
|
||||
{
|
||||
["STELLAOPS_STORAGE_DRIVER"] = "postgres",
|
||||
["STELLAOPS_CACHE_DRIVER"] = "valkey", // or "redis" for compatibility
|
||||
};
|
||||
|
||||
// Assert - Document the expected pattern
|
||||
expectedConfig["STELLAOPS_STORAGE_DRIVER"].Should().NotBe("mongodb");
|
||||
expectedConfig["STELLAOPS_STORAGE_DRIVER"].Should().Be("postgres");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.Platform.csproj
|
||||
Sprint: SPRINT_5100_0001_0001_mongodb_cli_cleanup_consolidation
|
||||
Task: T1.13 - PostgreSQL-only Platform Startup Test
|
||||
Description: Integration tests for platform startup with PostgreSQL-only stack
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Testcontainers" Version="3.6.0" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Infrastructure testing library -->
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Infrastructure.Postgres.Testing/StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
<Using Include="FluentAssertions" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,373 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofChainIntegrationTests.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T1 - Proof Chain Integration Tests
|
||||
// Description: End-to-end tests for complete proof chain workflow:
|
||||
// scan → manifest → score → proof bundle → verify
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.ProofChain;
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end integration tests for the proof chain workflow.
|
||||
/// Tests the complete flow: scan submission → manifest creation → score computation
|
||||
/// → proof bundle generation → verification.
|
||||
/// </summary>
|
||||
[Collection("ProofChainIntegration")]
|
||||
public class ProofChainIntegrationTests : IAsyncLifetime
|
||||
{
|
||||
private readonly ProofChainTestFixture _fixture;
|
||||
private HttpClient _client = null!;
|
||||
|
||||
public ProofChainIntegrationTests(ProofChainTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
_client = await _fixture.CreateClientAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
{
|
||||
_client.Dispose();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
#region T1-AC1: Test scan submission creates manifest
|
||||
|
||||
[Fact]
|
||||
public async Task ScanSubmission_CreatesManifest_WithCorrectHashes()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateMinimalSbom();
|
||||
var scanRequest = new
|
||||
{
|
||||
sbom = sbomContent,
|
||||
policyId = "default",
|
||||
metadata = new { source = "integration-test" }
|
||||
};
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().Be(HttpStatusCode.Created);
|
||||
|
||||
var scanResult = await response.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
scanResult.Should().NotBeNull();
|
||||
scanResult!.ScanId.Should().NotBeEmpty();
|
||||
|
||||
// Verify manifest was created
|
||||
var manifestResponse = await _client.GetAsync($"/api/v1/scans/{scanResult.ScanId}/manifest");
|
||||
manifestResponse.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
|
||||
var manifest = await manifestResponse.Content.ReadFromJsonAsync<ManifestResponse>();
|
||||
manifest.Should().NotBeNull();
|
||||
manifest!.SbomHash.Should().StartWith("sha256:");
|
||||
manifest.ManifestHash.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1-AC2: Test score computation produces deterministic results
|
||||
|
||||
[Fact]
|
||||
public async Task ScoreComputation_IsDeterministic_WithSameInputs()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateSbomWithVulnerability("CVE-2024-12345");
|
||||
var scanRequest = new
|
||||
{
|
||||
sbom = sbomContent,
|
||||
policyId = "default"
|
||||
};
|
||||
|
||||
// Act - Run scan twice with identical inputs
|
||||
var response1 = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan1 = await response1.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
var response2 = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan2 = await response2.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
// Assert - Both scans should produce identical manifest hashes
|
||||
var manifest1 = await GetManifestAsync(scan1!.ScanId);
|
||||
var manifest2 = await GetManifestAsync(scan2!.ScanId);
|
||||
|
||||
manifest1.SbomHash.Should().Be(manifest2.SbomHash);
|
||||
manifest1.RulesHash.Should().Be(manifest2.RulesHash);
|
||||
manifest1.PolicyHash.Should().Be(manifest2.PolicyHash);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1-AC3: Test proof bundle generation and signing
|
||||
|
||||
[Fact]
|
||||
public async Task ProofBundle_IsGenerated_WithValidDsseEnvelope()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateMinimalSbom();
|
||||
var scanRequest = new { sbom = sbomContent, policyId = "default" };
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan = await response.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
// Get proof bundle
|
||||
var proofsResponse = await _client.GetAsync($"/api/v1/scans/{scan!.ScanId}/proofs");
|
||||
|
||||
// Assert
|
||||
proofsResponse.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
|
||||
var proofs = await proofsResponse.Content.ReadFromJsonAsync<ProofsListResponse>();
|
||||
proofs.Should().NotBeNull();
|
||||
proofs!.Items.Should().NotBeEmpty();
|
||||
|
||||
var proof = proofs.Items.First();
|
||||
proof.RootHash.Should().StartWith("sha256:");
|
||||
proof.DsseEnvelopeValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1-AC4: Test proof verification succeeds for valid bundles
|
||||
|
||||
[Fact]
|
||||
public async Task ProofVerification_Succeeds_ForValidBundle()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateMinimalSbom();
|
||||
var scanRequest = new { sbom = sbomContent, policyId = "default" };
|
||||
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan = await response.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
var proofsResponse = await _client.GetAsync($"/api/v1/scans/{scan!.ScanId}/proofs");
|
||||
var proofs = await proofsResponse.Content.ReadFromJsonAsync<ProofsListResponse>();
|
||||
var rootHash = proofs!.Items.First().RootHash;
|
||||
|
||||
// Act
|
||||
var verifyResponse = await _client.PostAsJsonAsync(
|
||||
$"/api/v1/scans/{scan.ScanId}/proofs/{rootHash}/verify",
|
||||
new { });
|
||||
|
||||
// Assert
|
||||
verifyResponse.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
|
||||
var verifyResult = await verifyResponse.Content.ReadFromJsonAsync<VerifyResponse>();
|
||||
verifyResult.Should().NotBeNull();
|
||||
verifyResult!.Valid.Should().BeTrue();
|
||||
verifyResult.Checks.Should().Contain(c => c.Name == "dsse_signature" && c.Passed);
|
||||
verifyResult.Checks.Should().Contain(c => c.Name == "merkle_root" && c.Passed);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1-AC5: Test verification fails for tampered bundles
|
||||
|
||||
[Fact]
|
||||
public async Task ProofVerification_Fails_ForTamperedBundle()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateMinimalSbom();
|
||||
var scanRequest = new { sbom = sbomContent, policyId = "default" };
|
||||
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan = await response.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
// Get a valid proof then tamper with the hash
|
||||
var proofsResponse = await _client.GetAsync($"/api/v1/scans/{scan!.ScanId}/proofs");
|
||||
var proofs = await proofsResponse.Content.ReadFromJsonAsync<ProofsListResponse>();
|
||||
var originalHash = proofs!.Items.First().RootHash;
|
||||
var tamperedHash = "sha256:" + new string('0', 64); // Tampered hash
|
||||
|
||||
// Act
|
||||
var verifyResponse = await _client.PostAsJsonAsync(
|
||||
$"/api/v1/scans/{scan.ScanId}/proofs/{tamperedHash}/verify",
|
||||
new { });
|
||||
|
||||
// Assert
|
||||
verifyResponse.StatusCode.Should().Be(HttpStatusCode.NotFound);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T1-AC6: Test replay produces identical scores
|
||||
|
||||
[Fact]
|
||||
public async Task ScoreReplay_ProducesIdenticalScore_WithSameManifest()
|
||||
{
|
||||
// Arrange
|
||||
var sbomContent = CreateSbomWithVulnerability("CVE-2024-99999");
|
||||
var scanRequest = new { sbom = sbomContent, policyId = "default" };
|
||||
|
||||
var response = await _client.PostAsJsonAsync("/api/v1/scans", scanRequest);
|
||||
var scan = await response.Content.ReadFromJsonAsync<ScanResponse>();
|
||||
|
||||
var manifest = await GetManifestAsync(scan!.ScanId);
|
||||
var originalProofs = await GetProofsAsync(scan.ScanId);
|
||||
var originalRootHash = originalProofs.Items.First().RootHash;
|
||||
|
||||
// Act - Replay the score computation
|
||||
var replayResponse = await _client.PostAsJsonAsync(
|
||||
$"/api/v1/scans/{scan.ScanId}/score/replay",
|
||||
new { manifestHash = manifest.ManifestHash });
|
||||
|
||||
// Assert
|
||||
replayResponse.StatusCode.Should().Be(HttpStatusCode.OK);
|
||||
|
||||
var replayResult = await replayResponse.Content.ReadFromJsonAsync<ReplayResponse>();
|
||||
replayResult.Should().NotBeNull();
|
||||
replayResult!.RootHash.Should().Be(originalRootHash);
|
||||
replayResult.Deterministic.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static string CreateMinimalSbom()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.5",
|
||||
version = 1,
|
||||
metadata = new
|
||||
{
|
||||
timestamp = DateTimeOffset.UtcNow.ToString("O"),
|
||||
component = new
|
||||
{
|
||||
type = "application",
|
||||
name = "integration-test-app",
|
||||
version = "1.0.0"
|
||||
}
|
||||
},
|
||||
components = Array.Empty<object>()
|
||||
});
|
||||
}
|
||||
|
||||
private static string CreateSbomWithVulnerability(string cveId)
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.5",
|
||||
version = 1,
|
||||
metadata = new
|
||||
{
|
||||
timestamp = DateTimeOffset.UtcNow.ToString("O"),
|
||||
component = new
|
||||
{
|
||||
type = "application",
|
||||
name = "vuln-test-app",
|
||||
version = "1.0.0"
|
||||
}
|
||||
},
|
||||
components = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
type = "library",
|
||||
name = "vulnerable-package",
|
||||
version = "1.0.0",
|
||||
purl = "pkg:npm/vulnerable-package@1.0.0"
|
||||
}
|
||||
},
|
||||
vulnerabilities = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
id = cveId,
|
||||
source = new { name = "NVD" },
|
||||
ratings = new[]
|
||||
{
|
||||
new { severity = "high", score = 7.5, method = "CVSSv31" }
|
||||
},
|
||||
affects = new[]
|
||||
{
|
||||
new { @ref = "pkg:npm/vulnerable-package@1.0.0" }
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async Task<ManifestResponse> GetManifestAsync(string scanId)
|
||||
{
|
||||
var response = await _client.GetAsync($"/api/v1/scans/{scanId}/manifest");
|
||||
response.EnsureSuccessStatusCode();
|
||||
return (await response.Content.ReadFromJsonAsync<ManifestResponse>())!;
|
||||
}
|
||||
|
||||
private async Task<ProofsListResponse> GetProofsAsync(string scanId)
|
||||
{
|
||||
var response = await _client.GetAsync($"/api/v1/scans/{scanId}/proofs");
|
||||
response.EnsureSuccessStatusCode();
|
||||
return (await response.Content.ReadFromJsonAsync<ProofsListResponse>())!;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record ScanResponse(
|
||||
string ScanId,
|
||||
string Status,
|
||||
DateTimeOffset CreatedAt);
|
||||
|
||||
private sealed record ManifestResponse(
|
||||
string ManifestHash,
|
||||
string SbomHash,
|
||||
string RulesHash,
|
||||
string FeedHash,
|
||||
string PolicyHash,
|
||||
DateTimeOffset CreatedAt);
|
||||
|
||||
private sealed record ProofsListResponse(
|
||||
IReadOnlyList<ProofItem> Items);
|
||||
|
||||
private sealed record ProofItem(
|
||||
string RootHash,
|
||||
string BundleUri,
|
||||
bool DsseEnvelopeValid,
|
||||
DateTimeOffset CreatedAt);
|
||||
|
||||
private sealed record VerifyResponse(
|
||||
bool Valid,
|
||||
string RootHash,
|
||||
IReadOnlyList<VerifyCheck> Checks);
|
||||
|
||||
private sealed record VerifyCheck(
|
||||
string Name,
|
||||
bool Passed,
|
||||
string? Message);
|
||||
|
||||
private sealed record ReplayResponse(
|
||||
string RootHash,
|
||||
double Score,
|
||||
bool Deterministic,
|
||||
DateTimeOffset ReplayedAt);
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Collection definition for proof chain integration tests.
|
||||
/// </summary>
|
||||
[CollectionDefinition("ProofChainIntegration")]
|
||||
public class ProofChainIntegrationCollection : ICollectionFixture<ProofChainTestFixture>
|
||||
{
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofChainTestFixture.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T1 - Proof Chain Integration Tests
|
||||
// Description: Test fixture for proof chain integration tests with PostgreSQL
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Testcontainers.PostgreSql;
|
||||
|
||||
namespace StellaOps.Integration.ProofChain;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for proof chain integration tests.
|
||||
/// Provides a fully configured Scanner WebService with PostgreSQL backing store.
|
||||
/// </summary>
|
||||
public sealed class ProofChainTestFixture : IAsyncLifetime
|
||||
{
|
||||
private PostgreSqlContainer? _postgresContainer;
|
||||
private WebApplicationFactory<Program>? _factory;
|
||||
private bool _initialized;
|
||||
|
||||
/// <summary>
|
||||
/// Initializes the test fixture, starting PostgreSQL container.
|
||||
/// </summary>
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
if (_initialized)
|
||||
return;
|
||||
|
||||
// Start PostgreSQL container
|
||||
_postgresContainer = new PostgreSqlBuilder()
|
||||
.WithImage("postgres:16-alpine")
|
||||
.WithDatabase("stellaops_test")
|
||||
.WithUsername("test_user")
|
||||
.WithPassword("test_password")
|
||||
.WithPortBinding(5432, true)
|
||||
.Build();
|
||||
|
||||
await _postgresContainer.StartAsync();
|
||||
|
||||
// Create the test web application factory
|
||||
_factory = new WebApplicationFactory<Program>()
|
||||
.WithWebHostBuilder(builder =>
|
||||
{
|
||||
builder.ConfigureAppConfiguration((context, config) =>
|
||||
{
|
||||
// Override connection string with test container
|
||||
config.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["ConnectionStrings:ScannerDb"] = _postgresContainer.GetConnectionString(),
|
||||
["Scanner:Authority:Enabled"] = "false",
|
||||
["Scanner:AllowAnonymous"] = "true",
|
||||
["Scanner:ProofChain:Enabled"] = "true",
|
||||
["Scanner:ProofChain:SigningKeyId"] = "test-key",
|
||||
["Scanner:ProofChain:AutoSign"] = "true",
|
||||
["Logging:LogLevel:Default"] = "Warning"
|
||||
});
|
||||
});
|
||||
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
// Add test-specific service overrides if needed
|
||||
services.AddLogging(logging =>
|
||||
{
|
||||
logging.ClearProviders();
|
||||
logging.AddConsole();
|
||||
logging.SetMinimumLevel(LogLevel.Warning);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
_initialized = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an HTTP client for the test application.
|
||||
/// </summary>
|
||||
public async Task<HttpClient> CreateClientAsync()
|
||||
{
|
||||
if (!_initialized)
|
||||
{
|
||||
await InitializeAsync();
|
||||
}
|
||||
|
||||
return _factory!.CreateClient(new WebApplicationFactoryClientOptions
|
||||
{
|
||||
AllowAutoRedirect = false
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Disposes of the test fixture resources.
|
||||
/// </summary>
|
||||
public async Task DisposeAsync()
|
||||
{
|
||||
_factory?.Dispose();
|
||||
|
||||
if (_postgresContainer is not null)
|
||||
{
|
||||
await _postgresContainer.DisposeAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Placeholder for Program class detection.
|
||||
/// The actual Program class is from Scanner.WebService.
|
||||
/// </summary>
|
||||
#pragma warning disable CA1050 // Declare types in namespaces
|
||||
public partial class Program { }
|
||||
#pragma warning restore CA1050
|
||||
@@ -0,0 +1,54 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.ProofChain.csproj
|
||||
Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
Task: T1 - Proof Chain Integration Tests
|
||||
Description: End-to-end integration tests for proof chain workflow
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.7.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<PackageReference Include="Testcontainers" Version="3.6.0" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Scanner WebService for integration testing -->
|
||||
<ProjectReference Include="../../Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj" />
|
||||
|
||||
<!-- Proof chain and attestation libraries -->
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.ProofChain/StellaOps.Attestor.ProofChain.csproj" />
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.Dsse/StellaOps.Attestor.Dsse.csproj" />
|
||||
|
||||
<!-- Policy scoring -->
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
|
||||
|
||||
<!-- Cryptography -->
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Content Include="../../fixtures/**/*">
|
||||
<Link>fixtures/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,280 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ReachabilityIntegrationTests.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T2 - Reachability Integration Tests
|
||||
// Description: End-to-end tests for call graph extraction and reachability analysis
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Reachability;
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end integration tests for reachability workflow.
|
||||
/// Tests: call graph extraction → entrypoint discovery → reachability analysis
|
||||
/// → explanation output → graph attestation signing.
|
||||
/// </summary>
|
||||
public class ReachabilityIntegrationTests : IClassFixture<ReachabilityTestFixture>
|
||||
{
|
||||
private readonly ReachabilityTestFixture _fixture;
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true
|
||||
};
|
||||
|
||||
public ReachabilityIntegrationTests(ReachabilityTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region T2-AC1: Test .NET call graph extraction
|
||||
|
||||
[Fact]
|
||||
public async Task DotNetCallGraph_ExtractsNodes_FromCorpusFixture()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var callGraphPath = Path.Combine(corpusPath, "callgraph.static.json");
|
||||
|
||||
// Act - Load and parse the call graph
|
||||
var callGraphJson = await File.ReadAllTextAsync(callGraphPath);
|
||||
var callGraph = JsonSerializer.Deserialize<CallGraphModel>(callGraphJson, JsonOptions);
|
||||
|
||||
// Assert
|
||||
callGraph.Should().NotBeNull();
|
||||
callGraph!.Nodes.Should().NotBeEmpty();
|
||||
callGraph.Edges.Should().NotBeEmpty();
|
||||
callGraph.Nodes.Should().Contain(n => n.IsEntrypoint == true);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DotNetCallGraph_IdentifiesEntrypoints_ForKestrelApp()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var callGraphPath = Path.Combine(corpusPath, "callgraph.static.json");
|
||||
var callGraphJson = await File.ReadAllTextAsync(callGraphPath);
|
||||
var callGraph = JsonSerializer.Deserialize<CallGraphModel>(callGraphJson, JsonOptions);
|
||||
|
||||
// Act
|
||||
var entrypoints = callGraph!.Nodes.Where(n => n.IsEntrypoint == true).ToList();
|
||||
|
||||
// Assert
|
||||
entrypoints.Should().NotBeEmpty("Kestrel apps should have HTTP entrypoints");
|
||||
entrypoints.Should().Contain(e =>
|
||||
e.Symbol?.Contains("Controller", StringComparison.OrdinalIgnoreCase) == true ||
|
||||
e.Symbol?.Contains("Endpoint", StringComparison.OrdinalIgnoreCase) == true ||
|
||||
e.Symbol?.Contains("Handler", StringComparison.OrdinalIgnoreCase) == true);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T2-AC2: Test Java call graph extraction
|
||||
|
||||
[Fact]
|
||||
public async Task JavaCallGraph_ExtractsNodes_FromCorpusFixture()
|
||||
{
|
||||
// Arrange - Java corpus may not exist, skip if missing
|
||||
var corpusPath = _fixture.GetCorpusPath("java");
|
||||
var callGraphPath = Path.Combine(corpusPath, "callgraph.static.json");
|
||||
|
||||
if (!File.Exists(callGraphPath))
|
||||
{
|
||||
// Skip test if Java corpus not available
|
||||
return;
|
||||
}
|
||||
|
||||
// Act
|
||||
var callGraphJson = await File.ReadAllTextAsync(callGraphPath);
|
||||
var callGraph = JsonSerializer.Deserialize<CallGraphModel>(callGraphJson, JsonOptions);
|
||||
|
||||
// Assert
|
||||
callGraph.Should().NotBeNull();
|
||||
callGraph!.Nodes.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T2-AC3: Test entrypoint discovery
|
||||
|
||||
[Fact]
|
||||
public async Task EntrypointDiscovery_FindsWebEntrypoints_InDotNetCorpus()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var callGraphPath = Path.Combine(corpusPath, "callgraph.static.json");
|
||||
var callGraphJson = await File.ReadAllTextAsync(callGraphPath);
|
||||
var callGraph = JsonSerializer.Deserialize<CallGraphModel>(callGraphJson, JsonOptions);
|
||||
|
||||
// Act
|
||||
var entrypoints = callGraph!.Nodes.Where(n => n.IsEntrypoint == true).ToList();
|
||||
var webEntrypoints = entrypoints.Where(e =>
|
||||
e.Symbol?.Contains("Get", StringComparison.OrdinalIgnoreCase) == true ||
|
||||
e.Symbol?.Contains("Post", StringComparison.OrdinalIgnoreCase) == true ||
|
||||
e.Symbol?.Contains("Handle", StringComparison.OrdinalIgnoreCase) == true).ToList();
|
||||
|
||||
// Assert
|
||||
webEntrypoints.Should().NotBeEmpty("Web applications should have HTTP handler entrypoints");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T2-AC4: Test reachability computation
|
||||
|
||||
[Fact]
|
||||
public async Task ReachabilityComputation_FindsPath_ToVulnerableFunction()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var groundTruthPath = Path.Combine(corpusPath, "ground-truth.json");
|
||||
var groundTruthJson = await File.ReadAllTextAsync(groundTruthPath);
|
||||
var groundTruth = JsonSerializer.Deserialize<GroundTruthModel>(groundTruthJson, JsonOptions);
|
||||
|
||||
// Assert
|
||||
groundTruth.Should().NotBeNull();
|
||||
groundTruth!.Paths.Should().NotBeEmpty("Ground truth should contain reachability paths");
|
||||
|
||||
// Verify at least one path is marked as reachable
|
||||
var reachablePaths = groundTruth.Paths.Where(p => p.Reachable).ToList();
|
||||
reachablePaths.Should().NotBeEmpty("At least one vulnerability should be reachable");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReachabilityComputation_DistinguishesReachableFromUnreachable()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var groundTruthPath = Path.Combine(corpusPath, "ground-truth.json");
|
||||
var groundTruthJson = await File.ReadAllTextAsync(groundTruthPath);
|
||||
var groundTruth = JsonSerializer.Deserialize<GroundTruthModel>(groundTruthJson, JsonOptions);
|
||||
|
||||
// Assert
|
||||
groundTruth.Should().NotBeNull();
|
||||
|
||||
// Check that reachable paths have non-empty call chains
|
||||
foreach (var path in groundTruth!.Paths.Where(p => p.Reachable))
|
||||
{
|
||||
path.CallChain.Should().NotBeEmpty(
|
||||
"Reachable paths must have call chain evidence");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T2-AC5: Test reachability explanation output
|
||||
|
||||
[Fact]
|
||||
public async Task ReachabilityExplanation_ContainsCallPath_ForReachableVuln()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var groundTruthPath = Path.Combine(corpusPath, "ground-truth.json");
|
||||
var groundTruthJson = await File.ReadAllTextAsync(groundTruthPath);
|
||||
var groundTruth = JsonSerializer.Deserialize<GroundTruthModel>(groundTruthJson, JsonOptions);
|
||||
|
||||
// Act
|
||||
var reachablePath = groundTruth!.Paths.FirstOrDefault(p => p.Reachable);
|
||||
|
||||
// Assert
|
||||
reachablePath.Should().NotBeNull("Should have at least one reachable path");
|
||||
reachablePath!.CallChain.Should().HaveCountGreaterThan(1,
|
||||
"Call chain should show path from entrypoint to vulnerable code");
|
||||
reachablePath.Confidence.Should().BeGreaterThan(0,
|
||||
"Reachable paths should have confidence > 0");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReachabilityExplanation_IncludesConfidenceTier()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var groundTruthPath = Path.Combine(corpusPath, "ground-truth.json");
|
||||
var groundTruthJson = await File.ReadAllTextAsync(groundTruthPath);
|
||||
var groundTruth = JsonSerializer.Deserialize<GroundTruthModel>(groundTruthJson, JsonOptions);
|
||||
|
||||
// Assert
|
||||
foreach (var path in groundTruth!.Paths.Where(p => p.Reachable))
|
||||
{
|
||||
path.Tier.Should().NotBeNullOrEmpty(
|
||||
"Reachable paths should have a confidence tier (confirmed/likely/present)");
|
||||
path.Tier.Should().BeOneOf("confirmed", "likely", "present",
|
||||
"Tier should be one of the defined values");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T2-AC6: Test graph attestation signing
|
||||
|
||||
[Fact]
|
||||
public async Task GraphAttestation_HasValidVexFile_InCorpus()
|
||||
{
|
||||
// Arrange
|
||||
var corpusPath = _fixture.GetCorpusPath("dotnet");
|
||||
var vexPath = Path.Combine(corpusPath, "vex.openvex.json");
|
||||
|
||||
// Act
|
||||
var vexExists = File.Exists(vexPath);
|
||||
|
||||
// Assert
|
||||
vexExists.Should().BeTrue("Corpus should include VEX attestation file");
|
||||
|
||||
if (vexExists)
|
||||
{
|
||||
var vexJson = await File.ReadAllTextAsync(vexPath);
|
||||
var vex = JsonSerializer.Deserialize<VexDocument>(vexJson, JsonOptions);
|
||||
|
||||
vex.Should().NotBeNull();
|
||||
vex!.Context.Should().Contain("openvex");
|
||||
vex.Statements.Should().NotBeEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DTOs
|
||||
|
||||
private sealed record CallGraphModel(
|
||||
IReadOnlyList<CallGraphNode> Nodes,
|
||||
IReadOnlyList<CallGraphEdge> Edges,
|
||||
string? Version,
|
||||
string? Language);
|
||||
|
||||
private sealed record CallGraphNode(
|
||||
string NodeId,
|
||||
string? Symbol,
|
||||
string? File,
|
||||
int? Line,
|
||||
bool? IsEntrypoint,
|
||||
bool? IsSink);
|
||||
|
||||
private sealed record CallGraphEdge(
|
||||
string SourceId,
|
||||
string TargetId,
|
||||
string? CallKind);
|
||||
|
||||
private sealed record GroundTruthModel(
|
||||
string CveId,
|
||||
string? Language,
|
||||
IReadOnlyList<ReachabilityPath> Paths);
|
||||
|
||||
private sealed record ReachabilityPath(
|
||||
string VulnerableFunction,
|
||||
bool Reachable,
|
||||
IReadOnlyList<string> CallChain,
|
||||
double Confidence,
|
||||
string? Tier);
|
||||
|
||||
private sealed record VexDocument(
|
||||
string Context,
|
||||
IReadOnlyList<VexStatement> Statements);
|
||||
|
||||
private sealed record VexStatement(
|
||||
string Vulnerability,
|
||||
string Status,
|
||||
string? Justification);
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ReachabilityTestFixture.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T2 - Reachability Integration Tests
|
||||
// Description: Test fixture for reachability integration tests
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Reflection;
|
||||
|
||||
namespace StellaOps.Integration.Reachability;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for reachability integration tests.
|
||||
/// Provides access to corpus fixtures and test data.
|
||||
/// </summary>
|
||||
public sealed class ReachabilityTestFixture
|
||||
{
|
||||
private readonly string _corpusBasePath;
|
||||
private readonly string _fixturesBasePath;
|
||||
|
||||
public ReachabilityTestFixture()
|
||||
{
|
||||
var assemblyLocation = Assembly.GetExecutingAssembly().Location;
|
||||
var assemblyDirectory = Path.GetDirectoryName(assemblyLocation)!;
|
||||
|
||||
_corpusBasePath = Path.Combine(assemblyDirectory, "corpus");
|
||||
_fixturesBasePath = Path.Combine(assemblyDirectory, "fixtures");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the path to a language-specific corpus directory.
|
||||
/// </summary>
|
||||
/// <param name="language">Language identifier (dotnet, java, python, etc.)</param>
|
||||
/// <returns>Full path to the corpus directory</returns>
|
||||
public string GetCorpusPath(string language)
|
||||
{
|
||||
var corpusPath = Path.Combine(_corpusBasePath, language);
|
||||
|
||||
if (!Directory.Exists(corpusPath))
|
||||
{
|
||||
throw new DirectoryNotFoundException(
|
||||
$"Corpus directory not found for language '{language}' at: {corpusPath}");
|
||||
}
|
||||
|
||||
return corpusPath;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the path to a specific fixture directory.
|
||||
/// </summary>
|
||||
/// <param name="fixtureName">Name of the fixture</param>
|
||||
/// <returns>Full path to the fixture directory</returns>
|
||||
public string GetFixturePath(string fixtureName)
|
||||
{
|
||||
var fixturePath = Path.Combine(_fixturesBasePath, fixtureName);
|
||||
|
||||
if (!Directory.Exists(fixturePath))
|
||||
{
|
||||
throw new DirectoryNotFoundException(
|
||||
$"Fixture directory not found: {fixturePath}");
|
||||
}
|
||||
|
||||
return fixturePath;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Lists all available corpus languages.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string> GetAvailableCorpusLanguages()
|
||||
{
|
||||
if (!Directory.Exists(_corpusBasePath))
|
||||
{
|
||||
return Array.Empty<string>();
|
||||
}
|
||||
|
||||
return Directory.GetDirectories(_corpusBasePath)
|
||||
.Select(Path.GetFileName)
|
||||
.Where(name => !string.IsNullOrEmpty(name))
|
||||
.Cast<string>()
|
||||
.ToList();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks if a corpus exists for the given language.
|
||||
/// </summary>
|
||||
public bool HasCorpus(string language)
|
||||
{
|
||||
var corpusPath = Path.Combine(_corpusBasePath, language);
|
||||
return Directory.Exists(corpusPath);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.Reachability.csproj
|
||||
Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
Task: T2 - Reachability Integration Tests
|
||||
Description: End-to-end integration tests for reachability workflow
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.7.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<PackageReference Include="Testcontainers" Version="3.6.0" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Scanner libraries for reachability -->
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.CallGraph/StellaOps.Scanner.CallGraph.csproj" />
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.CallGraph.DotNet/StellaOps.Scanner.CallGraph.DotNet.csproj" />
|
||||
<ProjectReference Include="../../Scanner/__Libraries/StellaOps.Scanner.CallGraph.Java/StellaOps.Scanner.CallGraph.Java.csproj" />
|
||||
|
||||
<!-- Attestation for graph signing -->
|
||||
<ProjectReference Include="../../Attestor/__Libraries/StellaOps.Attestor.Dsse/StellaOps.Attestor.Dsse.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Corpus fixtures -->
|
||||
<Content Include="../../reachability/corpus/**/*">
|
||||
<Link>corpus/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
<Content Include="../../reachability/fixtures/**/*">
|
||||
<Link>fixtures/%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.Unknowns.csproj
|
||||
Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
Task: T3 - Unknowns Workflow Tests
|
||||
Description: Integration tests for unknowns lifecycle workflow
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
|
||||
<PackageReference Include="xunit" Version="2.7.0" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
|
||||
<PackageReference Include="Testcontainers" Version="3.6.0" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Policy libraries for unknowns -->
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy.Unknowns/StellaOps.Policy.Unknowns.csproj" />
|
||||
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />
|
||||
|
||||
<!-- Scheduler for rescan integration -->
|
||||
<ProjectReference Include="../../Scheduler/__Libraries/StellaOps.Scheduler.Client/StellaOps.Scheduler.Client.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,458 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UnknownsWorkflowTests.cs
|
||||
// Sprint: SPRINT_3500_0004_0003_integration_tests_corpus
|
||||
// Task: T3 - Unknowns Workflow Tests
|
||||
// Description: Integration tests for unknowns lifecycle:
|
||||
// detection → ranking → escalation → resolution
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Unknowns;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the unknowns registry workflow.
|
||||
/// Tests the complete lifecycle: detection → ranking → band assignment
|
||||
/// → escalation → resolution.
|
||||
/// </summary>
|
||||
public class UnknownsWorkflowTests
|
||||
{
|
||||
#region T3-AC1: Test unknown detection during scan
|
||||
|
||||
[Fact]
|
||||
public void UnknownDetection_CreatesEntry_ForUnmatchedVulnerability()
|
||||
{
|
||||
// Arrange
|
||||
var ranker = new UnknownRanker();
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-2024-UNKNOWN-001",
|
||||
Package = "mystery-package@1.0.0",
|
||||
DetectedAt = DateTimeOffset.UtcNow,
|
||||
ExploitPressure = 0.5,
|
||||
Uncertainty = 0.8
|
||||
};
|
||||
|
||||
// Act
|
||||
var ranked = ranker.Rank(unknown);
|
||||
|
||||
// Assert
|
||||
ranked.Should().NotBeNull();
|
||||
ranked.Score.Should().BeGreaterThan(0);
|
||||
ranked.Band.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UnknownDetection_CapturesMetadata_FromScan()
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-2024-SCAN-001",
|
||||
Package = "scanned-package@2.0.0",
|
||||
DetectedAt = DateTimeOffset.UtcNow,
|
||||
ScanId = Guid.NewGuid().ToString(),
|
||||
SourceFeed = "nvd",
|
||||
ExploitPressure = 0.3,
|
||||
Uncertainty = 0.6
|
||||
};
|
||||
|
||||
// Assert
|
||||
unknown.ScanId.Should().NotBeNullOrEmpty();
|
||||
unknown.SourceFeed.Should().Be("nvd");
|
||||
unknown.DetectedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T3-AC2: Test ranking determinism
|
||||
|
||||
[Fact]
|
||||
public void UnknownRanking_IsDeterministic_WithSameInputs()
|
||||
{
|
||||
// Arrange
|
||||
var ranker = new UnknownRanker();
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-2024-DETERM-001",
|
||||
Package = "det-package@1.0.0",
|
||||
DetectedAt = DateTimeOffset.Parse("2024-01-01T00:00:00Z"),
|
||||
ExploitPressure = 0.7,
|
||||
Uncertainty = 0.4
|
||||
};
|
||||
|
||||
// Act - Rank the same entry multiple times
|
||||
var rank1 = ranker.Rank(unknown);
|
||||
var rank2 = ranker.Rank(unknown);
|
||||
var rank3 = ranker.Rank(unknown);
|
||||
|
||||
// Assert - All rankings should be identical
|
||||
rank1.Score.Should().Be(rank2.Score);
|
||||
rank2.Score.Should().Be(rank3.Score);
|
||||
rank1.Band.Should().Be(rank2.Band);
|
||||
rank2.Band.Should().Be(rank3.Band);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UnknownRanking_UsesSimplifiedTwoFactorModel()
|
||||
{
|
||||
// Arrange - Per advisory: 2-factor model (uncertainty + exploit pressure)
|
||||
var ranker = new UnknownRanker();
|
||||
|
||||
var highPressureHighUncertainty = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-HIGH-HIGH",
|
||||
ExploitPressure = 0.9,
|
||||
Uncertainty = 0.9,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
var lowPressureLowUncertainty = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-LOW-LOW",
|
||||
ExploitPressure = 0.1,
|
||||
Uncertainty = 0.1,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
var highRank = ranker.Rank(highPressureHighUncertainty);
|
||||
var lowRank = ranker.Rank(lowPressureLowUncertainty);
|
||||
|
||||
// Assert
|
||||
highRank.Score.Should().BeGreaterThan(lowRank.Score,
|
||||
"High pressure + high uncertainty should rank higher");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T3-AC3: Test band assignment
|
||||
|
||||
[Theory]
|
||||
[InlineData(0.9, 0.9, "HOT")]
|
||||
[InlineData(0.5, 0.5, "WARM")]
|
||||
[InlineData(0.1, 0.1, "COLD")]
|
||||
public void BandAssignment_MapsCorrectly_BasedOnScore(
|
||||
double exploitPressure, double uncertainty, string expectedBand)
|
||||
{
|
||||
// Arrange
|
||||
var ranker = new UnknownRanker();
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = $"CVE-BAND-{expectedBand}",
|
||||
ExploitPressure = exploitPressure,
|
||||
Uncertainty = uncertainty,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
var ranked = ranker.Rank(unknown);
|
||||
|
||||
// Assert
|
||||
ranked.Band.Should().Be(expectedBand);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BandThresholds_AreWellDefined()
|
||||
{
|
||||
// Arrange - Verify thresholds per sprint spec
|
||||
var ranker = new UnknownRanker();
|
||||
|
||||
// Act & Assert
|
||||
// HOT: score >= 0.7
|
||||
var hotEntry = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-HOT",
|
||||
ExploitPressure = 0.85,
|
||||
Uncertainty = 0.85,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
ranker.Rank(hotEntry).Band.Should().Be("HOT");
|
||||
|
||||
// WARM: 0.3 <= score < 0.7
|
||||
var warmEntry = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-WARM",
|
||||
ExploitPressure = 0.5,
|
||||
Uncertainty = 0.5,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
ranker.Rank(warmEntry).Band.Should().Be("WARM");
|
||||
|
||||
// COLD: score < 0.3
|
||||
var coldEntry = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-COLD",
|
||||
ExploitPressure = 0.15,
|
||||
Uncertainty = 0.15,
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
ranker.Rank(coldEntry).Band.Should().Be("COLD");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T3-AC4: Test escalation triggers rescan
|
||||
|
||||
[Fact]
|
||||
public void Escalation_MovesBandToHot()
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-ESCALATE-001",
|
||||
ExploitPressure = 0.3,
|
||||
Uncertainty = 0.3,
|
||||
DetectedAt = DateTimeOffset.UtcNow,
|
||||
Band = "WARM"
|
||||
};
|
||||
|
||||
// Act
|
||||
var escalated = unknown.Escalate("Urgent customer request");
|
||||
|
||||
// Assert
|
||||
escalated.Band.Should().Be("HOT");
|
||||
escalated.EscalatedAt.Should().NotBeNull();
|
||||
escalated.EscalationReason.Should().Be("Urgent customer request");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Escalation_SetsRescanFlag()
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-RESCAN-001",
|
||||
Band = "COLD",
|
||||
DetectedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
var escalated = unknown.Escalate("New exploit discovered");
|
||||
|
||||
// Assert
|
||||
escalated.RequiresRescan.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T3-AC5: Test resolution updates status
|
||||
|
||||
[Theory]
|
||||
[InlineData("matched", "RESOLVED")]
|
||||
[InlineData("not_applicable", "RESOLVED")]
|
||||
[InlineData("deferred", "DEFERRED")]
|
||||
public void Resolution_UpdatesStatus_Correctly(string resolution, string expectedStatus)
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-RESOLVE-001",
|
||||
Band = "HOT",
|
||||
DetectedAt = DateTimeOffset.UtcNow,
|
||||
Status = "OPEN"
|
||||
};
|
||||
|
||||
// Act
|
||||
var resolved = unknown.Resolve(resolution, "Test resolution");
|
||||
|
||||
// Assert
|
||||
resolved.Status.Should().Be(expectedStatus);
|
||||
resolved.ResolvedAt.Should().NotBeNull();
|
||||
resolved.ResolutionNote.Should().Be("Test resolution");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Resolution_RecordsResolutionType()
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-RESOLUTION-TYPE",
|
||||
Band = "WARM",
|
||||
DetectedAt = DateTimeOffset.UtcNow,
|
||||
Status = "OPEN"
|
||||
};
|
||||
|
||||
// Act
|
||||
var resolved = unknown.Resolve("matched", "Found in OSV feed");
|
||||
|
||||
// Assert
|
||||
resolved.ResolutionType.Should().Be("matched");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region T3-AC6: Test band transitions
|
||||
|
||||
[Fact]
|
||||
public void BandTransition_IsTracked_OnRerank()
|
||||
{
|
||||
// Arrange
|
||||
var ranker = new UnknownRanker();
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-TRANSITION-001",
|
||||
ExploitPressure = 0.3,
|
||||
Uncertainty = 0.3,
|
||||
DetectedAt = DateTimeOffset.UtcNow.AddDays(-7),
|
||||
Band = "COLD"
|
||||
};
|
||||
|
||||
// Update pressure (simulating new exploit info)
|
||||
unknown = unknown with { ExploitPressure = 0.9 };
|
||||
|
||||
// Act
|
||||
var reranked = ranker.Rank(unknown);
|
||||
|
||||
// Assert
|
||||
reranked.Band.Should().NotBe("COLD");
|
||||
reranked.PreviousBand.Should().Be("COLD");
|
||||
reranked.BandTransitionAt.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BandTransition_RecordsHistory()
|
||||
{
|
||||
// Arrange
|
||||
var unknown = new UnknownEntry
|
||||
{
|
||||
CveId = "CVE-HISTORY-001",
|
||||
Band = "COLD",
|
||||
DetectedAt = DateTimeOffset.UtcNow.AddDays(-30),
|
||||
BandHistory = new List<BandHistoryEntry>()
|
||||
};
|
||||
|
||||
// Act - Simulate transition
|
||||
unknown = unknown.RecordBandTransition("COLD", "WARM", "Score increased");
|
||||
unknown = unknown.RecordBandTransition("WARM", "HOT", "Escalated");
|
||||
|
||||
// Assert
|
||||
unknown.BandHistory.Should().HaveCount(2);
|
||||
unknown.BandHistory[0].FromBand.Should().Be("COLD");
|
||||
unknown.BandHistory[0].ToBand.Should().Be("WARM");
|
||||
unknown.BandHistory[1].FromBand.Should().Be("WARM");
|
||||
unknown.BandHistory[1].ToBand.Should().Be("HOT");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Classes
|
||||
|
||||
/// <summary>
|
||||
/// Unknown entry model for tests.
|
||||
/// </summary>
|
||||
public sealed record UnknownEntry
|
||||
{
|
||||
public string CveId { get; init; } = string.Empty;
|
||||
public string? Package { get; init; }
|
||||
public DateTimeOffset DetectedAt { get; init; }
|
||||
public string? ScanId { get; init; }
|
||||
public string? SourceFeed { get; init; }
|
||||
public double ExploitPressure { get; init; }
|
||||
public double Uncertainty { get; init; }
|
||||
public string Band { get; init; } = "COLD";
|
||||
public string Status { get; init; } = "OPEN";
|
||||
public DateTimeOffset? EscalatedAt { get; init; }
|
||||
public string? EscalationReason { get; init; }
|
||||
public bool RequiresRescan { get; init; }
|
||||
public DateTimeOffset? ResolvedAt { get; init; }
|
||||
public string? ResolutionType { get; init; }
|
||||
public string? ResolutionNote { get; init; }
|
||||
public string? PreviousBand { get; init; }
|
||||
public DateTimeOffset? BandTransitionAt { get; init; }
|
||||
public List<BandHistoryEntry> BandHistory { get; init; } = new();
|
||||
|
||||
public UnknownEntry Escalate(string reason)
|
||||
{
|
||||
return this with
|
||||
{
|
||||
Band = "HOT",
|
||||
EscalatedAt = DateTimeOffset.UtcNow,
|
||||
EscalationReason = reason,
|
||||
RequiresRescan = true,
|
||||
PreviousBand = Band,
|
||||
BandTransitionAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
public UnknownEntry Resolve(string resolution, string note)
|
||||
{
|
||||
var status = resolution == "deferred" ? "DEFERRED" : "RESOLVED";
|
||||
return this with
|
||||
{
|
||||
Status = status,
|
||||
ResolvedAt = DateTimeOffset.UtcNow,
|
||||
ResolutionType = resolution,
|
||||
ResolutionNote = note
|
||||
};
|
||||
}
|
||||
|
||||
public UnknownEntry RecordBandTransition(string fromBand, string toBand, string reason)
|
||||
{
|
||||
var history = new List<BandHistoryEntry>(BandHistory)
|
||||
{
|
||||
new(fromBand, toBand, DateTimeOffset.UtcNow, reason)
|
||||
};
|
||||
return this with
|
||||
{
|
||||
Band = toBand,
|
||||
PreviousBand = fromBand,
|
||||
BandTransitionAt = DateTimeOffset.UtcNow,
|
||||
BandHistory = history
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record BandHistoryEntry(
|
||||
string FromBand,
|
||||
string ToBand,
|
||||
DateTimeOffset TransitionAt,
|
||||
string Reason);
|
||||
|
||||
/// <summary>
|
||||
/// Ranked unknown result.
|
||||
/// </summary>
|
||||
public sealed record RankedUnknown(
|
||||
string CveId,
|
||||
double Score,
|
||||
string Band,
|
||||
string? PreviousBand = null,
|
||||
DateTimeOffset? BandTransitionAt = null);
|
||||
|
||||
/// <summary>
|
||||
/// Simple 2-factor ranker for unknowns.
|
||||
/// Uses: Uncertainty + Exploit Pressure (per advisory spec)
|
||||
/// </summary>
|
||||
public sealed class UnknownRanker
|
||||
{
|
||||
private const double HotThreshold = 0.7;
|
||||
private const double WarmThreshold = 0.3;
|
||||
|
||||
public RankedUnknown Rank(UnknownEntry entry)
|
||||
{
|
||||
// 2-factor model: simple average of uncertainty and exploit pressure
|
||||
var score = (entry.Uncertainty + entry.ExploitPressure) / 2.0;
|
||||
|
||||
var band = score switch
|
||||
{
|
||||
>= HotThreshold => "HOT",
|
||||
>= WarmThreshold => "WARM",
|
||||
_ => "COLD"
|
||||
};
|
||||
|
||||
var previousBand = entry.Band != band ? entry.Band : null;
|
||||
var transitionAt = previousBand != null ? DateTimeOffset.UtcNow : (DateTimeOffset?)null;
|
||||
|
||||
return new RankedUnknown(
|
||||
entry.CveId,
|
||||
score,
|
||||
band,
|
||||
previousBand,
|
||||
transitionAt);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
Reference in New Issue
Block a user