sprints work
This commit is contained in:
24
tests/fixtures/invalid/README.md
vendored
Normal file
24
tests/fixtures/invalid/README.md
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Invalid Fixtures for Negative Testing
|
||||
|
||||
This directory contains intentionally invalid SBOM fixtures used for CI negative testing.
|
||||
These fixtures MUST fail schema validation to ensure the CI pipeline correctly detects errors.
|
||||
|
||||
## CycloneDX Invalid Fixtures
|
||||
|
||||
| File | Defect | Expected Error |
|
||||
|------|--------|----------------|
|
||||
| `cyclonedx-wrong-version.json` | specVersion "2.0" doesn't exist | Invalid enum value for specVersion |
|
||||
| `cyclonedx-missing-required.json` | Missing required specVersion field | Missing required property: specVersion |
|
||||
| `cyclonedx-invalid-component.json` | Component missing name and type | Required properties missing in component |
|
||||
|
||||
## CI Usage
|
||||
|
||||
The schema validation workflow uses the `tests/fixtures/invalid/` directory for negative test cases.
|
||||
When `--expect-failures` is passed, the CI expects these files to fail validation.
|
||||
|
||||
## Adding New Test Cases
|
||||
|
||||
1. Create a new JSON file with an intentional schema violation
|
||||
2. Add a `$comment` field explaining the defect
|
||||
3. Update this README with the expected error
|
||||
4. Ensure the file has the correct format marker (e.g., `"bomFormat": "CycloneDX"`)
|
||||
15
tests/fixtures/invalid/cyclonedx-invalid-component.json
vendored
Normal file
15
tests/fixtures/invalid/cyclonedx-invalid-component.json
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"$comment": "INTENTIONALLY INVALID CycloneDX fixture - component missing required 'name' and 'type' fields.",
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "1.6",
|
||||
"version": 1,
|
||||
"metadata": {
|
||||
"timestamp": "2025-12-25T00:00:00Z"
|
||||
},
|
||||
"components": [
|
||||
{
|
||||
"invalid-field": "this-is-not-valid",
|
||||
"purl": "pkg:npm/missing-required-fields@1.0.0"
|
||||
}
|
||||
]
|
||||
}
|
||||
9
tests/fixtures/invalid/cyclonedx-missing-required.json
vendored
Normal file
9
tests/fixtures/invalid/cyclonedx-missing-required.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"$comment": "INTENTIONALLY INVALID CycloneDX fixture - missing required 'specVersion' field.",
|
||||
"bomFormat": "CycloneDX",
|
||||
"version": 1,
|
||||
"metadata": {
|
||||
"timestamp": "2025-12-25T00:00:00Z"
|
||||
},
|
||||
"components": []
|
||||
}
|
||||
11
tests/fixtures/invalid/cyclonedx-wrong-version.json
vendored
Normal file
11
tests/fixtures/invalid/cyclonedx-wrong-version.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"$comment": "INTENTIONALLY INVALID CycloneDX fixture for negative testing.",
|
||||
"$comment2": "specVersion 2.0 does not exist and should fail schema validation.",
|
||||
"bomFormat": "CycloneDX",
|
||||
"specVersion": "2.0",
|
||||
"version": 1,
|
||||
"metadata": {
|
||||
"timestamp": "2025-12-25T00:00:00Z"
|
||||
},
|
||||
"components": []
|
||||
}
|
||||
@@ -0,0 +1,951 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// E2EReproducibilityTestFixture.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Task: E2E-8200-002 - Create E2EReproducibilityTestFixture with full service composition
|
||||
// Description: Test fixture providing full pipeline composition for E2E reproducibility tests.
|
||||
// Supports: ingest → normalize → diff → decide → attest → bundle → reverify
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Attestor.Dsse;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Policy.Deltas;
|
||||
using Testcontainers.PostgreSql;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
/// Test fixture for end-to-end reproducibility tests.
/// Provides a fully configured test environment with:
/// - PostgreSQL database via Testcontainers
/// - Mock advisory feeds
/// - Policy engine with test policies
/// - Attestor for DSSE envelope creation
/// - Full pipeline execution capability
/// </summary>
public sealed class E2EReproducibilityTestFixture : IAsyncLifetime
{
    // Disposable infrastructure owned by this fixture; created in InitializeAsync,
    // released in DisposeAsync. All are nullable because the fixture starts cold.
    private PostgreSqlContainer? _postgresContainer;
    private WebApplicationFactory<Program>? _factory;
    // Deterministic ECDSA P-256 key derived from a fixed seed (see GenerateDeterministicKey).
    private ECDsa? _signingKey;
    // Guards against repeated initialization; NOTE(review): not synchronized — assumes
    // xUnit's single-threaded fixture lifecycle. Confirm if used concurrently.
    private bool _initialized;

    /// <summary>
    /// Gets the frozen timestamp used for deterministic tests.
    /// All pipeline stages stamp their outputs with this value instead of wall-clock time.
    /// </summary>
    public DateTimeOffset FrozenTimestamp { get; } = new DateTimeOffset(2025, 6, 15, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
/// <summary>
/// Initializes the test fixture, starting required services:
/// a deterministic signing key, a PostgreSQL Testcontainer, and the
/// in-process test web host configured for frozen-clock, anonymous operation.
/// Safe to call more than once; subsequent calls are no-ops.
/// </summary>
public async Task InitializeAsync()
{
    // Idempotence guard — CreateClientAsync may also trigger initialization.
    if (_initialized)
        return;

    // Generate deterministic signing key from fixed seed so signatures are
    // reproducible across runs (same key every time).
    _signingKey = GenerateDeterministicKey(42);

    // Start PostgreSQL container. Port 5432 is bound to a random host port
    // (second argument 'true' = assign randomly) to avoid collisions.
    _postgresContainer = new PostgreSqlBuilder()
        .WithImage("postgres:16-alpine")
        .WithDatabase("stellaops_e2e_test")
        .WithUsername("e2e_test_user")
        .WithPassword("e2e_test_password")
        .WithPortBinding(5432, true)
        .Build();

    await _postgresContainer.StartAsync();

    // Create the test web application factory. Configuration disables auth,
    // enables proof-chain signing, and freezes the clock at FrozenTimestamp
    // so pipeline outputs are byte-stable.
    _factory = new WebApplicationFactory<Program>()
        .WithWebHostBuilder(builder =>
        {
            builder.ConfigureAppConfiguration((context, config) =>
            {
                config.AddInMemoryCollection(new Dictionary<string, string?>
                {
                    // Point the app at the just-started container.
                    ["ConnectionStrings:ScannerDb"] = _postgresContainer.GetConnectionString(),
                    ["Scanner:Authority:Enabled"] = "false",
                    ["Scanner:AllowAnonymous"] = "true",
                    ["Scanner:ProofChain:Enabled"] = "true",
                    ["Scanner:ProofChain:SigningKeyId"] = "e2e-test-key",
                    ["Scanner:ProofChain:AutoSign"] = "true",
                    ["Scanner:Determinism:FrozenClock"] = "true",
                    // Round-trip ("O") format preserves offset and full precision.
                    ["Scanner:Determinism:FrozenTimestamp"] = FrozenTimestamp.ToString("O"),
                    ["Logging:LogLevel:Default"] = "Warning"
                });
            });

            builder.ConfigureServices(services =>
            {
                // Quiet logging: console only, warnings and above.
                services.AddLogging(logging =>
                {
                    logging.ClearProviders();
                    logging.AddConsole();
                    logging.SetMinimumLevel(LogLevel.Warning);
                });
            });
        });

    _initialized = true;
}
|
||||
|
||||
/// <summary>
/// Creates an HTTP client for the test application, lazily initializing
/// the fixture first if needed. Redirects are not followed automatically
/// so tests can assert on 3xx responses directly.
/// </summary>
public async Task<HttpClient> CreateClientAsync()
{
    if (!_initialized)
    {
        await InitializeAsync();
    }

    var options = new WebApplicationFactoryClientOptions { AllowAutoRedirect = false };
    return _factory!.CreateClient(options);
}
|
||||
|
||||
/// <summary>
/// Creates a snapshot of all pipeline inputs with computed SHA-256 hashes
/// for later verification. Any input path left null falls back to the
/// corresponding built-in fixture factory; the VEX document has no default
/// and stays null when no path is given.
/// </summary>
public async Task<InputSnapshot> SnapshotInputsAsync(
    string? sbomFixturePath = null,
    string? advisoryFeedPath = null,
    string? policyPackPath = null,
    string? vexDocumentPath = null)
{
    // Read from disk when a path is supplied, otherwise use the default fixture.
    static async Task<byte[]> LoadAsync(string? path, Func<byte[]> fallback)
        => path is null ? fallback() : await File.ReadAllBytesAsync(path);

    var sbomContent = await LoadAsync(sbomFixturePath, CreateMinimalSbom);
    var advisoryFeed = await LoadAsync(advisoryFeedPath, CreateMockAdvisoryFeed);
    var policyPack = await LoadAsync(policyPackPath, CreateDefaultPolicyPack);

    byte[]? vexDocument = vexDocumentPath is null
        ? null
        : await File.ReadAllBytesAsync(vexDocumentPath);

    return new InputSnapshot
    {
        Sbom = sbomContent,
        SbomHash = ComputeHash(sbomContent),
        AdvisoryFeed = advisoryFeed,
        AdvisoryFeedHash = ComputeHash(advisoryFeed),
        PolicyPack = policyPack,
        PolicyPackHash = ComputeHash(policyPack),
        VexDocument = vexDocument,
        VexDocumentHash = vexDocument is not null ? ComputeHash(vexDocument) : null,
        SnapshotTimestamp = FrozenTimestamp
    };
}
|
||||
|
||||
/// <summary>
/// Executes the full pipeline with the given inputs:
/// ingest → normalize → diff → decide → attest → bundle.
/// Returns content hashes at each boundary so two runs over the same
/// <see cref="InputSnapshot"/> can be compared for reproducibility.
/// </summary>
public async Task<PipelineResult> RunFullPipelineAsync(InputSnapshot inputs)
{
    // Stage 1: Ingest advisories from the raw feed bytes.
    var advisories = await IngestAdvisoriesAsync(inputs.AdvisoryFeed);

    // Stage 2: Normalize advisories (dedupe by CVE, sort for determinism).
    var normalized = await NormalizeAdvisoriesAsync(advisories);

    // Stage 3: Diff SBOM against advisories to produce findings.
    var diff = await ComputeDiffAsync(inputs.Sbom, normalized);

    // Stage 4: Evaluate policy and compute verdict (VEX exceptions applied here).
    var verdict = await EvaluatePolicyAsync(diff, inputs.PolicyPack, inputs.VexDocument);

    // Stage 5: Create DSSE attestation over the canonical verdict payload.
    var envelope = await CreateAttestationAsync(verdict);

    // Stage 6: Package everything into a bundle with a hashed manifest.
    var bundle = await CreateBundleAsync(envelope, inputs);

    return new PipelineResult
    {
        VerdictId = verdict.VerdictId,
        VerdictHash = ComputeHash(SerializeVerdict(verdict)),
        EnvelopeHash = ComputeHash(SerializeEnvelope(envelope)),
        BundleManifest = bundle.Manifest,
        BundleManifestHash = ComputeHash(bundle.Manifest),
        ExecutionTimestamp = FrozenTimestamp
    };
}
|
||||
|
||||
#region Stage 1: Ingest

/// <summary>
/// Ingests advisory feed data (mock JSON format used by the E2E tests)
/// and returns the parsed records sorted by id.
/// </summary>
public Task<IReadOnlyList<AdvisoryRecord>> IngestAdvisoriesAsync(byte[] feedData)
    => Task.FromResult(ParseAdvisoryFeed(feedData));

/// <summary>
/// Parses the mock feed: a root object with an "advisories" array whose
/// entries carry id/cveId/severity/affected and an optional "fixed" list.
/// </summary>
private static IReadOnlyList<AdvisoryRecord> ParseAdvisoryFeed(byte[] feedData)
{
    var json = System.Text.Encoding.UTF8.GetString(feedData);
    using var doc = JsonDocument.Parse(json);

    // Materialize with ToList() while the JsonDocument is still alive,
    // then sort by id for deterministic downstream processing.
    var records = doc.RootElement
        .GetProperty("advisories")
        .EnumerateArray()
        .Select(element => new AdvisoryRecord
        {
            Id = element.GetProperty("id").GetString()!,
            CveId = element.GetProperty("cveId").GetString(),
            Severity = element.GetProperty("severity").GetString()!,
            AffectedPackages = element.GetProperty("affected").EnumerateArray()
                .Select(a => a.GetString()!)
                .ToList(),
            FixedVersions = element.TryGetProperty("fixed", out var fixedProp)
                ? fixedProp.EnumerateArray().Select(f => f.GetString()!).ToList()
                : []
        })
        .OrderBy(record => record.Id, StringComparer.Ordinal)
        .ToList();

    return records;
}

#endregion
|
||||
|
||||
#region Stage 2: Normalize

/// <summary>
/// Normalizes and deduplicates advisories: one record per CVE id (falling
/// back to the advisory id when no CVE is present), ordered deterministically,
/// with a content hash over the canonical serialization.
/// </summary>
public Task<NormalizedAdvisories> NormalizeAdvisoriesAsync(IReadOnlyList<AdvisoryRecord> advisories)
{
    // Deduplicate by CVE ID; within a group, keep the ordinally-smallest id
    // so the surviving record is stable across runs.
    var uniqueByCve = advisories
        .GroupBy(a => a.CveId ?? a.Id)
        .Select(g => g.OrderBy(a => a.Id, StringComparer.Ordinal).First())
        .OrderBy(a => a.CveId ?? a.Id, StringComparer.Ordinal)
        .ToList();

    var normalized = new NormalizedAdvisories
    {
        Advisories = uniqueByCve,
        NormalizationTimestamp = FrozenTimestamp,
        ContentHash = ComputeHash(SerializeAdvisories(uniqueByCve))
    };

    return Task.FromResult(normalized);
}

/// <summary>
/// Serializes advisories to canonical JSON bytes for hashing.
/// </summary>
private static byte[] SerializeAdvisories(IReadOnlyList<AdvisoryRecord> advisories)
{
    var serializable = advisories.Select(a => new
    {
        id = a.Id,
        cveId = a.CveId,
        severity = a.Severity,
        affected = a.AffectedPackages,
        // Fix: was `fixed_`, which serialized as the key "fixed_" and did not
        // match the "fixed" key used by the feed format (ParseAdvisoryFeed /
        // CreateMockAdvisoryFeed). `@fixed` is the verbatim-identifier form of
        // the reserved word and serializes as "fixed".
        @fixed = a.FixedVersions
    }).ToList();

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(serializable));
}

#endregion
|
||||
|
||||
#region Stage 3: Diff

/// <summary>
/// Computes the diff between an SBOM and the normalized advisories: every
/// (component, advisory) pair where the advisory's affected-package list
/// matches the component's purl exactly or contains the component name.
/// Findings are returned sorted by CVE then component for determinism.
/// </summary>
public Task<DiffResult> ComputeDiffAsync(byte[] sbomData, NormalizedAdvisories advisories)
{
    var sbom = ParseSbom(sbomData);

    // An advisory matches a component when one of its affected package
    // identifiers equals the purl (case-insensitive) or contains the name.
    static bool Matches(AdvisoryRecord advisory, SbomComponent component) =>
        advisory.AffectedPackages.Any(pkg =>
            pkg.Equals(component.Purl, StringComparison.OrdinalIgnoreCase) ||
            pkg.Contains(component.Name, StringComparison.OrdinalIgnoreCase));

    var sortedFindings = sbom.Components
        .SelectMany(component => advisories.Advisories
            .Where(advisory => Matches(advisory, component))
            .Select(advisory => new Finding
            {
                Id = $"finding:{advisory.CveId ?? advisory.Id}:{component.Purl}",
                CveId = advisory.CveId ?? advisory.Id,
                Severity = advisory.Severity,
                AffectedComponent = component.Purl,
                ComponentVersion = component.Version,
                FixedVersions = advisory.FixedVersions
            }))
        .OrderBy(f => f.CveId, StringComparer.Ordinal)
        .ThenBy(f => f.AffectedComponent, StringComparer.Ordinal)
        .ToList();

    var diff = new DiffResult
    {
        Findings = sortedFindings,
        SbomDigest = ComputeHash(sbomData),
        AdvisoryDigest = advisories.ContentHash,
        DiffTimestamp = FrozenTimestamp
    };

    return Task.FromResult(diff);
}

/// <summary>
/// Parses a CycloneDX-shaped SBOM: reads name/version/purl from each entry
/// of the "components" array and sorts the result by purl.
/// </summary>
private static SbomData ParseSbom(byte[] sbomData)
{
    var json = System.Text.Encoding.UTF8.GetString(sbomData);
    using var doc = JsonDocument.Parse(json);

    var components = doc.RootElement
        .GetProperty("components")
        .EnumerateArray()
        .Select(element => new SbomComponent
        {
            Name = element.GetProperty("name").GetString()!,
            Version = element.GetProperty("version").GetString()!,
            Purl = element.GetProperty("purl").GetString()!
        })
        .OrderBy(c => c.Purl, StringComparer.Ordinal)
        .ToList();

    return new SbomData { Components = components };
}

#endregion
|
||||
|
||||
#region Stage 4: Decide

/// <summary>
/// Evaluates policy over the diff findings and computes a verdict.
/// VEX "not_affected" statements suppress matching findings; the remaining
/// findings become blocking or warning drivers by severity, and the gate
/// level is G4 when anything blocks, G1 otherwise.
/// NOTE(review): <paramref name="policyPack"/> is currently not consulted —
/// the block/warn thresholds are hard-coded in IsBlockingSeverity. Confirm
/// whether the policy pack is intended to drive these rules.
/// </summary>
public Task<DeltaVerdict> EvaluatePolicyAsync(DiffResult diff, byte[] policyPack, byte[]? vexDocument)
{
    // Parse VEX document if provided for exception handling
    var exceptions = vexDocument is not null
        ? ParseVexExceptions(vexDocument)
        : [];

    // Evaluate findings against policy
    var blockingDrivers = new List<DeltaDriver>();
    var warningDrivers = new List<DeltaDriver>();
    var appliedExceptions = new List<string>();

    foreach (var finding in diff.Findings)
    {
        // Check if finding is excepted via VEX (matched by CVE id).
        var exception = exceptions.FirstOrDefault(e =>
            e.VulnerabilityId.Equals(finding.CveId, StringComparison.OrdinalIgnoreCase));

        if (exception is not null)
        {
            appliedExceptions.Add(exception.Id);
            continue;
        }

        var driver = new DeltaDriver
        {
            Type = "new-finding",
            Severity = MapSeverity(finding.Severity),
            Description = $"Vulnerability {finding.CveId} found in {finding.AffectedComponent}",
            CveId = finding.CveId,
            Purl = finding.AffectedComponent
        };

        // CRITICAL/HIGH block; everything else only warns.
        if (IsBlockingSeverity(finding.Severity))
        {
            blockingDrivers.Add(driver);
        }
        else
        {
            warningDrivers.Add(driver);
        }
    }

    // Sort drivers for determinism (CVE, then purl; exceptions ordinal).
    blockingDrivers = [.. blockingDrivers.OrderBy(d => d.CveId, StringComparer.Ordinal)
        .ThenBy(d => d.Purl, StringComparer.Ordinal)];

    warningDrivers = [.. warningDrivers.OrderBy(d => d.CveId, StringComparer.Ordinal)
        .ThenBy(d => d.Purl, StringComparer.Ordinal)];

    appliedExceptions = [.. appliedExceptions.Order(StringComparer.Ordinal)];

    // Compute gate level
    var gateLevel = blockingDrivers.Count > 0 ? DeltaGateLevel.G4 : DeltaGateLevel.G1;

    // Build verdict with content-addressed ID derived from the two input digests.
    var deltaId = $"delta:sha256:{ComputeHashString(System.Text.Encoding.UTF8.GetBytes(
        CanonJson.Serialize(new { diff.SbomDigest, diff.AdvisoryDigest })))}";

    var builder = new DeltaVerdictBuilder()
        .WithGate(gateLevel);

    foreach (var driver in blockingDrivers)
    {
        builder.AddBlockingDriver(driver);
    }

    foreach (var driver in warningDrivers)
    {
        builder.AddWarningDriver(driver);
    }

    foreach (var exception in appliedExceptions)
    {
        builder.AddException(exception);
    }

    var verdict = builder.Build(deltaId);

    return Task.FromResult(verdict);
}
|
||||
|
||||
/// <summary>
/// Extracts "not_affected" statements from an OpenVEX-shaped document.
/// Documents without a "statements" array yield an empty list. Results
/// are ordered by vulnerability id for determinism.
/// </summary>
private static IReadOnlyList<VexException> ParseVexExceptions(byte[] vexData)
{
    var json = System.Text.Encoding.UTF8.GetString(vexData);
    using var doc = JsonDocument.Parse(json);

    // Guard clause: no statements array means no exceptions at all.
    if (!doc.RootElement.TryGetProperty("statements", out var statements))
    {
        return [];
    }

    return statements.EnumerateArray()
        .Where(stmt => stmt.GetProperty("status").GetString() == "not_affected")
        .Select(stmt => new VexException
        {
            Id = stmt.GetProperty("id").GetString()!,
            VulnerabilityId = stmt.GetProperty("vulnerability").GetString()!,
            Status = "not_affected",
            Justification = stmt.TryGetProperty("justification", out var j) ? j.GetString() : null
        })
        .OrderBy(e => e.VulnerabilityId, StringComparer.Ordinal)
        .ToList();
}
|
||||
|
||||
/// <summary>
/// Maps a feed severity string to a <see cref="DeltaDriverSeverity"/>;
/// unrecognized values map to Unknown.
/// </summary>
private static DeltaDriverSeverity MapSeverity(string severity)
{
    switch (severity.ToUpperInvariant())
    {
        case "CRITICAL":
            return DeltaDriverSeverity.Critical;
        case "HIGH":
            return DeltaDriverSeverity.High;
        case "MEDIUM":
            return DeltaDriverSeverity.Medium;
        case "LOW":
            return DeltaDriverSeverity.Low;
        default:
            return DeltaDriverSeverity.Unknown;
    }
}

/// <summary>
/// CRITICAL and HIGH findings block the gate; everything else warns.
/// </summary>
private static bool IsBlockingSeverity(string severity)
{
    return severity.Equals("CRITICAL", StringComparison.OrdinalIgnoreCase)
        || severity.Equals("HIGH", StringComparison.OrdinalIgnoreCase);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 5: Attest

/// <summary>
/// Creates a DSSE envelope over the canonical verdict payload, signed with
/// the fixture's deterministic key under key id "e2e-test-key".
/// NOTE(review): although the key itself is deterministic, ECDSA signing in
/// .NET uses a randomized nonce, so the signature bytes (and hence the
/// envelope hash) may differ between runs — confirm the reproducibility
/// tests compare payloads rather than raw signatures.
/// </summary>
public Task<DsseEnvelopeData> CreateAttestationAsync(DeltaVerdict verdict)
{
    // Serialize verdict to canonical JSON — this is the signed payload.
    var payload = SerializeVerdict(verdict);

    // Sign using the fixture's deterministic key.
    var signature = SignPayload(payload);

    var envelope = new DsseEnvelopeData
    {
        PayloadType = "application/vnd.stellaops.verdict+json",
        Payload = payload,
        Signatures =
        [
            new DsseSignatureData
            {
                KeyId = "e2e-test-key",
                Signature = signature
            }
        ]
    };

    return Task.FromResult(envelope);
}
|
||||
|
||||
/// <summary>
/// Signs the payload per the DSSE spec: builds the PAE over the fixed
/// payload type and the payload bytes, then signs with ECDSA P-256/SHA-256.
/// Requires <see cref="InitializeAsync"/> to have run (non-null _signingKey).
/// </summary>
private byte[] SignPayload(byte[] payload)
{
    // Create PAE (Pre-Authentication Encoding) as per DSSE spec.
    // The payload type here must match the one placed in the envelope.
    var payloadType = "application/vnd.stellaops.verdict+json"u8.ToArray();
    var pae = CreatePae(payloadType, payload);

    // Sign with ECDSA P-256. NOTE(review): SignData uses a per-call random
    // nonce, so signature bytes are not run-to-run stable — verify that is
    // acceptable for the reproducibility assertions.
    return _signingKey!.SignData(pae, HashAlgorithmName.SHA256);
}
|
||||
|
||||
/// <summary>
/// Builds the DSSE Pre-Authentication Encoding:
/// PAE(type, payload) = "DSSEv1" || SP || LEN(type) || SP || type || SP || LEN(payload) || SP || payload
/// where lengths are ASCII decimal.
/// </summary>
private static byte[] CreatePae(byte[] payloadType, byte[] payload)
{
    using var buffer = new MemoryStream();

    void WriteBytes(byte[] bytes) => buffer.Write(bytes, 0, bytes.Length);

    WriteBytes("DSSEv1 "u8.ToArray());
    WriteBytes(System.Text.Encoding.UTF8.GetBytes(payloadType.Length.ToString()));
    buffer.WriteByte((byte)' ');
    WriteBytes(payloadType);
    buffer.WriteByte((byte)' ');
    WriteBytes(System.Text.Encoding.UTF8.GetBytes(payload.Length.ToString()));
    buffer.WriteByte((byte)' ');
    WriteBytes(payload);

    return buffer.ToArray();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Stage 6: Bundle

/// <summary>
/// Creates a bundle containing the attestation envelope and a manifest
/// that records the SHA-256 hash of every input artifact. The manifest
/// is serialized canonically and hashed so two identical runs produce
/// identical bundle hashes.
/// </summary>
public Task<BundleResult> CreateBundleAsync(DsseEnvelopeData envelope, InputSnapshot inputs)
{
    // Assemble artifact hashes; the optional VEX entry goes last, matching
    // the insertion order of the original implementation.
    var artifacts = new Dictionary<string, string>
    {
        ["sbom"] = inputs.SbomHash,
        ["advisory-feed"] = inputs.AdvisoryFeedHash,
        ["policy-pack"] = inputs.PolicyPackHash,
        ["envelope"] = ComputeHashString(SerializeEnvelope(envelope))
    };

    if (inputs.VexDocumentHash is { } vexHash)
    {
        artifacts["vex-document"] = vexHash;
    }

    var manifest = new BundleManifest
    {
        Version = "1.0",
        CreatedAt = FrozenTimestamp,
        Artifacts = artifacts
    };

    // Serialize manifest deterministically via canonical JSON.
    var manifestBytes = System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(manifest));

    var bundle = new BundleResult
    {
        Manifest = manifestBytes,
        Envelope = envelope,
        ManifestHash = ComputeHash(manifestBytes)
    };

    return Task.FromResult(bundle);
}

#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region Serialization Helpers

/// <summary>
/// Serializes a verdict to canonical JSON bytes (the DSSE payload form).
/// </summary>
private static byte[] SerializeVerdict(DeltaVerdict verdict)
    => System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(verdict));

/// <summary>
/// Serializes a DSSE envelope to canonical JSON bytes using the standard
/// DSSE wire field names (payloadType / payload / signatures.keyid / sig),
/// with payload and signature bytes base64-encoded.
/// </summary>
private static byte[] SerializeEnvelope(DsseEnvelopeData envelope)
{
    var signatures = envelope.Signatures
        .Select(s => new
        {
            keyid = s.KeyId,
            sig = Convert.ToBase64String(s.Signature)
        })
        .ToArray();

    var wireForm = new
    {
        payloadType = envelope.PayloadType,
        payload = Convert.ToBase64String(envelope.Payload),
        signatures
    };

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(wireForm));
}

#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hashing Helpers

/// <summary>
/// Computes the SHA-256 hash of <paramref name="data"/> and returns it as
/// a lowercase hex string with the "sha256:" prefix.
/// </summary>
public static string ComputeHash(byte[] data) => $"sha256:{ComputeHashString(data)}";

/// <summary>
/// Computes the SHA-256 hash of <paramref name="data"/> and returns the
/// bare lowercase hex string (no prefix).
/// </summary>
public static string ComputeHashString(byte[] data)
{
    var digest = SHA256.HashData(data);
    return Convert.ToHexString(digest).ToLowerInvariant();
}

#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Data Factories

/// <summary>
/// Creates a minimal CycloneDX 1.5 SBOM with three npm components
/// (lodash, axios, moment — versions chosen to match the mock advisories),
/// serialized to canonical JSON bytes.
/// </summary>
public static byte[] CreateMinimalSbom()
{
    var components = new[]
    {
        new { name = "lodash", version = "4.17.20", purl = "pkg:npm/lodash@4.17.20" },
        new { name = "axios", version = "0.21.0", purl = "pkg:npm/axios@0.21.0" },
        new { name = "moment", version = "2.29.0", purl = "pkg:npm/moment@2.29.0" }
    };

    var sbom = new
    {
        bomFormat = "CycloneDX",
        specVersion = "1.5",
        version = 1,
        components
    };

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(sbom));
}
|
||||
|
||||
/// <summary>
/// Creates a mock advisory feed for testing: three advisories (CRITICAL,
/// HIGH, LOW) whose affected purls match the components produced by
/// <see cref="CreateMinimalSbom"/>, serialized to canonical JSON bytes.
/// The shape matches what ParseAdvisoryFeed expects (advisories[].id /
/// cveId / severity / affected / fixed).
/// </summary>
public static byte[] CreateMockAdvisoryFeed()
{
    var feed = new
    {
        advisories = new[]
        {
            new
            {
                id = "GHSA-2024-0001",
                cveId = "CVE-2024-0001",
                severity = "CRITICAL",
                affected = new[] { "pkg:npm/lodash@4.17.20" },
                // `@fixed` is the verbatim form of the keyword and serializes as "fixed".
                @fixed = new[] { "pkg:npm/lodash@4.17.21" }
            },
            new
            {
                id = "GHSA-2024-0002",
                cveId = "CVE-2024-0002",
                severity = "HIGH",
                affected = new[] { "pkg:npm/axios@0.21.0" },
                @fixed = new[] { "pkg:npm/axios@0.21.1" }
            },
            new
            {
                id = "GHSA-2024-0003",
                cveId = "CVE-2024-0003",
                severity = "LOW",
                affected = new[] { "pkg:npm/moment@2.29.0" },
                @fixed = new[] { "pkg:npm/moment@2.29.4" }
            }
        }
    };

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(feed));
}
|
||||
|
||||
/// <summary>
/// Creates a default policy pack for testing: CRITICAL/HIGH block,
/// MEDIUM/LOW warn. Serialized to canonical JSON bytes.
/// </summary>
public static byte[] CreateDefaultPolicyPack()
{
    var rules = new[]
    {
        new { severity = "CRITICAL", action = "block" },
        new { severity = "HIGH", action = "block" },
        new { severity = "MEDIUM", action = "warn" },
        new { severity = "LOW", action = "warn" }
    };

    var policy = new
    {
        version = "1.0",
        rules
    };

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(policy));
}
|
||||
|
||||
/// <summary>
/// Creates an OpenVEX document with one "not_affected" statement per CVE id,
/// serialized to canonical JSON bytes. Statement ids are "vex-exception-001",
/// "vex-exception-002", … in argument order.
/// </summary>
public static byte[] CreateVexDocumentWithExceptions(params string[] exceptedCveIds)
{
    var statements = exceptedCveIds.Select((cve, i) => new
    {
        id = $"vex-exception-{i + 1:D3}",
        vulnerability = cve,
        status = "not_affected",
        justification = "vulnerable_code_not_in_execute_path"
    }).ToArray();

    // Fix: the anonymous property `@context` serialized as the key "context",
    // but OpenVEX requires the JSON-LD key to be literally "@context".
    // A dictionary lets us emit the '@'-prefixed key exactly.
    var vex = new Dictionary<string, object>
    {
        ["@context"] = "https://openvex.dev/ns/v0.2.0",
        ["id"] = "https://stellaops.test/vex/test-001",
        ["statements"] = statements
    };

    return System.Text.Encoding.UTF8.GetBytes(CanonJson.Serialize(vex));
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Key Generation

/// <summary>
/// Generates a deterministic ECDSA P-256 key from a seed: 32 scalar bytes
/// are drawn from a seeded RNG and imported as the private value D; the
/// public point Q is derived by exporting the imported key.
/// NOTE(review): for test purposes only — a raw 32-byte value is not
/// guaranteed to be a valid scalar for every seed (it could fall outside
/// the curve order); the fixed seed 42 used by this fixture works.
/// </summary>
private static ECDsa GenerateDeterministicKey(int seed)
{
    // Use a deterministic RNG seeded from the input so the key is the same
    // on every run.
    var rng = new DeterministicRng(seed);
    var keyBytes = new byte[32];
    rng.GetBytes(keyBytes);

    // Fix: the temporary key used to derive Q from D was never disposed,
    // leaking a native key handle per call. Scope it with `using` and hand
    // the fully-populated parameters to the key we return. This also removes
    // the redundant partially-filled ECParameters from the original.
    ECParameters parameters;
    using (var tempKey = ECDsa.Create(ECCurve.NamedCurves.nistP256))
    {
        tempKey.ImportParameters(new ECParameters
        {
            Curve = ECCurve.NamedCurves.nistP256,
            D = keyBytes
        });
        // Export with the private portion included: D plus the computed Q.
        parameters = tempKey.ExportParameters(true);
    }

    var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    ecdsa.ImportParameters(parameters);
    return ecdsa;
}
|
||||
|
||||
/// <summary>
/// Exports the signing key's public half in SubjectPublicKeyInfo (DER) form
/// for signature verification. Requires the fixture to be initialized.
/// </summary>
public byte[] GetPublicKey() => _signingKey!.ExportSubjectPublicKeyInfo();
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Disposes of the test fixture resources: the signing key, the test web
/// host, and the PostgreSQL container. Resets the initialization flag so
/// the fixture could be re-initialized after disposal.
/// </summary>
public async Task DisposeAsync()
{
    _signingKey?.Dispose();

    // Fix: WebApplicationFactory implements IAsyncDisposable; dispose it
    // asynchronously so the host shuts down cleanly instead of blocking.
    if (_factory is not null)
    {
        await _factory.DisposeAsync();
    }

    if (_postgresContainer is not null)
    {
        await _postgresContainer.DisposeAsync();
    }

    // Fix: the flag was never reset, so InitializeAsync after DisposeAsync
    // would no-op against disposed resources.
    _initialized = false;
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Deterministic byte source for key generation, backed by a seeded
/// <see cref="Random"/>: equal seeds always yield the identical byte sequence.
/// </summary>
internal sealed class DeterministicRng
{
    private readonly Random _random;

    public DeterministicRng(int seed) => _random = new Random(seed);

    /// <summary>Fills <paramref name="data"/> with the next bytes of the seeded sequence.</summary>
    public void GetBytes(byte[] data) => _random.NextBytes(data);
}
|
||||
|
||||
#region Data Transfer Objects
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot of all pipeline inputs with hashes.
|
||||
/// </summary>
|
||||
public sealed class InputSnapshot
|
||||
{
|
||||
public required byte[] Sbom { get; init; }
|
||||
public required string SbomHash { get; init; }
|
||||
public required byte[] AdvisoryFeed { get; init; }
|
||||
public required string AdvisoryFeedHash { get; init; }
|
||||
public required byte[] PolicyPack { get; init; }
|
||||
public required string PolicyPackHash { get; init; }
|
||||
public byte[]? VexDocument { get; init; }
|
||||
public string? VexDocumentHash { get; init; }
|
||||
public DateTimeOffset SnapshotTimestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of full pipeline execution.
|
||||
/// </summary>
|
||||
public sealed class PipelineResult
|
||||
{
|
||||
public required string VerdictId { get; init; }
|
||||
public required string VerdictHash { get; init; }
|
||||
public required string EnvelopeHash { get; init; }
|
||||
public required byte[] BundleManifest { get; init; }
|
||||
public required string BundleManifestHash { get; init; }
|
||||
public DateTimeOffset ExecutionTimestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Advisory record from ingestion.
|
||||
/// </summary>
|
||||
public sealed class AdvisoryRecord
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public string? CveId { get; init; }
|
||||
public required string Severity { get; init; }
|
||||
public required IReadOnlyList<string> AffectedPackages { get; init; }
|
||||
public IReadOnlyList<string> FixedVersions { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalized advisories after deduplication.
|
||||
/// </summary>
|
||||
public sealed class NormalizedAdvisories
|
||||
{
|
||||
public required IReadOnlyList<AdvisoryRecord> Advisories { get; init; }
|
||||
public DateTimeOffset NormalizationTimestamp { get; init; }
|
||||
public required string ContentHash { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SBOM component data.
|
||||
/// </summary>
|
||||
public sealed class SbomComponent
|
||||
{
|
||||
public required string Name { get; init; }
|
||||
public required string Version { get; init; }
|
||||
public required string Purl { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parsed SBOM data.
|
||||
/// </summary>
|
||||
public sealed class SbomData
|
||||
{
|
||||
public required IReadOnlyList<SbomComponent> Components { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
/// Security finding from diff.
/// </summary>
public sealed class Finding
{
    /// <summary>Unique finding identifier.</summary>
    public required string Id { get; init; }

    /// <summary>CVE this finding corresponds to.</summary>
    public required string CveId { get; init; }

    /// <summary>Severity label of the finding.</summary>
    public required string Severity { get; init; }

    /// <summary>Name of the affected component.</summary>
    public required string AffectedComponent { get; init; }

    /// <summary>Version of the affected component.</summary>
    public required string ComponentVersion { get; init; }

    /// <summary>Versions containing the fix; empty when none are known.</summary>
    public IReadOnlyList<string> FixedVersions { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Result of diffing SBOM against advisories.
/// </summary>
public sealed class DiffResult
{
    /// <summary>Findings produced by the diff.</summary>
    public required IReadOnlyList<Finding> Findings { get; init; }

    /// <summary>Digest of the SBOM input.</summary>
    public required string SbomDigest { get; init; }

    /// <summary>Digest of the advisory input.</summary>
    public required string AdvisoryDigest { get; init; }

    /// <summary>When the diff ran.</summary>
    public DateTimeOffset DiffTimestamp { get; init; }
}
|
||||
|
||||
/// <summary>
/// VEX exception from VEX document.
/// </summary>
public sealed class VexException
{
    /// <summary>Unique exception identifier.</summary>
    public required string Id { get; init; }

    /// <summary>Vulnerability the exception applies to.</summary>
    public required string VulnerabilityId { get; init; }

    /// <summary>VEX status value (e.g. "not_affected").</summary>
    public required string Status { get; init; }

    /// <summary>Optional justification for the status.</summary>
    public string? Justification { get; init; }
}
|
||||
|
||||
/// <summary>
/// DSSE envelope data.
/// </summary>
public sealed class DsseEnvelopeData
{
    /// <summary>Media type of the payload.</summary>
    public required string PayloadType { get; init; }

    /// <summary>Raw payload bytes.</summary>
    public required byte[] Payload { get; init; }

    /// <summary>Signatures over the payload.</summary>
    public required IReadOnlyList<DsseSignatureData> Signatures { get; init; }
}
|
||||
|
||||
/// <summary>
/// DSSE signature data.
/// </summary>
public sealed class DsseSignatureData
{
    /// <summary>Identifier of the signing key.</summary>
    public required string KeyId { get; init; }

    /// <summary>Raw signature bytes.</summary>
    public required byte[] Signature { get; init; }
}
|
||||
|
||||
/// <summary>
/// Bundle manifest structure.
/// </summary>
public sealed class BundleManifest
{
    /// <summary>Manifest schema version.</summary>
    public required string Version { get; init; }

    /// <summary>Creation timestamp of the bundle.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>Artifact name to content-hash map.</summary>
    public required Dictionary<string, string> Artifacts { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of bundle creation.
/// </summary>
public sealed class BundleResult
{
    /// <summary>Serialized manifest bytes.</summary>
    public required byte[] Manifest { get; init; }

    /// <summary>DSSE envelope for the bundle.</summary>
    public required DsseEnvelopeData Envelope { get; init; }

    /// <summary>Hash of the serialized manifest.</summary>
    public required string ManifestHash { get; init; }
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Placeholder for Program class detection.
/// The actual Program class is from Scanner.WebService; this partial stub only
/// exists so the test host can resolve the entry-point type.
/// </summary>
#pragma warning disable CA1050 // Declare types in namespaces
public partial class Program { }
#pragma warning restore CA1050
|
||||
@@ -0,0 +1,457 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// E2EReproducibilityTests.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Tasks: E2E-8200-011 to E2E-8200-014 - Reproducibility Tests
|
||||
// Description: End-to-end tests verifying full pipeline reproducibility.
|
||||
// Validates: identical verdict hash, identical manifest, frozen timestamps,
|
||||
// parallel execution produces identical results.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
/// End-to-end reproducibility tests for the full security scanning pipeline.
/// Verifies that identical inputs always produce identical outputs across:
/// - Sequential runs
/// - Parallel runs
/// - With frozen timestamps
/// </summary>
[Collection("E2EReproducibility")]
[Trait("Category", "Integration")]
[Trait("Sprint", "8200.0001.0004")]
[Trait("Feature", "E2E-Reproducibility")]
public sealed class E2EReproducibilityTests : IClassFixture<E2EReproducibilityTestFixture>, IAsyncLifetime
{
    private readonly E2EReproducibilityTestFixture _fixture;

    public E2EReproducibilityTests(E2EReproducibilityTestFixture fixture)
    {
        _fixture = fixture;
    }

    /// <summary>Initializes the shared fixture before each test.</summary>
    public async Task InitializeAsync()
    {
        await _fixture.InitializeAsync();
    }

    public Task DisposeAsync() => Task.CompletedTask;

    #region E2E-8200-011: Identical Verdict Hash

    [Fact(DisplayName = "Pipeline produces identical verdict hash across runs")]
    public async Task FullPipeline_ProducesIdenticalVerdictHash_AcrossRuns()
    {
        // Arrange - Create input snapshot
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act - Run pipeline twice with identical inputs
        var result1 = await _fixture.RunFullPipelineAsync(inputs);
        var result2 = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Verdict IDs must match
        result1.VerdictId.Should().NotBeNullOrEmpty("Verdict ID should be computed");
        result2.VerdictId.Should().NotBeNullOrEmpty("Verdict ID should be computed");
        result1.VerdictId.Should().Be(result2.VerdictId, "Verdict ID must be identical across runs");

        // Verdict hash must match
        result1.VerdictHash.Should().Be(result2.VerdictHash, "Verdict hash must be identical");
    }

    [Fact(DisplayName = "Pipeline produces identical verdict hash with 5 sequential runs")]
    public async Task FullPipeline_ProducesIdenticalVerdictHash_With5SequentialRuns()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();
        var results = new List<PipelineResult>();

        // Act - Run pipeline 5 times sequentially
        for (int i = 0; i < 5; i++)
        {
            results.Add(await _fixture.RunFullPipelineAsync(inputs));
        }

        // Assert - All verdict IDs must match.
        // FIX: use an indexed loop instead of List.IndexOf inside a foreach -
        // IndexOf re-scans the list per iteration (O(n^2)) and relies on
        // reference identity to find the "current" element.
        var firstVerdictId = results[0].VerdictId;
        for (int i = 0; i < results.Count; i++)
        {
            results[i].VerdictId.Should().Be(firstVerdictId, $"Run {i + 1} verdict ID must match first run");
        }
    }

    [Fact(DisplayName = "Verdict ID format is content-addressed")]
    public async Task VerdictId_Format_IsContentAddressed()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act
        var result = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Verdict ID should be in content-addressed format
        result.VerdictId.Should().StartWith("verdict:sha256:", "Verdict ID must use sha256 content-addressing");
        result.VerdictId.Should().MatchRegex(@"^verdict:sha256:[0-9a-f]{64}$", "Verdict ID must be valid sha256 hex");
    }

    #endregion

    #region E2E-8200-012: Identical Bundle Manifest

    [Fact(DisplayName = "Pipeline produces identical bundle manifest across runs")]
    public async Task FullPipeline_ProducesIdenticalBundleManifest_AcrossRuns()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act - Run pipeline twice
        var result1 = await _fixture.RunFullPipelineAsync(inputs);
        var result2 = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Bundle manifests must be byte-for-byte identical.
        // FIX: use Equal (ordered, element-wise) rather than BeEquivalentTo -
        // FluentAssertions' BeEquivalentTo ignores ordering for collections,
        // which is too weak for a byte-for-byte reproducibility check.
        result1.BundleManifest.Should().Equal(result2.BundleManifest, "Bundle manifest bytes must match");
        result1.BundleManifestHash.Should().Be(result2.BundleManifestHash, "Bundle manifest hash must match");
    }

    [Fact(DisplayName = "Bundle manifest contains all artifact hashes")]
    public async Task BundleManifest_ContainsAllArtifactHashes()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act
        var result = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Parse manifest and verify structure
        var manifestJson = System.Text.Encoding.UTF8.GetString(result.BundleManifest);
        using var doc = System.Text.Json.JsonDocument.Parse(manifestJson);
        var root = doc.RootElement;

        root.TryGetProperty("version", out _).Should().BeTrue("Manifest should have version");
        root.TryGetProperty("createdAt", out _).Should().BeTrue("Manifest should have createdAt");
        root.TryGetProperty("artifacts", out var artifacts).Should().BeTrue("Manifest should have artifacts");

        artifacts.TryGetProperty("sbom", out _).Should().BeTrue("Artifacts should include SBOM hash");
        artifacts.TryGetProperty("advisory-feed", out _).Should().BeTrue("Artifacts should include advisory feed hash");
        artifacts.TryGetProperty("policy-pack", out _).Should().BeTrue("Artifacts should include policy pack hash");
        artifacts.TryGetProperty("envelope", out _).Should().BeTrue("Artifacts should include envelope hash");
    }

    [Fact(DisplayName = "Manifest comparison detects differences")]
    public async Task ManifestComparer_DetectsDifferences_WhenInputsChange()
    {
        // Arrange - Create two different input snapshots
        var inputs1 = await _fixture.SnapshotInputsAsync();

        // Modify SBOM to create different input
        var modifiedSbom = E2EReproducibilityTestFixture.CreateMinimalSbom();
        var sbomJson = System.Text.Encoding.UTF8.GetString(modifiedSbom);
        var modifiedSbomJson = sbomJson.Replace("4.17.20", "4.17.21"); // Change version
        var modifiedSbomBytes = System.Text.Encoding.UTF8.GetBytes(modifiedSbomJson);

        var inputs2 = new InputSnapshot
        {
            Sbom = modifiedSbomBytes,
            SbomHash = E2EReproducibilityTestFixture.ComputeHash(modifiedSbomBytes),
            AdvisoryFeed = inputs1.AdvisoryFeed,
            AdvisoryFeedHash = inputs1.AdvisoryFeedHash,
            PolicyPack = inputs1.PolicyPack,
            PolicyPackHash = inputs1.PolicyPackHash,
            VexDocument = inputs1.VexDocument,
            VexDocumentHash = inputs1.VexDocumentHash,
            SnapshotTimestamp = inputs1.SnapshotTimestamp
        };

        // Act
        var result1 = await _fixture.RunFullPipelineAsync(inputs1);
        var result2 = await _fixture.RunFullPipelineAsync(inputs2);

        // Assert - Results should differ
        var comparison = ManifestComparer.Compare(result1, result2);
        comparison.IsMatch.Should().BeFalse("Different inputs should produce different outputs");
        comparison.Differences.Should().NotBeEmpty("Should detect at least one difference");
    }

    #endregion

    #region E2E-8200-013: Frozen Clock Timestamps

    [Fact(DisplayName = "Pipeline produces identical timestamps with frozen clock")]
    public async Task FullPipeline_ProducesIdenticalTimestamps_WithFrozenClock()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act - Run pipeline twice
        var result1 = await _fixture.RunFullPipelineAsync(inputs);
        var result2 = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Execution timestamps must match (frozen clock)
        result1.ExecutionTimestamp.Should().Be(_fixture.FrozenTimestamp, "Timestamp should match frozen clock");
        result2.ExecutionTimestamp.Should().Be(_fixture.FrozenTimestamp, "Timestamp should match frozen clock");
        result1.ExecutionTimestamp.Should().Be(result2.ExecutionTimestamp, "Timestamps must be identical");
    }

    [Fact(DisplayName = "Manifest createdAt matches frozen timestamp")]
    public async Task BundleManifest_CreatedAt_MatchesFrozenTimestamp()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act
        var result = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Parse manifest and verify timestamp
        var manifestJson = System.Text.Encoding.UTF8.GetString(result.BundleManifest);
        using var doc = System.Text.Json.JsonDocument.Parse(manifestJson);
        var createdAt = doc.RootElement.GetProperty("createdAt").GetDateTimeOffset();

        createdAt.Should().Be(_fixture.FrozenTimestamp, "Manifest createdAt should match frozen clock");
    }

    [Fact(DisplayName = "Input snapshot timestamp matches frozen clock")]
    public async Task InputSnapshot_Timestamp_MatchesFrozenClock()
    {
        // Arrange & Act
        var inputs = await _fixture.SnapshotInputsAsync();

        // Assert
        inputs.SnapshotTimestamp.Should().Be(_fixture.FrozenTimestamp, "Snapshot timestamp should match frozen clock");
    }

    #endregion

    #region E2E-8200-014: Parallel Execution

    [Fact(DisplayName = "10 concurrent pipeline runs produce identical results")]
    public async Task FullPipeline_ParallelExecution_10Concurrent_AllIdentical()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();
        const int concurrentRuns = 10;

        // Act - Run pipeline 10 times in parallel
        var tasks = Enumerable.Range(0, concurrentRuns)
            .Select(_ => _fixture.RunFullPipelineAsync(inputs))
            .ToList();

        var results = await Task.WhenAll(tasks);

        // Assert - All results must be identical
        var comparison = ManifestComparer.CompareMultiple(results.ToList());
        comparison.AllMatch.Should().BeTrue($"All {concurrentRuns} concurrent runs must produce identical results. {comparison.Summary}");
    }

    [Fact(DisplayName = "5 concurrent pipeline runs produce identical verdict IDs")]
    public async Task FullPipeline_ParallelExecution_5Concurrent_IdenticalVerdictIds()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();
        const int concurrentRuns = 5;

        // Act - Run pipeline 5 times in parallel
        var tasks = Enumerable.Range(0, concurrentRuns)
            .Select(_ => _fixture.RunFullPipelineAsync(inputs))
            .ToList();

        var results = await Task.WhenAll(tasks);

        // Assert - All verdict IDs must match
        var firstVerdictId = results[0].VerdictId;
        foreach (var result in results)
        {
            result.VerdictId.Should().Be(firstVerdictId, "All parallel runs must produce same verdict ID");
        }
    }

    [Fact(DisplayName = "Parallel runs with VEX exceptions produce identical results")]
    public async Task FullPipeline_ParallelWithVex_ProducesIdenticalResults()
    {
        // Arrange - Create inputs with VEX exceptions
        var vexDocument = E2EReproducibilityTestFixture.CreateVexDocumentWithExceptions("CVE-2024-0001");
        var inputs = await _fixture.SnapshotInputsAsync(vexDocumentPath: null);
        var inputsWithVex = new InputSnapshot
        {
            Sbom = inputs.Sbom,
            SbomHash = inputs.SbomHash,
            AdvisoryFeed = inputs.AdvisoryFeed,
            AdvisoryFeedHash = inputs.AdvisoryFeedHash,
            PolicyPack = inputs.PolicyPack,
            PolicyPackHash = inputs.PolicyPackHash,
            VexDocument = vexDocument,
            VexDocumentHash = E2EReproducibilityTestFixture.ComputeHash(vexDocument),
            SnapshotTimestamp = inputs.SnapshotTimestamp
        };

        const int concurrentRuns = 5;

        // Act - Run pipeline 5 times in parallel
        var tasks = Enumerable.Range(0, concurrentRuns)
            .Select(_ => _fixture.RunFullPipelineAsync(inputsWithVex))
            .ToList();

        var results = await Task.WhenAll(tasks);

        // Assert - All results must be identical
        var comparison = ManifestComparer.CompareMultiple(results.ToList());
        comparison.AllMatch.Should().BeTrue("All parallel runs with VEX must produce identical results");
    }

    #endregion

    #region Edge Cases and Error Handling

    [Fact(DisplayName = "Empty SBOM produces deterministic empty result")]
    public async Task FullPipeline_EmptySbom_ProducesDeterministicResult()
    {
        // Arrange - Create empty SBOM
        var emptySbom = System.Text.Encoding.UTF8.GetBytes(
            System.Text.Json.JsonSerializer.Serialize(new { bomFormat = "CycloneDX", specVersion = "1.5", version = 1, components = Array.Empty<object>() }));

        var inputs = new InputSnapshot
        {
            Sbom = emptySbom,
            SbomHash = E2EReproducibilityTestFixture.ComputeHash(emptySbom),
            AdvisoryFeed = E2EReproducibilityTestFixture.CreateMockAdvisoryFeed(),
            AdvisoryFeedHash = E2EReproducibilityTestFixture.ComputeHash(E2EReproducibilityTestFixture.CreateMockAdvisoryFeed()),
            PolicyPack = E2EReproducibilityTestFixture.CreateDefaultPolicyPack(),
            PolicyPackHash = E2EReproducibilityTestFixture.ComputeHash(E2EReproducibilityTestFixture.CreateDefaultPolicyPack()),
            VexDocument = null,
            VexDocumentHash = null,
            SnapshotTimestamp = _fixture.FrozenTimestamp
        };

        // Act - Run pipeline twice
        var result1 = await _fixture.RunFullPipelineAsync(inputs);
        var result2 = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - Results must be identical even with empty SBOM
        result1.VerdictId.Should().Be(result2.VerdictId);
        result1.BundleManifestHash.Should().Be(result2.BundleManifestHash);
    }

    [Fact(DisplayName = "VEX exceptions reduce blocking findings deterministically")]
    public async Task FullPipeline_VexExceptions_ReduceBlockingFindingsDeterministically()
    {
        // Arrange - Run without VEX
        var inputsWithoutVex = await _fixture.SnapshotInputsAsync();
        var resultWithoutVex = await _fixture.RunFullPipelineAsync(inputsWithoutVex);

        // Run with VEX exception for CVE-2024-0001 (CRITICAL)
        var vexDocument = E2EReproducibilityTestFixture.CreateVexDocumentWithExceptions("CVE-2024-0001");
        var inputsWithVex = new InputSnapshot
        {
            Sbom = inputsWithoutVex.Sbom,
            SbomHash = inputsWithoutVex.SbomHash,
            AdvisoryFeed = inputsWithoutVex.AdvisoryFeed,
            AdvisoryFeedHash = inputsWithoutVex.AdvisoryFeedHash,
            PolicyPack = inputsWithoutVex.PolicyPack,
            PolicyPackHash = inputsWithoutVex.PolicyPackHash,
            VexDocument = vexDocument,
            VexDocumentHash = E2EReproducibilityTestFixture.ComputeHash(vexDocument),
            SnapshotTimestamp = inputsWithoutVex.SnapshotTimestamp
        };

        var resultWithVex = await _fixture.RunFullPipelineAsync(inputsWithVex);

        // Assert - VEX should change the verdict
        resultWithVex.VerdictId.Should().NotBe(resultWithoutVex.VerdictId, "VEX exception should change verdict");

        // But the result with VEX should be deterministic
        var resultWithVex2 = await _fixture.RunFullPipelineAsync(inputsWithVex);
        resultWithVex.VerdictId.Should().Be(resultWithVex2.VerdictId, "VEX result should be deterministic");
    }

    [Fact(DisplayName = "DSSE envelope hash is deterministic")]
    public async Task DsseEnvelope_Hash_IsDeterministic()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act - Run pipeline 3 times
        var result1 = await _fixture.RunFullPipelineAsync(inputs);
        var result2 = await _fixture.RunFullPipelineAsync(inputs);
        var result3 = await _fixture.RunFullPipelineAsync(inputs);

        // Assert - All envelope hashes must match
        result1.EnvelopeHash.Should().Be(result2.EnvelopeHash, "Envelope hash run 1 vs 2");
        result2.EnvelopeHash.Should().Be(result3.EnvelopeHash, "Envelope hash run 2 vs 3");
    }

    #endregion

    #region Comparison Helper Tests

    [Fact(DisplayName = "ManifestComparer generates readable diff report")]
    public async Task ManifestComparer_GeneratesReadableDiffReport()
    {
        // Arrange
        var inputs1 = await _fixture.SnapshotInputsAsync();

        // Create different inputs
        var differentSbom = System.Text.Encoding.UTF8.GetBytes(
            System.Text.Json.JsonSerializer.Serialize(new
            {
                bomFormat = "CycloneDX",
                specVersion = "1.5",
                version = 1,
                components = new[] { new { name = "different", version = "1.0.0", purl = "pkg:npm/different@1.0.0" } }
            }));

        var inputs2 = new InputSnapshot
        {
            Sbom = differentSbom,
            SbomHash = E2EReproducibilityTestFixture.ComputeHash(differentSbom),
            AdvisoryFeed = inputs1.AdvisoryFeed,
            AdvisoryFeedHash = inputs1.AdvisoryFeedHash,
            PolicyPack = inputs1.PolicyPack,
            PolicyPackHash = inputs1.PolicyPackHash,
            VexDocument = null,
            VexDocumentHash = null,
            SnapshotTimestamp = inputs1.SnapshotTimestamp
        };

        // Act
        var result1 = await _fixture.RunFullPipelineAsync(inputs1);
        var result2 = await _fixture.RunFullPipelineAsync(inputs2);
        var comparison = ManifestComparer.Compare(result1, result2);
        var report = ManifestComparer.GenerateDiffReport(comparison);

        // Assert
        comparison.IsMatch.Should().BeFalse();
        report.Should().Contain("difference");
        report.Should().Contain("VerdictId");
    }

    [Fact(DisplayName = "ManifestComparer multiple comparison returns correct summary")]
    public async Task ManifestComparer_MultipleComparison_ReturnsCorrectSummary()
    {
        // Arrange
        var inputs = await _fixture.SnapshotInputsAsync();

        // Act - Run pipeline 3 times
        var results = new List<PipelineResult>
        {
            await _fixture.RunFullPipelineAsync(inputs),
            await _fixture.RunFullPipelineAsync(inputs),
            await _fixture.RunFullPipelineAsync(inputs)
        };

        var comparison = ManifestComparer.CompareMultiple(results);

        // Assert
        comparison.AllMatch.Should().BeTrue();
        comparison.Summary.Should().Contain("identical");
    }

    #endregion
}
|
||||
|
||||
/// <summary>
/// Collection definition for E2E reproducibility tests so all test classes in the
/// "E2EReproducibility" collection share one <see cref="E2EReproducibilityTestFixture"/>.
/// </summary>
[CollectionDefinition("E2EReproducibility")]
public sealed class E2EReproducibilityCollection : ICollectionFixture<E2EReproducibilityTestFixture>
{
}
|
||||
473
tests/integration/StellaOps.Integration.E2E/ManifestComparer.cs
Normal file
473
tests/integration/StellaOps.Integration.E2E/ManifestComparer.cs
Normal file
@@ -0,0 +1,473 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ManifestComparer.cs
|
||||
// Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
|
||||
// Task: E2E-8200-004 - Add helper to compare verdict manifests byte-for-byte
|
||||
// Description: Provides byte-for-byte comparison of manifests and detailed diff reporting.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Integration.E2E;
|
||||
|
||||
/// <summary>
|
||||
/// Compares manifests and pipeline results byte-for-byte for reproducibility verification.
|
||||
/// </summary>
|
||||
public static class ManifestComparer
|
||||
{
|
||||
/// <summary>
/// Compares two pipeline results for exact equality.
/// </summary>
public static ManifestComparisonResult Compare(PipelineResult expected, PipelineResult actual)
{
    var differences = new List<ManifestDifference>();

    // Records a difference whenever the two string values are not ordinally equal.
    void AddIfChanged(string field, string expectedValue, string actualValue, DifferenceType type)
    {
        if (!string.Equals(expectedValue, actualValue, StringComparison.Ordinal))
        {
            differences.Add(new ManifestDifference(field, expectedValue, actualValue, type));
        }
    }

    AddIfChanged("VerdictId", expected.VerdictId, actual.VerdictId, DifferenceType.ValueMismatch);
    AddIfChanged("VerdictHash", expected.VerdictHash, actual.VerdictHash, DifferenceType.HashMismatch);
    AddIfChanged("EnvelopeHash", expected.EnvelopeHash, actual.EnvelopeHash, DifferenceType.HashMismatch);
    AddIfChanged("BundleManifestHash", expected.BundleManifestHash, actual.BundleManifestHash, DifferenceType.HashMismatch);

    // Byte-level comparison of the serialized manifest; reports the first
    // divergent offset to make reproducibility failures debuggable.
    if (!expected.BundleManifest.AsSpan().SequenceEqual(actual.BundleManifest))
    {
        var byteDiff = FindByteDifference(expected.BundleManifest, actual.BundleManifest);
        differences.Add(new ManifestDifference(
            "BundleManifest",
            $"Bytes differ at offset {byteDiff.Offset}: expected 0x{byteDiff.Expected:X2}, actual 0x{byteDiff.Actual:X2}",
            $"Expected length: {expected.BundleManifest.Length}, Actual length: {actual.BundleManifest.Length}",
            DifferenceType.ByteMismatch));
    }

    if (expected.ExecutionTimestamp != actual.ExecutionTimestamp)
    {
        differences.Add(new ManifestDifference(
            "ExecutionTimestamp",
            expected.ExecutionTimestamp.ToString("O"),
            actual.ExecutionTimestamp.ToString("O"),
            DifferenceType.ValueMismatch));
    }

    return new ManifestComparisonResult(differences.Count == 0, differences);
}
|
||||
|
||||
/// <summary>
/// Compares multiple pipeline results to verify they are all identical.
/// </summary>
public static MultipleComparisonResult CompareMultiple(IReadOnlyList<PipelineResult> results)
{
    // Fewer than two results cannot disagree.
    switch (results.Count)
    {
        case 0:
            return new MultipleComparisonResult(true, [], "No results to compare");
        case 1:
            return new MultipleComparisonResult(true, [], "Only one result, nothing to compare");
    }

    // Every result is compared against the first one.
    var baseline = results[0];
    var comparisons = new List<(int Index, ManifestComparisonResult Result)>();

    for (int i = 1; i < results.Count; i++)
    {
        comparisons.Add((i, Compare(baseline, results[i])));
    }

    var mismatchCount = comparisons.Count(c => !c.Result.IsMatch);
    var allMatch = mismatchCount == 0;

    var summary = allMatch
        ? $"All {results.Count} results are identical"
        : $"{mismatchCount} of {results.Count - 1} comparisons have differences";

    return new MultipleComparisonResult(allMatch, comparisons, summary);
}
|
||||
|
||||
/// <summary>
/// Compares two byte arrays and returns detailed difference information.
/// </summary>
public static ByteComparisonResult CompareBytes(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual)
{
    var differences = new List<ByteDifference>();
    var shared = Math.Min(expected.Length, actual.Length);

    // Record every mismatching byte within the shared prefix.
    for (int offset = 0; offset < shared; offset++)
    {
        if (expected[offset] != actual[offset])
        {
            differences.Add(new ByteDifference(offset, expected[offset], actual[offset]));
        }
    }

    // When lengths differ, the trailing bytes of the longer input are recorded
    // with a null counterpart on the shorter side.
    if (expected.Length != actual.Length)
    {
        var longest = Math.Max(expected.Length, actual.Length);
        for (int offset = shared; offset < longest; offset++)
        {
            var expectedByte = offset < expected.Length ? expected[offset] : (byte?)null;
            var actualByte = offset < actual.Length ? actual[offset] : (byte?)null;
            differences.Add(new ByteDifference(offset, expectedByte, actualByte));
        }
    }

    return new ByteComparisonResult(
        IsMatch: differences.Count == 0,
        ExpectedLength: expected.Length,
        ActualLength: actual.Length,
        Differences: differences,
        FirstDifferenceOffset: differences.Count > 0 ? differences[0].Offset : null);
}
|
||||
|
||||
/// <summary>
/// Compares two JSON documents for semantic equality (ignoring whitespace differences).
/// </summary>
public static JsonComparisonResult CompareJson(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual)
{
    // JsonDocument.Parse has no span overload, so materialize each side once
    // and reuse the arrays for both parsing and the echoed JSON text.
    var expectedBytes = expected.ToArray();
    var actualBytes = actual.ToArray();

    try
    {
        using var expectedDoc = JsonDocument.Parse(expectedBytes);
        using var actualDoc = JsonDocument.Parse(actualBytes);

        var differences = CompareJsonElements("$", expectedDoc.RootElement, actualDoc.RootElement);

        return new JsonComparisonResult(
            IsMatch: differences.Count == 0,
            Differences: differences,
            ExpectedJson: Encoding.UTF8.GetString(expectedBytes),
            ActualJson: Encoding.UTF8.GetString(actualBytes));
    }
    catch (JsonException ex)
    {
        // Either side failing to parse is reported as a single root-level difference.
        return new JsonComparisonResult(
            IsMatch: false,
            Differences: [new JsonDifference("$", $"JSON parse error: {ex.Message}", null, JsonDifferenceType.ParseError)],
            ExpectedJson: Encoding.UTF8.GetString(expectedBytes),
            ActualJson: Encoding.UTF8.GetString(actualBytes));
    }
}
|
||||
|
||||
// Recursively compares two JSON elements, returning one JsonDifference per
// divergence found, with JSONPath-style locations rooted at the given path.
private static List<JsonDifference> CompareJsonElements(string path, JsonElement expected, JsonElement actual)
{
    var differences = new List<JsonDifference>();

    // A kind mismatch makes any deeper comparison meaningless, so stop here.
    if (expected.ValueKind != actual.ValueKind)
    {
        differences.Add(new JsonDifference(
            path,
            $"Type: {expected.ValueKind}",
            $"Type: {actual.ValueKind}",
            JsonDifferenceType.TypeMismatch));
        return differences;
    }

    switch (expected.ValueKind)
    {
        case JsonValueKind.Object:
            var expectedByName = expected.EnumerateObject().ToDictionary(p => p.Name);
            var actualByName = actual.EnumerateObject().ToDictionary(p => p.Name);

            // Walk the expected side: recurse into shared properties, flag missing ones.
            foreach (var (name, expectedProp) in expectedByName)
            {
                var propPath = $"{path}.{name}";
                if (actualByName.TryGetValue(name, out var actualProp))
                {
                    differences.AddRange(CompareJsonElements(propPath, expectedProp.Value, actualProp.Value));
                }
                else
                {
                    differences.Add(new JsonDifference(propPath, expectedProp.ToString(), null, JsonDifferenceType.MissingProperty));
                }
            }

            // Properties only present on the actual side are extras.
            foreach (var (name, actualProp) in actualByName)
            {
                if (!expectedByName.ContainsKey(name))
                {
                    differences.Add(new JsonDifference($"{path}.{name}", null, actualProp.ToString(), JsonDifferenceType.ExtraProperty));
                }
            }
            break;

        case JsonValueKind.Array:
            var expectedItems = expected.EnumerateArray().ToList();
            var actualItems = actual.EnumerateArray().ToList();

            if (expectedItems.Count != actualItems.Count)
            {
                differences.Add(new JsonDifference(
                    path,
                    $"Length: {expectedItems.Count}",
                    $"Length: {actualItems.Count}",
                    JsonDifferenceType.ArrayLengthMismatch));
            }

            // Compare element-wise over the shared prefix even when lengths differ.
            var sharedCount = Math.Min(expectedItems.Count, actualItems.Count);
            for (int i = 0; i < sharedCount; i++)
            {
                differences.AddRange(CompareJsonElements($"{path}[{i}]", expectedItems[i], actualItems[i]));
            }
            break;

        case JsonValueKind.String:
            if (expected.GetString() != actual.GetString())
            {
                differences.Add(new JsonDifference(path, expected.GetString(), actual.GetString(), JsonDifferenceType.ValueMismatch));
            }
            break;

        case JsonValueKind.Number:
            // Raw-text comparison: "1.0" vs "1" counts as a difference, which is
            // what a reproducibility check wants.
            if (expected.GetRawText() != actual.GetRawText())
            {
                differences.Add(new JsonDifference(path, expected.GetRawText(), actual.GetRawText(), JsonDifferenceType.ValueMismatch));
            }
            break;

        case JsonValueKind.True:
        case JsonValueKind.False:
            if (expected.GetBoolean() != actual.GetBoolean())
            {
                differences.Add(new JsonDifference(path, expected.GetBoolean().ToString(), actual.GetBoolean().ToString(), JsonDifferenceType.ValueMismatch));
            }
            break;

        case JsonValueKind.Null:
            // Both sides are null: nothing to record.
            break;
    }

    return differences;
}
|
||||
|
||||
/// <summary>
/// Locates the first position at which two byte arrays diverge.
/// Returns a difference with a null side when one array is a strict prefix of the other.
/// </summary>
private static ByteDifference FindByteDifference(byte[] expected, byte[] actual)
{
    var overlap = Math.Min(expected.Length, actual.Length);

    // Scan the shared prefix for the first mismatching byte.
    for (var index = 0; index < overlap; index++)
    {
        if (expected[index] == actual[index])
        {
            continue;
        }

        return new ByteDifference(index, expected[index], actual[index]);
    }

    // Prefix matches; if lengths differ, the divergence is where the shorter side ends.
    if (expected.Length != actual.Length)
    {
        var expectedByte = overlap < expected.Length ? expected[overlap] : (byte?)null;
        var actualByte = overlap < actual.Length ? actual[overlap] : (byte?)null;
        return new ByteDifference(overlap, expectedByte, actualByte);
    }

    // No difference (shouldn't happen if called correctly)
    return new ByteDifference(0, 0, 0);
}
|
||||
|
||||
/// <summary>
/// Generates a detailed diff report for debugging reproducibility failures.
/// </summary>
/// <param name="comparison">The manifest comparison whose outcome should be rendered.</param>
/// <returns>A human-readable, multi-line report of all recorded differences.</returns>
public static string GenerateDiffReport(ManifestComparisonResult comparison)
{
    var report = new StringBuilder();
    report.AppendLine("=== Manifest Comparison Report ===");
    report.AppendLine();

    // Short-circuit: nothing to itemize when the manifests agree.
    if (comparison.IsMatch)
    {
        report.AppendLine("✓ All fields match exactly");
        return report.ToString();
    }

    report.AppendLine($"✗ Found {comparison.Differences.Count} difference(s):");
    report.AppendLine();

    // One indented stanza per difference: type/field header, then both values.
    foreach (var difference in comparison.Differences)
    {
        report.AppendLine($"  [{difference.Type}] {difference.Field}:");
        report.AppendLine($"    Expected: {difference.Expected}");
        report.AppendLine($"    Actual: {difference.Actual}");
        report.AppendLine();
    }

    return report.ToString();
}
|
||||
|
||||
/// <summary>
/// Generates a hex dump comparison for byte-level debugging.
/// Shows at most the first five differing offsets, each with its hex value and a
/// printable-ASCII rendering ('.' for control/non-ASCII bytes).
/// </summary>
/// <param name="expected">The baseline byte sequence.</param>
/// <param name="actual">The byte sequence produced by the run under test.</param>
/// <param name="contextBytes">
/// NOTE(review): currently unused — the previous implementation computed context
/// offsets from it but never rendered them (dead locals, removed here). The
/// parameter is kept for interface compatibility; confirm whether surrounding-byte
/// context output should be implemented or the parameter deprecated.
/// </param>
/// <returns>A human-readable, multi-line hex comparison report.</returns>
public static string GenerateHexDump(ReadOnlySpan<byte> expected, ReadOnlySpan<byte> actual, int contextBytes = 16)
{
    var comparison = CompareBytes(expected, actual);
    var sb = new StringBuilder();

    sb.AppendLine("=== Hex Dump Comparison ===");
    sb.AppendLine($"Expected length: {expected.Length}");
    sb.AppendLine($"Actual length: {actual.Length}");
    sb.AppendLine();

    if (comparison.IsMatch)
    {
        sb.AppendLine("✓ Bytes are identical");
        return sb.ToString();
    }

    sb.AppendLine($"✗ Found {comparison.Differences.Count} byte difference(s)");
    sb.AppendLine();

    // Renders a byte as its printable ASCII character, or '.' for null / non-printable values.
    static char Printable(byte? value) => value is >= 32 and < 127 ? (char)value.Value : '.';

    // Show first few differences with context
    var diffsToShow = comparison.Differences.Take(5).ToList();
    foreach (var diff in diffsToShow)
    {
        sb.AppendLine($"Difference at offset 0x{diff.Offset:X8} ({diff.Offset}):");
        sb.AppendLine($"  Expected: 0x{diff.Expected:X2} ('{Printable(diff.Expected)}')");
        sb.AppendLine($"  Actual: 0x{diff.Actual:X2} ('{Printable(diff.Actual)}')");
        sb.AppendLine();
    }

    if (comparison.Differences.Count > 5)
    {
        sb.AppendLine($"... and {comparison.Differences.Count - 5} more differences");
    }

    return sb.ToString();
}
|
||||
}
|
||||
|
||||
#region Result Types
|
||||
|
||||
/// <summary>
/// Result of comparing two manifests.
/// </summary>
/// <param name="IsMatch">True when no differences were detected between the manifests.</param>
/// <param name="Differences">The field-level differences found; empty when the manifests match.</param>
public sealed record ManifestComparisonResult(
    bool IsMatch,
    IReadOnlyList<ManifestDifference> Differences);
|
||||
|
||||
/// <summary>
/// A single difference between manifests.
/// </summary>
/// <param name="Field">Name or path of the manifest field that differs.</param>
/// <param name="Expected">The expected (baseline) value; null when the field is absent on that side.</param>
/// <param name="Actual">The observed value; null when the field is absent on that side.</param>
/// <param name="Type">The kind of difference, see <see cref="DifferenceType"/>.</param>
public sealed record ManifestDifference(
    string Field,
    string? Expected,
    string? Actual,
    DifferenceType Type);
|
||||
|
||||
/// <summary>
/// Type of difference found.
/// </summary>
public enum DifferenceType
{
    /// <summary>The field is present on both sides but the values differ.</summary>
    ValueMismatch,

    /// <summary>A content hash differs between the two manifests.</summary>
    HashMismatch,

    /// <summary>Raw byte content differs.</summary>
    ByteMismatch,

    /// <summary>The compared items have different lengths.</summary>
    LengthMismatch,

    /// <summary>Present in the expected manifest but missing from the actual one.</summary>
    Missing,

    /// <summary>Present in the actual manifest but not in the expected one.</summary>
    Extra
}
|
||||
|
||||
/// <summary>
/// Result of comparing multiple pipeline results.
/// </summary>
/// <param name="AllMatch">True when every individual comparison matched.</param>
/// <param name="Comparisons">Per-result comparison outcomes, paired with the result's index.</param>
/// <param name="Summary">A human-readable summary of the overall outcome.</param>
public sealed record MultipleComparisonResult(
    bool AllMatch,
    IReadOnlyList<(int Index, ManifestComparisonResult Result)> Comparisons,
    string Summary);
|
||||
|
||||
/// <summary>
/// Result of byte-level comparison.
/// </summary>
/// <param name="IsMatch">True when both sequences are byte-identical.</param>
/// <param name="ExpectedLength">Length of the expected byte sequence.</param>
/// <param name="ActualLength">Length of the actual byte sequence.</param>
/// <param name="Differences">The individual byte differences found.</param>
/// <param name="FirstDifferenceOffset">Offset of the first mismatching byte, if any; otherwise null.</param>
public sealed record ByteComparisonResult(
    bool IsMatch,
    int ExpectedLength,
    int ActualLength,
    IReadOnlyList<ByteDifference> Differences,
    int? FirstDifferenceOffset);
|
||||
|
||||
/// <summary>
/// A single byte difference.
/// </summary>
/// <param name="Offset">Zero-based offset at which the sequences diverge.</param>
/// <param name="Expected">The expected byte; null when the expected sequence ends before this offset.</param>
/// <param name="Actual">The actual byte; null when the actual sequence ends before this offset.</param>
public sealed record ByteDifference(
    int Offset,
    byte? Expected,
    byte? Actual);
|
||||
|
||||
/// <summary>
/// Result of JSON comparison.
/// </summary>
/// <param name="IsMatch">True when the two JSON documents are structurally equal.</param>
/// <param name="Differences">The structural differences found; empty when the documents match.</param>
/// <param name="ExpectedJson">The expected JSON text as compared.</param>
/// <param name="ActualJson">The actual JSON text as compared.</param>
public sealed record JsonComparisonResult(
    bool IsMatch,
    IReadOnlyList<JsonDifference> Differences,
    string ExpectedJson,
    string ActualJson);
|
||||
|
||||
/// <summary>
/// A single JSON difference.
/// </summary>
/// <param name="Path">JSON path of the differing node (dot/bracket notation, e.g. "$.a.b[0]").</param>
/// <param name="Expected">Expected value at the path; null for an extra property found only in the actual document.</param>
/// <param name="Actual">Actual value at the path; null for a property missing from the actual document.</param>
/// <param name="Type">The kind of difference, see <see cref="JsonDifferenceType"/>.</param>
public sealed record JsonDifference(
    string Path,
    string? Expected,
    string? Actual,
    JsonDifferenceType Type);
|
||||
|
||||
/// <summary>
/// Type of JSON difference.
/// </summary>
public enum JsonDifferenceType
{
    /// <summary>Same path, same value kind, but the values differ.</summary>
    ValueMismatch,

    /// <summary>The value kinds differ at the same path (e.g. string vs. number).</summary>
    TypeMismatch,

    /// <summary>A property expected at the path is absent from the actual document.</summary>
    MissingProperty,

    /// <summary>The actual document contains a property not present in the expected one.</summary>
    ExtraProperty,

    /// <summary>Arrays at the same path have different element counts.</summary>
    ArrayLengthMismatch,

    /// <summary>One of the documents could not be parsed as JSON.</summary>
    ParseError
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,79 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<!--
  StellaOps.Integration.E2E.csproj
  Sprint: SPRINT_8200_0001_0004_e2e_reproducibility_test
  Task: E2E-8200-001 - Create tests/integration/StellaOps.Integration.E2E/ project
  Description: End-to-end reproducibility tests covering full pipeline:
  ingest → normalize → diff → decide → attest → bundle → reverify
-->
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Non-packable xUnit test project targeting net10.0 with nullable analysis enabled. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <!-- Test framework, assertion library, and Testcontainers-based integration tooling. -->
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
    <PackageReference Include="xunit" Version="2.7.0" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.5.8">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0" />
    <PackageReference Include="Testcontainers" Version="3.6.0" />
    <PackageReference Include="Testcontainers.PostgreSql" Version="3.6.0" />
  </ItemGroup>

  <ItemGroup>
    <!-- Scanner WebService for integration testing -->
    <ProjectReference Include="../../../src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj" />

    <!-- Scanner Core for contracts -->
    <ProjectReference Include="../../../src/Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />

    <!-- Concelier for advisory ingestion and normalization -->
    <ProjectReference Include="../../../src/Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="../../../src/Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
    <ProjectReference Include="../../../src/Concelier/__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
    <ProjectReference Include="../../../src/Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />

    <!-- Policy for verdict computation -->
    <ProjectReference Include="../../../src/Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
    <ProjectReference Include="../../../src/Policy/__Libraries/StellaOps.Policy.Scoring/StellaOps.Policy.Scoring.csproj" />

    <!-- Attestor for DSSE envelope and bundle creation -->
    <ProjectReference Include="../../../src/Attestor/__Libraries/StellaOps.Attestor.Dsse/StellaOps.Attestor.Dsse.csproj" />
    <ProjectReference Include="../../../src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/StellaOps.Attestor.ProofChain.csproj" />
    <ProjectReference Include="../../../src/Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj" />

    <!-- Cryptography for hashing and content addressing -->
    <ProjectReference Include="../../../src/__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />

    <!-- Canonical JSON for deterministic serialization -->
    <ProjectReference Include="../../../src/__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />

    <!-- Testing infrastructure -->
    <ProjectReference Include="../../../src/__Libraries/StellaOps.Testing.Determinism/StellaOps.Testing.Determinism.csproj" />
  </ItemGroup>

  <!-- Copy shared fixtures and determinism baselines next to the test binaries at build time. -->
  <ItemGroup>
    <!-- E2E test fixtures -->
    <Content Include="../../fixtures/**/*">
      <Link>fixtures/%(RecursiveDir)%(Filename)%(Extension)</Link>
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </Content>

    <!-- Golden baselines for reproducibility verification -->
    <Content Include="../../../bench/determinism/**/*">
      <Link>baselines/%(RecursiveDir)%(Filename)%(Extension)</Link>
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </Content>
  </ItemGroup>

</Project>
|
||||
Reference in New Issue
Block a user