// Source: git.stella-ops.org/src/__Libraries/__Tests/StellaOps.AuditPack.Tests/AuditReplayE2ETests.cs
// -----------------------------------------------------------------------------
// AuditReplayE2ETests.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Task: REPLAY-028 - E2E test: export -> transfer -> replay offline
// Description: End-to-end integration tests for audit bundle export and replay.
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AuditPack.Models;
using StellaOps.AuditPack.Services;
using StellaOps.TestKit;
namespace StellaOps.AuditPack.Tests;
/// <summary>
/// End-to-end integration tests that verify the complete audit bundle workflow:
/// export -> transfer -> replay offline.
/// </summary>
public class AuditReplayE2ETests : IDisposable
{
private readonly string _tempDir;
private readonly string _exportDir;
private readonly string _importDir;
/// <summary>
/// Sets up an isolated per-test workspace under the system temp directory,
/// with separate export/ and import/ subdirectories so each test run
/// cannot collide with another.
/// </summary>
public AuditReplayE2ETests()
{
    var root = Path.Combine(Path.GetTempPath(), $"e2e-test-{Guid.NewGuid():N}");
    _tempDir = root;
    _exportDir = Path.Combine(root, "export");
    _importDir = Path.Combine(root, "import");
    foreach (var dir in new[] { _exportDir, _importDir })
    {
        // CreateDirectory also creates the missing temp root parent.
        Directory.CreateDirectory(dir);
    }
}
/// <summary>
/// Deletes the per-test temp workspace; invoked by xUnit after each test
/// because the class implements <see cref="IDisposable"/>.
/// </summary>
public void Dispose()
{
    if (!Directory.Exists(_tempDir))
    {
        return;
    }

    Directory.Delete(_tempDir, recursive: true);
}
/// <summary>
/// Happy-path E2E: export an unsigned audit bundle, simulate transfer by
/// copying the file, then replay it offline in an isolated context and
/// expect the replay verdict to match the original "pass" decision.
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task E2E_ExportTransferReplayOffline_MatchingVerdict()
{
// ===== PHASE 1: EXPORT =====
// Create scan data
var scanId = $"scan-{Guid.NewGuid():N}";
var imageRef = "registry.example.com/app:v1.2.3";
var imageDigest = "sha256:abc123def456789";
var decision = "pass";
var sbom = CreateCycloneDxSbom(imageRef);
var feeds = CreateFeedsSnapshot();
var policy = CreatePolicyBundle();
var verdict = CreateVerdict(decision, scanId);
var vex = CreateVexStatements();
// Create audit bundle (unsigned for E2E test simplicity)
var writer = new AuditBundleWriter();
var bundlePath = Path.Combine(_exportDir, "audit-bundle.tar.gz");
var writeRequest = new AuditBundleWriteRequest
{
OutputPath = bundlePath,
ScanId = scanId,
ImageRef = imageRef,
ImageDigest = imageDigest,
Decision = decision,
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
VexStatements = vex,
Sign = false, // Skip signing for unit test
TimeAnchor = new TimeAnchorInput
{
Timestamp = DateTimeOffset.UtcNow,
Source = "local-test"
}
};
var writeResult = await writer.WriteAsync(writeRequest);
// Assert export succeeded
Assert.True(writeResult.Success, $"Export failed: {writeResult.Error}");
Assert.True(File.Exists(bundlePath), "Bundle file not created");
Assert.NotNull(writeResult.MerkleRoot);
Assert.NotNull(writeResult.BundleDigest);
// ===== PHASE 2: TRANSFER (simulate by copying) =====
var transferredBundlePath = Path.Combine(_importDir, "transferred-bundle.tar.gz");
File.Copy(bundlePath, transferredBundlePath);
// Verify transfer integrity
// SHA-256 of source and copy must agree, proving the transfer was byte-identical.
var originalHash = await ComputeFileHashAsync(bundlePath);
var transferredHash = await ComputeFileHashAsync(transferredBundlePath);
Assert.Equal(originalHash, transferredHash);
// ===== PHASE 3: REPLAY OFFLINE =====
// Read the bundle
var reader = new AuditBundleReader();
var readRequest = new AuditBundleReadRequest
{
BundlePath = transferredBundlePath,
VerifySignature = false, // No signature in this test
VerifyMerkleRoot = true,
VerifyInputDigests = true,
LoadReplayInputs = true
};
var readResult = await reader.ReadAsync(readRequest);
// Assert read succeeded
Assert.True(readResult.Success, $"Read failed: {readResult.Error}");
Assert.True(readResult.MerkleRootVerified ?? false, "Merkle root validation failed");
Assert.True(readResult.InputDigestsVerified ?? false, "Input digests validation failed");
// Create isolated replay context
// EnforceOffline = true: replay must succeed without any network access.
using var replayContext = new IsolatedReplayContext(new IsolatedReplayContextOptions
{
CleanupOnDispose = true,
EnforceOffline = true
});
var initResult = await replayContext.InitializeAsync(readResult);
Assert.True(initResult.Success, $"Replay context init failed: {initResult.Error}");
// Execute replay
var executor = new ReplayExecutor();
var replayResult = await executor.ExecuteAsync(
replayContext,
readResult.Manifest!,
new ReplayExecutionOptions
{
FailOnInputDrift = false,
DetailedDriftDetection = true
});
// Assert replay succeeded with matching verdict
Assert.True(replayResult.Success, $"Replay failed: {replayResult.Error}");
Assert.Equal(ReplayStatus.Match, replayResult.Status);
Assert.True(replayResult.InputsVerified, "Inputs should be verified");
Assert.True(replayResult.DecisionMatches, "Decision should match");
Assert.Equal(decision, replayResult.OriginalDecision);
}
/// <summary>
/// Tamper evidence: two bundles that differ only in SBOM content must
/// produce different Merkle roots and different SBOM input digests, so a
/// tampered bundle cannot masquerade as the original.
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task E2E_ReplayDetectsTamperedSbom()
{
// Setup
var scanId = $"scan-{Guid.NewGuid():N}";
var sbom = CreateCycloneDxSbom("app:v1");
var feeds = CreateFeedsSnapshot();
var policy = CreatePolicyBundle();
var verdict = CreateVerdict("pass", scanId);
// Export original bundle
var writer = new AuditBundleWriter();
var bundlePath = Path.Combine(_exportDir, "original.tar.gz");
var writeResult = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = bundlePath,
ScanId = scanId,
ImageRef = "app:v1",
ImageDigest = "sha256:abc",
Decision = "pass",
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
Sign = false
});
Assert.True(writeResult.Success);
// Export tampered bundle with modified SBOM
// Identical request except the SBOM gains one extra ("malicious") component.
var tamperedSbom = CreateCycloneDxSbom("app:v1", addMaliciousComponent: true);
var tamperedBundlePath = Path.Combine(_importDir, "tampered.tar.gz");
var tamperedResult = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = tamperedBundlePath,
ScanId = scanId,
ImageRef = "app:v1",
ImageDigest = "sha256:abc",
Decision = "pass",
Sbom = tamperedSbom, // Different SBOM
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
Sign = false
});
Assert.True(tamperedResult.Success);
// Read both bundles
var reader = new AuditBundleReader();
var originalRead = await reader.ReadAsync(new AuditBundleReadRequest
{
BundlePath = bundlePath,
VerifySignature = false,
LoadReplayInputs = true
});
var tamperedRead = await reader.ReadAsync(new AuditBundleReadRequest
{
BundlePath = tamperedBundlePath,
VerifySignature = false,
LoadReplayInputs = true
});
// The merkle roots should differ
Assert.NotEqual(originalRead.Manifest?.MerkleRoot, tamperedRead.Manifest?.MerkleRoot);
// Input digests should differ
Assert.NotEqual(
originalRead.Manifest?.Inputs.SbomDigest,
tamperedRead.Manifest?.Inputs.SbomDigest);
}
/// <summary>
/// Determinism check: writing two bundles from the exact same input byte
/// arrays must yield identical Merkle roots. The same sbom/feeds/policy/
/// verdict buffers are reused for both writes so input bytes are equal.
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task E2E_DeterministicMerkleRoot_SameInputs()
{
// Create identical inputs
var sbom = CreateCycloneDxSbom("app:deterministic");
var feeds = CreateFeedsSnapshot();
var policy = CreatePolicyBundle();
var verdict = CreateVerdict("pass", "scan-deterministic");
var writer = new AuditBundleWriter();
// Write bundle 1
var bundle1Path = Path.Combine(_exportDir, "deterministic-1.tar.gz");
var result1 = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = bundle1Path,
ScanId = "scan-deterministic",
ImageRef = "app:deterministic",
ImageDigest = "sha256:deterministic123",
Decision = "pass",
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
Sign = false
});
// Write bundle 2 with same inputs
var bundle2Path = Path.Combine(_exportDir, "deterministic-2.tar.gz");
var result2 = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = bundle2Path,
ScanId = "scan-deterministic",
ImageRef = "app:deterministic",
ImageDigest = "sha256:deterministic123",
Decision = "pass",
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
Sign = false
});
// Merkle roots must be identical
Assert.True(result1.Success);
Assert.True(result2.Success);
Assert.Equal(result1.MerkleRoot, result2.MerkleRoot);
}
/// <summary>
/// Structural check: a bundle written with sbom, feeds, policy, verdict and
/// VEX inputs must list at least five files in its manifest, and the
/// manifest paths must include an entry for each of those inputs.
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task E2E_BundleContainsAllRequiredFiles()
{
// Setup
var sbom = CreateCycloneDxSbom("app:v1");
var feeds = CreateFeedsSnapshot();
var policy = CreatePolicyBundle();
var verdict = CreateVerdict("pass", "scan-files-test");
var vex = CreateVexStatements();
var writer = new AuditBundleWriter();
var bundlePath = Path.Combine(_exportDir, "files-test.tar.gz");
var writeResult = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = bundlePath,
ScanId = "scan-files-test",
ImageRef = "app:v1",
ImageDigest = "sha256:abc",
Decision = "pass",
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
VexStatements = vex,
Sign = false
});
Assert.True(writeResult.Success);
Assert.True(writeResult.FileCount >= 5, $"Expected at least 5 files, got {writeResult.FileCount}");
// Read and verify manifest contains all files
var reader = new AuditBundleReader();
var readResult = await reader.ReadAsync(new AuditBundleReadRequest
{
BundlePath = bundlePath,
VerifySignature = false
});
Assert.True(readResult.Success);
Assert.NotNull(readResult.Manifest);
Assert.NotEmpty(readResult.Manifest.Files);
// Verify essential files are present
// Substring matching keeps the test robust to exact archive layout/naming.
var filePaths = readResult.Manifest.Files.Select(f => f.RelativePath).ToList();
Assert.Contains(filePaths, p => p.Contains("sbom"));
Assert.Contains(filePaths, p => p.Contains("feeds"));
Assert.Contains(filePaths, p => p.Contains("policy"));
Assert.Contains(filePaths, p => p.Contains("verdict"));
Assert.Contains(filePaths, p => p.Contains("vex"));
}
/// <summary>
/// Round-trips an explicit time anchor: the timestamp and source written at
/// export must come back unchanged from the manifest, and a replay context
/// pinned to that timestamp must report it as its evaluation time.
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task E2E_FullCycleWithTimeAnchor()
{
// Setup with explicit time anchor
// Fixed UTC instant (not "now") so the round-trip assertion is exact.
var timestamp = new DateTimeOffset(2024, 6, 15, 12, 0, 0, TimeSpan.Zero);
var sbom = CreateCycloneDxSbom("app:time-test");
var feeds = CreateFeedsSnapshot();
var policy = CreatePolicyBundle();
var verdict = CreateVerdict("pass", "scan-time-test");
var writer = new AuditBundleWriter();
var bundlePath = Path.Combine(_exportDir, "time-anchor-test.tar.gz");
var writeResult = await writer.WriteAsync(new AuditBundleWriteRequest
{
OutputPath = bundlePath,
ScanId = "scan-time-test",
ImageRef = "app:time-test",
ImageDigest = "sha256:abc",
Decision = "pass",
Sbom = sbom,
FeedsSnapshot = feeds,
PolicyBundle = policy,
Verdict = verdict,
Sign = false,
TimeAnchor = new TimeAnchorInput
{
Timestamp = timestamp,
Source = "test-time-server"
}
});
Assert.True(writeResult.Success);
// Read and verify time anchor
var reader = new AuditBundleReader();
var readResult = await reader.ReadAsync(new AuditBundleReadRequest
{
BundlePath = bundlePath,
VerifySignature = false,
LoadReplayInputs = true
});
Assert.True(readResult.Success);
Assert.NotNull(readResult.Manifest?.TimeAnchor);
Assert.Equal(timestamp, readResult.Manifest.TimeAnchor.Timestamp);
Assert.Equal("test-time-server", readResult.Manifest.TimeAnchor.Source);
// Replay with time anchor context
using var context = new IsolatedReplayContext(new IsolatedReplayContextOptions
{
EvaluationTime = timestamp,
CleanupOnDispose = true
});
var initResult = await context.InitializeAsync(readResult);
Assert.True(initResult.Success);
Assert.Equal(timestamp, context.EvaluationTime);
}
#region Test Data Factories
/// <summary>
/// Builds a minimal CycloneDX 1.6 SBOM for the given image reference as
/// compact, camelCase UTF-8 JSON. When <paramref name="addMaliciousComponent"/>
/// is set, one extra component is appended so tamper-detection tests obtain
/// a different digest from otherwise identical inputs.
/// </summary>
private static byte[] CreateCycloneDxSbom(string imageRef, bool addMaliciousComponent = false)
{
    var componentList = new List<object>
    {
        new { type = "library", name = "lodash", version = "4.17.21", purl = "pkg:npm/lodash@4.17.21" },
        new { type = "library", name = "express", version = "4.18.2", purl = "pkg:npm/express@4.18.2" }
    };

    if (addMaliciousComponent)
    {
        componentList.Add(new { type = "library", name = "evil-package", version = "1.0.0", purl = "pkg:npm/evil-package@1.0.0" });
    }

    var document = new
    {
        bomFormat = "CycloneDX",
        specVersion = "1.6",
        version = 1,
        serialNumber = $"urn:uuid:{Guid.NewGuid()}",
        metadata = new
        {
            timestamp = DateTimeOffset.UtcNow.ToString("o"),
            component = new { type = "container", name = imageRef }
        },
        components = componentList.ToArray()
    };

    var serializerOptions = new JsonSerializerOptions
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };
    return Encoding.UTF8.GetBytes(JsonSerializer.Serialize(document, serializerOptions));
}
/// <summary>
/// Produces a fake vulnerability-feed snapshot as UTF-8 JSON with a trailing
/// newline: two sources ("nvd", "ghsa") with recent sync times and a fixed
/// advisory count.
/// </summary>
private static byte[] CreateFeedsSnapshot()
{
    var payload = new
    {
        type = "feed-snapshot",
        version = "1.0",
        timestamp = DateTimeOffset.UtcNow.ToString("o"),
        sources = new[]
        {
            new { name = "nvd", lastSync = DateTimeOffset.UtcNow.AddHours(-1).ToString("o") },
            new { name = "ghsa", lastSync = DateTimeOffset.UtcNow.AddHours(-2).ToString("o") }
        },
        advisoryCount = 150000
    };

    var json = JsonSerializer.Serialize(payload) + "\n";
    return Encoding.UTF8.GetBytes(json);
}
/// <summary>
/// Returns a fixed 20-byte sequence beginning with the gzip magic bytes
/// (0x1f 0x8b) that stands in for a real policy bundle archive.
/// </summary>
private static byte[] CreatePolicyBundle() =>
    new byte[]
    {
        0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x03, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00
    };
/// <summary>
/// Builds a fake policy verdict document as camelCase UTF-8 JSON for the
/// given decision and scan id, with fixed finding counts and attestation
/// type URLs.
/// </summary>
private static byte[] CreateVerdict(string decision, string scanId)
{
    var document = new
    {
        version = "1.0",
        scanId = scanId,
        decision = decision,
        evaluatedAt = DateTimeOffset.UtcNow.ToString("o"),
        policyVersion = "2024.1",
        findings = new
        {
            critical = 0,
            high = 2,
            medium = 5,
            low = 10,
            unknown = 0
        },
        attestation = new
        {
            type = "https://stellaops.io/verdict/v1",
            predicateType = "https://stellaops.io/attestation/verdict/v1"
        }
    };

    var serializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
    return Encoding.UTF8.GetBytes(JsonSerializer.Serialize(document, serializerOptions));
}
/// <summary>
/// Builds a minimal OpenVEX v0.2.0 document as camelCase UTF-8 JSON with a
/// single "not_affected" statement for CVE-2024-1234.
/// </summary>
private static byte[] CreateVexStatements()
{
    var statementList = new[]
    {
        new
        {
            vulnerability = new { id = "CVE-2024-1234" },
            status = "not_affected",
            justification = "vulnerable_code_not_present"
        }
    };

    var document = new
    {
        type = "https://openvex.dev/ns/v0.2.0",
        id = $"https://stellaops.io/vex/{Guid.NewGuid()}",
        author = "security-team@example.com",
        timestamp = DateTimeOffset.UtcNow.ToString("o"),
        statements = statementList
    };

    var serializerOptions = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
    return Encoding.UTF8.GetBytes(JsonSerializer.Serialize(document, serializerOptions));
}
/// <summary>
/// Computes the lowercase hexadecimal SHA-256 digest of a file; used to
/// verify byte-for-byte integrity after the simulated bundle transfer.
/// </summary>
/// <param name="filePath">Path of the file to hash.</param>
/// <returns>64-character lowercase hex SHA-256 digest.</returns>
private static async Task<string> ComputeFileHashAsync(string filePath)
{
    // BUG FIX: a stray "using StellaOps.TestKit;" directive had been pasted
    // into the method body; using directives are illegal inside a member and
    // broke compilation, so the line was removed.
    await using var stream = File.OpenRead(filePath);
    var hash = await SHA256.HashDataAsync(stream);
    return Convert.ToHexString(hash).ToLowerInvariant();
}
#endregion
}