// -----------------------------------------------------------------------------
// SbomVerifyIntegrationTests.cs
// Sprint: SPRINT_20260112_016_CLI_sbom_verify_offline
// Task: SBOM-CLI-009 — Integration tests with sample signed SBOM archives
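//
// The tests build signed SBOM archives in a temporary directory and assert the
// structural properties the offline verify flow depends on: expected file set,
// manifest hash integrity, DSSE envelope shape, SPDX/CycloneDX required fields,
// and optional generation metadata.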
// -----------------------------------------------------------------------------
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Cli.Tests.Integration;
[Trait("Category", TestCategories.Integration)]
public sealed class SbomVerifyIntegrationTests : IDisposable
{
private readonly string _testDir;
private readonly List<string> _tempFiles = new();
public SbomVerifyIntegrationTests()
{
_testDir = Path.Combine(Path.GetTempPath(), $"sbom-verify-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_testDir);
}
public void Dispose()
{
foreach (var file in _tempFiles)
{
try { File.Delete(file); } catch { /* ignore */ }
}
try { Directory.Delete(_testDir, recursive: true); } catch { /* ignore */ }
}
#region Archive Creation Helpers
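// Each helper produces a gzip-compressed PAX tar archive with the layout exercised
// by the tests: the SBOM document (sbom.spdx.json or sbom.cdx.json), a DSSE
// envelope (sbom.dsse.json), a manifest.json listing sha256 hashes of every other
// entry, and optionally metadata.json with tool versions and generation details.
// Entry modes and modification times are fixed so archives are reproducible across runs.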
private string CreateValidSignedSbomArchive(string format = "spdx", bool includeMetadata = true)
{
var archivePath = Path.Combine(_testDir, $"test-{Guid.NewGuid():N}.tar.gz");
_tempFiles.Add(archivePath);
using var fileStream = File.Create(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
var files = new Dictionary<string, string>();
// Add SBOM file
var sbomContent = format == "spdx" ? CreateSpdxSbom() : CreateCycloneDxSbom();
var sbomFileName = format == "spdx" ? "sbom.spdx.json" : "sbom.cdx.json";
files[sbomFileName] = sbomContent;
// Add DSSE envelope
var dsseContent = CreateDsseEnvelope(sbomContent);
files["sbom.dsse.json"] = dsseContent;
// Add metadata
if (includeMetadata)
{
var metadataContent = CreateMetadata();
files["metadata.json"] = metadataContent;
}
// Create manifest with hashes
var manifestContent = CreateManifest(files);
files["manifest.json"] = manifestContent;
// Add all files to archive
foreach (var (name, content) in files)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = new DateTimeOffset(2026, 1, 15, 10, 30, 0, TimeSpan.Zero),
DataStream = new MemoryStream(Encoding.UTF8.GetBytes(content), writable: false)
};
tarWriter.WriteEntry(entry);
}
return archivePath;
}
private string CreateCorruptedArchive()
{
var archivePath = Path.Combine(_testDir, $"corrupted-{Guid.NewGuid():N}.tar.gz");
_tempFiles.Add(archivePath);
using var fileStream = File.Create(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
var files = new Dictionary<string, string>();
// Add SBOM file
var sbomContent = CreateSpdxSbom();
files["sbom.spdx.json"] = sbomContent;
// Add DSSE envelope
var dsseContent = CreateDsseEnvelope(sbomContent);
files["sbom.dsse.json"] = dsseContent;
// Create manifest with WRONG hash to simulate corruption
var manifestContent = JsonSerializer.Serialize(new
{
schemaVersion = "1.0.0",
files = new[]
{
new { path = "sbom.spdx.json", sha256 = "0000000000000000000000000000000000000000000000000000000000000000" },
new { path = "sbom.dsse.json", sha256 = ComputeSha256(dsseContent) }
}
}, new JsonSerializerOptions { WriteIndented = true });
files["manifest.json"] = manifestContent;
// Add all files to archive
foreach (var (name, content) in files)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = new DateTimeOffset(2026, 1, 15, 10, 30, 0, TimeSpan.Zero),
DataStream = new MemoryStream(Encoding.UTF8.GetBytes(content), writable: false)
};
tarWriter.WriteEntry(entry);
}
return archivePath;
}
private string CreateArchiveWithInvalidDsse()
{
var archivePath = Path.Combine(_testDir, $"invalid-dsse-{Guid.NewGuid():N}.tar.gz");
_tempFiles.Add(archivePath);
using var fileStream = File.Create(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
var files = new Dictionary<string, string>();
// Add SBOM file
var sbomContent = CreateSpdxSbom();
files["sbom.spdx.json"] = sbomContent;
// Add INVALID DSSE envelope (missing signatures)
var dsseContent = JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(sbomContent))
// Missing signatures array!
}, new JsonSerializerOptions { WriteIndented = true });
files["sbom.dsse.json"] = dsseContent;
// Create manifest
var manifestContent = CreateManifest(files);
files["manifest.json"] = manifestContent;
foreach (var (name, content) in files)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = new DateTimeOffset(2026, 1, 15, 10, 30, 0, TimeSpan.Zero),
DataStream = new MemoryStream(Encoding.UTF8.GetBytes(content), writable: false)
};
tarWriter.WriteEntry(entry);
}
return archivePath;
}
private string CreateArchiveWithInvalidSbom()
{
var archivePath = Path.Combine(_testDir, $"invalid-sbom-{Guid.NewGuid():N}.tar.gz");
_tempFiles.Add(archivePath);
using var fileStream = File.Create(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
var files = new Dictionary<string, string>();
// Add INVALID SBOM file (missing required fields)
var sbomContent = JsonSerializer.Serialize(new
{
// Missing spdxVersion, SPDXID, name
packages = new[] { new { name = "test" } }
}, new JsonSerializerOptions { WriteIndented = true });
files["sbom.spdx.json"] = sbomContent;
// Add DSSE envelope
var dsseContent = CreateDsseEnvelope(sbomContent);
files["sbom.dsse.json"] = dsseContent;
// Create manifest
var manifestContent = CreateManifest(files);
files["manifest.json"] = manifestContent;
foreach (var (name, content) in files)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = new DateTimeOffset(2026, 1, 15, 10, 30, 0, TimeSpan.Zero),
DataStream = new MemoryStream(Encoding.UTF8.GetBytes(content), writable: false)
};
tarWriter.WriteEntry(entry);
}
return archivePath;
}
private static string CreateSpdxSbom()
{
return JsonSerializer.Serialize(new
{
spdxVersion = "SPDX-2.3",
SPDXID = "SPDXRef-DOCUMENT",
name = "test-sbom",
creationInfo = new
{
created = "2026-01-15T10:30:00Z",
creators = new[] { "Tool: StellaOps Scanner" }
},
packages = new[]
{
new { name = "test-package", SPDXID = "SPDXRef-Package-1", versionInfo = "1.0.0" },
new { name = "dependency-a", SPDXID = "SPDXRef-Package-2", versionInfo = "2.0.0" }
}
}, new JsonSerializerOptions { WriteIndented = true });
}
private static string CreateCycloneDxSbom()
{
return JsonSerializer.Serialize(new
{
bomFormat = "CycloneDX",
specVersion = "1.6",
version = 1,
metadata = new
{
timestamp = "2026-01-15T10:30:00Z",
tools = new[] { new { name = "StellaOps Scanner", version = "2027.Q1" } }
},
components = new[]
{
new { type = "library", name = "test-package", version = "1.0.0" },
new { type = "library", name = "dependency-a", version = "2.0.0" }
}
}, new JsonSerializerOptions { WriteIndented = true });
}
private static string CreateDsseEnvelope(string payload)
{
return JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)),
signatures = new[]
{
new
{
keyid = "test-key-id",
sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature"))
}
}
}, new JsonSerializerOptions { WriteIndented = true });
}
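// CreateDsseEnvelope above uses a placeholder signature, which is enough for the
// structural assertions in this suite. For reference, a cryptographically valid
// DSSE signature is computed over the pre-authentication encoding (PAE) of the
// payload type and payload, not over the raw payload. The sketch below illustrates
// that with an in-memory ECDSA key; it is not used by the tests, and the key and
// algorithm choice is an assumption, not the signer's actual scheme.
private static string SignOverDssePaeSketch(string payloadType, byte[] payload, ECDsa key)
{
    // PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
    var prefix = Encoding.UTF8.GetBytes(
        $"DSSEv1 {Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payload.Length} ");
    var pae = new byte[prefix.Length + payload.Length];
    prefix.CopyTo(pae, 0);
    payload.CopyTo(pae, prefix.Length);
    // Sign the PAE bytes; a caller would pair this with e.g. ECDsa.Create(ECCurve.NamedCurves.nistP256)
    // and publish the matching public key under the envelope's keyid.
    return Convert.ToBase64String(key.SignData(pae, HashAlgorithmName.SHA256));
}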
private static string CreateMetadata()
{
return JsonSerializer.Serialize(new
{
schemaVersion = "1.0.0",
stellaOps = new
{
suiteVersion = "2027.Q1",
scannerVersion = "1.2.3",
signerVersion = "1.0.0"
},
generation = new
{
timestamp = "2026-01-15T10:30:00Z"
},
input = new
{
imageRef = "myregistry/app:1.0",
imageDigest = "sha256:abc123def456"
}
}, new JsonSerializerOptions { WriteIndented = true });
}
private static string CreateManifest(Dictionary<string, string> files)
{
var fileEntries = files.Where(f => f.Key != "manifest.json")
.Select(f => new { path = f.Key, sha256 = ComputeSha256(f.Value) })
.ToArray();
return JsonSerializer.Serialize(new
{
schemaVersion = "1.0.0",
files = fileEntries
}, new JsonSerializerOptions { WriteIndented = true });
}
private static string ComputeSha256(string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
#endregion
#region Tests
[Fact]
public void ValidSpdxArchive_CanBeCreated()
{
// Act
var archivePath = CreateValidSignedSbomArchive("spdx");
// Assert
Assert.True(File.Exists(archivePath));
Assert.True(new FileInfo(archivePath).Length > 0);
}
[Fact]
public void ValidCycloneDxArchive_CanBeCreated()
{
// Act
var archivePath = CreateValidSignedSbomArchive("cdx");
// Assert
Assert.True(File.Exists(archivePath));
Assert.True(new FileInfo(archivePath).Length > 0);
}
[Fact]
public void ValidArchive_ContainsExpectedFiles()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var extractedFiles = ExtractArchiveFileNames(archivePath);
// Assert
Assert.Contains("sbom.spdx.json", extractedFiles);
Assert.Contains("sbom.dsse.json", extractedFiles);
Assert.Contains("manifest.json", extractedFiles);
Assert.Contains("metadata.json", extractedFiles);
}
[Fact]
public void ValidArchive_ManifestHashesMatch()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var (manifestContent, fileContents) = ExtractArchiveContents(archivePath);
using var manifest = JsonDocument.Parse(manifestContent);
var filesArray = manifest.RootElement.GetProperty("files");
// Assert
foreach (var file in filesArray.EnumerateArray())
{
var path = file.GetProperty("path").GetString()!;
var expectedHash = file.GetProperty("sha256").GetString()!;
var actualHash = ComputeSha256(fileContents[path]);
Assert.Equal(expectedHash.ToLowerInvariant(), actualHash.ToLowerInvariant());
}
}
[Fact]
public void CorruptedArchive_HasMismatchedHashes()
{
// Arrange
var archivePath = CreateCorruptedArchive();
// Act
var (manifestContent, fileContents) = ExtractArchiveContents(archivePath);
using var manifest = JsonDocument.Parse(manifestContent);
var filesArray = manifest.RootElement.GetProperty("files");
// Assert - at least one hash should NOT match
var hasMismatch = false;
foreach (var file in filesArray.EnumerateArray())
{
var path = file.GetProperty("path").GetString()!;
var expectedHash = file.GetProperty("sha256").GetString()!;
var actualHash = ComputeSha256(fileContents[path]);
if (!expectedHash.Equals(actualHash, StringComparison.OrdinalIgnoreCase))
{
hasMismatch = true;
break;
}
}
Assert.True(hasMismatch, "Corrupted archive should have at least one mismatched hash");
}
[Fact]
public void ValidArchive_DsseHasSignatures()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var dsse = JsonDocument.Parse(fileContents["sbom.dsse.json"]);
// Assert
Assert.True(dsse.RootElement.TryGetProperty("payloadType", out _));
Assert.True(dsse.RootElement.TryGetProperty("payload", out _));
Assert.True(dsse.RootElement.TryGetProperty("signatures", out var sigs));
Assert.True(sigs.GetArrayLength() > 0);
}
[Fact]
public void InvalidDsseArchive_MissesSignatures()
{
// Arrange
var archivePath = CreateArchiveWithInvalidDsse();
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var dsse = JsonDocument.Parse(fileContents["sbom.dsse.json"]);
// Assert
Assert.False(dsse.RootElement.TryGetProperty("signatures", out _));
}
[Fact]
public void ValidSpdxArchive_HasRequiredSpdxFields()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var sbom = JsonDocument.Parse(fileContents["sbom.spdx.json"]);
// Assert
Assert.True(sbom.RootElement.TryGetProperty("spdxVersion", out _));
Assert.True(sbom.RootElement.TryGetProperty("SPDXID", out _));
Assert.True(sbom.RootElement.TryGetProperty("name", out _));
}
[Fact]
public void ValidCycloneDxArchive_HasRequiredFields()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("cdx");
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var sbom = JsonDocument.Parse(fileContents["sbom.cdx.json"]);
// Assert
Assert.True(sbom.RootElement.TryGetProperty("bomFormat", out _));
Assert.True(sbom.RootElement.TryGetProperty("specVersion", out _));
}
[Fact]
public void InvalidSbomArchive_MissesRequiredFields()
{
// Arrange
var archivePath = CreateArchiveWithInvalidSbom();
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var sbom = JsonDocument.Parse(fileContents["sbom.spdx.json"]);
// Assert
Assert.False(sbom.RootElement.TryGetProperty("spdxVersion", out _));
Assert.False(sbom.RootElement.TryGetProperty("SPDXID", out _));
}
[Fact]
public void ValidArchive_MetadataHasToolVersions()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var metadata = JsonDocument.Parse(fileContents["metadata.json"]);
// Assert
Assert.True(metadata.RootElement.TryGetProperty("stellaOps", out var stellaOps));
Assert.True(stellaOps.TryGetProperty("suiteVersion", out _));
Assert.True(stellaOps.TryGetProperty("scannerVersion", out _));
}
[Fact]
public void ValidArchive_MetadataHasTimestamp()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx");
// Act
var (_, fileContents) = ExtractArchiveContents(archivePath);
using var metadata = JsonDocument.Parse(fileContents["metadata.json"]);
// Assert
Assert.True(metadata.RootElement.TryGetProperty("generation", out var generation));
Assert.True(generation.TryGetProperty("timestamp", out _));
}
[Fact]
public void ValidArchive_WithoutMetadata_StillValid()
{
// Arrange
var archivePath = CreateValidSignedSbomArchive("spdx", includeMetadata: false);
// Act
var extractedFiles = ExtractArchiveFileNames(archivePath);
// Assert
Assert.DoesNotContain("metadata.json", extractedFiles);
Assert.Contains("sbom.spdx.json", extractedFiles);
Assert.Contains("sbom.dsse.json", extractedFiles);
Assert.Contains("manifest.json", extractedFiles);
}
#endregion
#region Extraction Helpers
private static List<string> ExtractArchiveFileNames(string archivePath)
{
var fileNames = new List<string>();
using var fileStream = File.OpenRead(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
using var tarReader = new TarReader(gzipStream);
while (tarReader.GetNextEntry() is { } entry)
{
if (entry.EntryType == TarEntryType.RegularFile)
{
fileNames.Add(entry.Name);
}
}
return fileNames;
}
private static (string ManifestContent, Dictionary<string, string> FileContents) ExtractArchiveContents(string archivePath)
{
var fileContents = new Dictionary<string, string>();
using var fileStream = File.OpenRead(archivePath);
using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
using var tarReader = new TarReader(gzipStream);
while (tarReader.GetNextEntry() is { } entry)
{
if (entry.EntryType == TarEntryType.RegularFile && entry.DataStream is not null)
{
using var reader = new StreamReader(entry.DataStream);
fileContents[entry.Name] = reader.ReadToEnd();
}
}
return (fileContents.GetValueOrDefault("manifest.json", "{}"), fileContents);
}
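// An alternative to streaming entries is to expand the archive to a temporary
// directory with TarFile.ExtractToDirectory and read the files from disk. A
// minimal sketch of that approach (not used here; the in-memory readers above
// avoid the extra disk I/O and cleanup):
private static string ExtractToTempDirectorySketch(string archivePath)
{
    var destination = Path.Combine(Path.GetTempPath(), $"sbom-extract-{Guid.NewGuid():N}");
    Directory.CreateDirectory(destination);
    using var fileStream = File.OpenRead(archivePath);
    using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
    TarFile.ExtractToDirectory(gzipStream, destination, overwriteFiles: true);
    return destination;
}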
#endregion
}