Frontend gaps fill work. Testing fixes work. Auditing in progress.
This commit is contained in:
@@ -12,7 +12,6 @@ using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.TestKit;
|
||||
using StellaOps.Verdict;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Tests.Determinism;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<PackageReference Include="coverlet.collector" />
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -20,3 +20,4 @@
|
||||
<ProjectReference Include="..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
@@ -32,3 +32,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -66,3 +66,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ public sealed class E2EReproducibilityTestFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Initializes the test fixture, starting required services.
|
||||
/// </summary>
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
if (_initialized)
|
||||
return;
|
||||
@@ -771,7 +771,7 @@ public sealed class E2EReproducibilityTestFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Disposes of the test fixture resources.
|
||||
/// </summary>
|
||||
public async Task DisposeAsync()
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
_signingKey?.Dispose();
|
||||
_factory?.Dispose();
|
||||
@@ -950,3 +950,6 @@ public sealed class BundleResult
|
||||
#pragma warning disable CA1050 // Declare types in namespaces
|
||||
public partial class Program { }
|
||||
#pragma warning restore CA1050
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -32,12 +32,12 @@ public sealed class E2EReproducibilityTests : IClassFixture<E2EReproducibilityTe
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
await _fixture.InitializeAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
|
||||
#region E2E-8200-011: Identical Verdict Hash
|
||||
|
||||
@@ -455,3 +455,6 @@ public sealed class E2EReproducibilityTests : IClassFixture<E2EReproducibilityTe
|
||||
public sealed class E2EReproducibilityCollection : ICollectionFixture<E2EReproducibilityTestFixture>
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -85,3 +85,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="BenchmarkDotNet" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
@@ -31,3 +31,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
|
||||
private PostgreSqlContainer? _container;
|
||||
private string? _connectionString;
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
_container = new PostgreSqlBuilder()
|
||||
.WithImage("postgres:16-alpine")
|
||||
@@ -38,7 +38,7 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
|
||||
_connectionString = _container.GetConnectionString();
|
||||
}
|
||||
|
||||
public async Task DisposeAsync()
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_container != null)
|
||||
{
|
||||
@@ -246,3 +246,6 @@ public class PostgresOnlyStartupTests : IAsyncLifetime
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="Npgsql" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -40,3 +40,5 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
|
||||
@@ -34,15 +34,15 @@ public class ProofChainIntegrationTests : IAsyncLifetime
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
_client = await _fixture.CreateClientAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
_client.Dispose();
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
#region T1-AC1: Test scan submission creates manifest
|
||||
@@ -371,3 +371,6 @@ public class ProofChainIntegrationTests : IAsyncLifetime
|
||||
public class ProofChainIntegrationCollection : ICollectionFixture<ProofChainTestFixture>
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ public sealed class ProofChainTestFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Initializes the test fixture, starting PostgreSQL container.
|
||||
/// </summary>
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
if (_initialized)
|
||||
return;
|
||||
@@ -98,7 +98,7 @@ public sealed class ProofChainTestFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Disposes of the test fixture resources.
|
||||
/// </summary>
|
||||
public async Task DisposeAsync()
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
_factory?.Dispose();
|
||||
|
||||
@@ -116,3 +116,6 @@ public sealed class ProofChainTestFixture : IAsyncLifetime
|
||||
#pragma warning disable CA1050 // Declare types in namespaces
|
||||
public partial class Program { }
|
||||
#pragma warning restore CA1050
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -51,3 +51,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -48,3 +48,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -36,3 +36,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -22,11 +22,12 @@ public class VulnApiTests : IClassFixture<WebApplicationFactory<Program>>
|
||||
{
|
||||
var client = factory.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("x-stella-tenant", "tenant-a");
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
|
||||
var response = await client.GetAsync("/v1/vulns");
|
||||
var response = await client.GetAsync("/v1/vulns", cancellationToken);
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<VulnListResponse>();
|
||||
var payload = await response.Content.ReadFromJsonAsync<VulnListResponse>(cancellationToken);
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal(new[] { "vuln-0001", "vuln-0002" }, payload!.Items.Select(v => v.Id));
|
||||
}
|
||||
@@ -37,11 +38,12 @@ public class VulnApiTests : IClassFixture<WebApplicationFactory<Program>>
|
||||
{
|
||||
var client = factory.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("x-stella-tenant", "tenant-a");
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
|
||||
var response = await client.GetAsync("/v1/vulns?cve=CVE-2024-2222");
|
||||
var response = await client.GetAsync("/v1/vulns?cve=CVE-2024-2222", cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<VulnListResponse>();
|
||||
var payload = await response.Content.ReadFromJsonAsync<VulnListResponse>(cancellationToken);
|
||||
Assert.Single(payload!.Items);
|
||||
Assert.Equal("vuln-0002", payload.Items[0].Id);
|
||||
}
|
||||
@@ -52,8 +54,9 @@ public class VulnApiTests : IClassFixture<WebApplicationFactory<Program>>
|
||||
{
|
||||
var client = factory.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("x-stella-tenant", "tenant-a");
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
|
||||
var response = await client.GetAsync("/v1/vulns/missing");
|
||||
var response = await client.GetAsync("/v1/vulns/missing", cancellationToken);
|
||||
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
|
||||
}
|
||||
|
||||
@@ -63,11 +66,12 @@ public class VulnApiTests : IClassFixture<WebApplicationFactory<Program>>
|
||||
{
|
||||
var client = factory.CreateClient();
|
||||
client.DefaultRequestHeaders.Add("x-stella-tenant", "tenant-a");
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
|
||||
var response = await client.GetAsync("/v1/vulns/vuln-0001");
|
||||
var response = await client.GetAsync("/v1/vulns/vuln-0001", cancellationToken);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var detail = await response.Content.ReadFromJsonAsync<VulnDetail>();
|
||||
var detail = await response.Content.ReadFromJsonAsync<VulnDetail>(cancellationToken);
|
||||
Assert.NotNull(detail);
|
||||
Assert.Equal("rat-0001", detail!.Rationale.Id);
|
||||
Assert.Contains("/src/app/Program.cs", detail.Paths);
|
||||
|
||||
@@ -0,0 +1,306 @@
|
||||
// <copyright file="FeedSnapshotCommand.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Testing.FixtureHarvester.Models;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Commands;
|
||||
|
||||
/// <summary>
|
||||
/// Feed Snapshot command - capture vulnerability feed snapshots from Concelier for deterministic testing.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-005)
|
||||
/// </summary>
|
||||
internal static class FeedSnapshotCommand
|
||||
{
|
||||
/// <summary>
|
||||
/// Execute the feed snapshot command to capture vulnerability feed data.
|
||||
/// </summary>
|
||||
internal static async Task ExecuteAsync(string feedType, string? concelierUrl, int count, string? output)
|
||||
{
|
||||
Console.WriteLine($"Capturing {feedType} feed snapshot...");
|
||||
|
||||
var baseUrl = concelierUrl ?? "http://localhost:5010";
|
||||
var outputDir = output ?? "src/__Tests/fixtures/feeds";
|
||||
var fixtureId = $"feed-{feedType.ToLowerInvariant()}-{count}";
|
||||
var fixtureDir = Path.Combine(outputDir, fixtureId);
|
||||
|
||||
Directory.CreateDirectory(fixtureDir);
|
||||
Directory.CreateDirectory(Path.Combine(fixtureDir, "raw"));
|
||||
|
||||
var capturedAt = DateTime.UtcNow;
|
||||
var advisories = new List<JsonDocument>();
|
||||
|
||||
try
|
||||
{
|
||||
using var client = new HttpClient { BaseAddress = new Uri(baseUrl) };
|
||||
client.Timeout = TimeSpan.FromMinutes(5);
|
||||
|
||||
Console.WriteLine($"Connecting to Concelier: {baseUrl}");
|
||||
|
||||
// Route depends on feed type
|
||||
var apiEndpoint = GetFeedEndpoint(feedType);
|
||||
Console.WriteLine($" Endpoint: {apiEndpoint}");
|
||||
|
||||
// Fetch advisories
|
||||
var response = await client.GetAsync($"{apiEndpoint}?limit={count}");
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Console.WriteLine($"WARNING: Concelier returned {response.StatusCode}");
|
||||
Console.WriteLine("Falling back to sample feed generation...");
|
||||
await GenerateSampleFeedAsync(feedType, count, fixtureDir, capturedAt);
|
||||
return;
|
||||
}
|
||||
|
||||
var content = await response.Content.ReadAsStringAsync();
|
||||
|
||||
// Save raw response
|
||||
var rawPath = Path.Combine(fixtureDir, "raw", $"{feedType}_snapshot.json");
|
||||
await File.WriteAllTextAsync(rawPath, content);
|
||||
Console.WriteLine($" Raw snapshot: {rawPath}");
|
||||
|
||||
// Compute hash
|
||||
var sha256 = await ComputeSha256Async(rawPath);
|
||||
Console.WriteLine($" SHA-256: {sha256}");
|
||||
|
||||
// Parse and normalize
|
||||
var doc = JsonDocument.Parse(content);
|
||||
var normalizedPath = Path.Combine(fixtureDir, "normalized.ndjson");
|
||||
await NormalizeFeedAsync(doc, normalizedPath, feedType);
|
||||
Console.WriteLine($" Normalized: {normalizedPath}");
|
||||
|
||||
// Create metadata
|
||||
var meta = new FeedSnapshotMeta
|
||||
{
|
||||
Id = fixtureId,
|
||||
FeedType = feedType,
|
||||
Source = "concelier",
|
||||
ConcelierEndpoint = $"{baseUrl}{apiEndpoint}",
|
||||
Count = count,
|
||||
CapturedAt = capturedAt.ToString("O"),
|
||||
Sha256 = sha256,
|
||||
RefreshPolicy = "manual",
|
||||
Notes = $"Captured from Concelier feed snapshot endpoint",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "meta.json");
|
||||
var metaJson = JsonSerializer.Serialize(meta, new JsonSerializerOptions { WriteIndented = true });
|
||||
await File.WriteAllTextAsync(metaPath, metaJson);
|
||||
|
||||
Console.WriteLine();
|
||||
Console.WriteLine($"✓ Feed snapshot captured: {fixtureDir}");
|
||||
Console.WriteLine($" Type: {feedType}");
|
||||
Console.WriteLine($" Count: {count}");
|
||||
Console.WriteLine($" Source: {baseUrl}");
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
Console.WriteLine($"WARNING: Could not connect to Concelier: {ex.Message}");
|
||||
Console.WriteLine("Generating sample feed fixtures instead...");
|
||||
await GenerateSampleFeedAsync(feedType, count, fixtureDir, capturedAt);
|
||||
}
|
||||
}
|
||||
|
||||
private static string GetFeedEndpoint(string feedType)
|
||||
{
|
||||
return feedType.ToUpperInvariant() switch
|
||||
{
|
||||
"OSV" => "/api/v1/feeds/osv/advisories",
|
||||
"GHSA" => "/api/v1/feeds/ghsa/advisories",
|
||||
"NVD" => "/api/v1/feeds/nvd/advisories",
|
||||
"EPSS" => "/api/v1/feeds/epss/scores",
|
||||
"KEV" => "/api/v1/feeds/kev/catalog",
|
||||
"OVAL" => "/api/v1/feeds/oval/definitions",
|
||||
_ => $"/api/v1/feeds/{feedType.ToLowerInvariant()}/advisories",
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task GenerateSampleFeedAsync(string feedType, int count, string fixtureDir, DateTime capturedAt)
|
||||
{
|
||||
Console.WriteLine($"Generating {count} sample {feedType} advisories...");
|
||||
|
||||
var rawDir = Path.Combine(fixtureDir, "raw");
|
||||
var advisories = GenerateSampleAdvisories(feedType, count);
|
||||
|
||||
// Write as NDJSON
|
||||
var rawPath = Path.Combine(rawDir, $"{feedType}_sample.ndjson");
|
||||
await using var writer = File.CreateText(rawPath);
|
||||
foreach (var advisory in advisories)
|
||||
{
|
||||
await writer.WriteLineAsync(JsonSerializer.Serialize(advisory));
|
||||
}
|
||||
|
||||
// Compute hash
|
||||
var sha256 = await ComputeSha256Async(rawPath);
|
||||
|
||||
// Create meta
|
||||
var meta = new FeedSnapshotMeta
|
||||
{
|
||||
Id = $"feed-{feedType.ToLowerInvariant()}-{count}",
|
||||
FeedType = feedType,
|
||||
Source = "generated-sample",
|
||||
ConcelierEndpoint = null,
|
||||
Count = count,
|
||||
CapturedAt = capturedAt.ToString("O"),
|
||||
Sha256 = sha256,
|
||||
RefreshPolicy = "manual",
|
||||
Notes = "Generated sample data for offline testing. Replace with real feed data when Concelier is available.",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "meta.json");
|
||||
var metaJson = JsonSerializer.Serialize(meta, new JsonSerializerOptions { WriteIndented = true });
|
||||
await File.WriteAllTextAsync(metaPath, metaJson);
|
||||
|
||||
Console.WriteLine($"✓ Sample feed generated: {fixtureDir}");
|
||||
Console.WriteLine($" Advisories: {count}");
|
||||
Console.WriteLine($" SHA-256: {sha256}");
|
||||
}
|
||||
|
||||
private static List<object> GenerateSampleAdvisories(string feedType, int count)
|
||||
{
|
||||
var advisories = new List<object>();
|
||||
var ecosystems = new[] { "PyPI", "npm", "Go", "Maven", "NuGet", "RubyGems", "crates.io" };
|
||||
var severities = new[] { "CRITICAL", "HIGH", "MEDIUM", "LOW" };
|
||||
|
||||
for (int i = 1; i <= count; i++)
|
||||
{
|
||||
var ecosystem = ecosystems[i % ecosystems.Length];
|
||||
var severity = severities[i % severities.Length];
|
||||
|
||||
advisories.Add(feedType.ToUpperInvariant() switch
|
||||
{
|
||||
"OSV" => new
|
||||
{
|
||||
id = $"OSV-SAMPLE-{i:D4}",
|
||||
summary = $"Sample vulnerability {i} in {ecosystem} package",
|
||||
details = $"This is a sample {severity.ToLowerInvariant()} vulnerability for testing purposes.",
|
||||
affected = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
package = new { ecosystem = ecosystem, name = $"sample-package-{i}" },
|
||||
ranges = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
type = "ECOSYSTEM",
|
||||
events = new object[]
|
||||
{
|
||||
new { introduced = "0" },
|
||||
new { @fixed = $"1.{i}.0" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
severity = new[] { new { type = "CVSS_V3", score = $"{6.0 + (i % 4)}.{i % 10}" } },
|
||||
published = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
modified = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
"GHSA" => new
|
||||
{
|
||||
ghsaId = $"GHSA-sample-{i:D4}",
|
||||
summary = $"Sample GHSA vulnerability {i}",
|
||||
severity = severity,
|
||||
cvss = new { score = 6.0 + (i % 4), vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:N" },
|
||||
publishedAt = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
updatedAt = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
"NVD" => new
|
||||
{
|
||||
cve = new
|
||||
{
|
||||
id = $"CVE-2024-{10000 + i}",
|
||||
sourceIdentifier = "sample@stellaops.dev",
|
||||
published = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
lastModified = DateTime.UtcNow.ToString("O"),
|
||||
descriptions = new[]
|
||||
{
|
||||
new { lang = "en", value = $"Sample NVD vulnerability {i} for testing." }
|
||||
},
|
||||
metrics = new
|
||||
{
|
||||
cvssMetricV31 = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
cvssData = new
|
||||
{
|
||||
version = "3.1",
|
||||
baseScore = 6.0 + (i % 4),
|
||||
baseSeverity = severity
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => new
|
||||
{
|
||||
id = $"SAMPLE-{feedType.ToUpperInvariant()}-{i:D4}",
|
||||
type = feedType,
|
||||
severity = severity,
|
||||
created = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return advisories;
|
||||
}
|
||||
|
||||
private static async Task NormalizeFeedAsync(JsonDocument doc, string outputPath, string feedType)
|
||||
{
|
||||
await using var writer = File.CreateText(outputPath);
|
||||
|
||||
// Handle different feed response structures
|
||||
JsonElement items;
|
||||
if (doc.RootElement.TryGetProperty("items", out items) ||
|
||||
doc.RootElement.TryGetProperty("advisories", out items) ||
|
||||
doc.RootElement.TryGetProperty("vulnerabilities", out items) ||
|
||||
doc.RootElement.TryGetProperty("data", out items))
|
||||
{
|
||||
foreach (var item in items.EnumerateArray())
|
||||
{
|
||||
await writer.WriteLineAsync(item.GetRawText());
|
||||
}
|
||||
}
|
||||
else if (doc.RootElement.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var item in doc.RootElement.EnumerateArray())
|
||||
{
|
||||
await writer.WriteLineAsync(item.GetRawText());
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Single object
|
||||
await writer.WriteLineAsync(doc.RootElement.GetRawText());
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<string> ComputeSha256Async(string filePath)
|
||||
{
|
||||
using var sha256 = SHA256.Create();
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hashBytes = await sha256.ComputeHashAsync(stream);
|
||||
return "sha256:" + BitConverter.ToString(hashBytes).Replace("-", string.Empty).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Metadata for captured feed snapshots.
|
||||
/// </summary>
|
||||
internal class FeedSnapshotMeta
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string FeedType { get; set; } = string.Empty;
|
||||
public string Source { get; set; } = string.Empty;
|
||||
public string? ConcelierEndpoint { get; set; }
|
||||
public int Count { get; set; }
|
||||
public string CapturedAt { get; set; } = string.Empty;
|
||||
public string Sha256 { get; set; } = string.Empty;
|
||||
public string RefreshPolicy { get; set; } = "manual";
|
||||
public string Notes { get; set; } = string.Empty;
|
||||
}
|
||||
277
src/__Tests/Tools/FixtureHarvester/Commands/OciPinCommand.cs
Normal file
277
src/__Tests/Tools/FixtureHarvester/Commands/OciPinCommand.cs
Normal file
@@ -0,0 +1,277 @@
|
||||
// <copyright file="OciPinCommand.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Testing.FixtureHarvester.Models;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Commands;
|
||||
|
||||
/// <summary>
|
||||
/// OCI Pin command - retrieve and pin OCI image digests for deterministic testing.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-004)
|
||||
/// </summary>
|
||||
internal static class OciPinCommand
|
||||
{
|
||||
/// <summary>
|
||||
/// Execute the OCI pin command to capture image digest metadata.
|
||||
/// </summary>
|
||||
internal static async Task ExecuteAsync(string imageRef, string? output, bool verify)
|
||||
{
|
||||
Console.WriteLine($"Pinning OCI image: {imageRef}");
|
||||
|
||||
// Parse image reference
|
||||
var (registry, repository, tag) = ParseImageRef(imageRef);
|
||||
Console.WriteLine($" Registry: {registry}");
|
||||
Console.WriteLine($" Repository: {repository}");
|
||||
Console.WriteLine($" Tag: {tag}");
|
||||
|
||||
// Get manifest digest via OCI Distribution API
|
||||
var manifestDigest = await GetManifestDigestAsync(registry, repository, tag);
|
||||
if (string.IsNullOrEmpty(manifestDigest))
|
||||
{
|
||||
Console.WriteLine("ERROR: Could not retrieve manifest digest.");
|
||||
Console.WriteLine(" Ensure the image exists and you have access to the registry.");
|
||||
Console.WriteLine(" For private registries, authenticate with: docker login <registry>");
|
||||
return;
|
||||
}
|
||||
|
||||
Console.WriteLine($" Manifest Digest: {manifestDigest}");
|
||||
|
||||
// Build pinned reference
|
||||
var pinnedRef = $"{registry}/{repository}@{manifestDigest}";
|
||||
Console.WriteLine($" Pinned Reference: {pinnedRef}");
|
||||
|
||||
// Fetch config digest (for verification)
|
||||
var configDigest = await GetConfigDigestAsync(registry, repository, manifestDigest);
|
||||
Console.WriteLine($" Config Digest: {configDigest ?? "N/A"}");
|
||||
|
||||
// Create fixture metadata
|
||||
var fixtureId = SanitizeId($"oci-{repository.Replace('/', '-')}-{tag}");
|
||||
var fixtureDir = Path.Combine(output ?? "src/__Tests/fixtures/oci", fixtureId);
|
||||
Directory.CreateDirectory(fixtureDir);
|
||||
|
||||
var meta = new OciFixtureMeta
|
||||
{
|
||||
Id = fixtureId,
|
||||
ImageReference = imageRef,
|
||||
PinnedReference = pinnedRef,
|
||||
Registry = registry,
|
||||
Repository = repository,
|
||||
Tag = tag,
|
||||
ManifestDigest = manifestDigest,
|
||||
ConfigDigest = configDigest,
|
||||
PinnedAt = DateTime.UtcNow.ToString("O"),
|
||||
Verified = verify && await VerifyDigestAsync(registry, repository, manifestDigest),
|
||||
Notes = $"OCI image pinned for deterministic testing",
|
||||
RefreshPolicy = "manual",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "oci-pin.json");
|
||||
var metaJson = JsonSerializer.Serialize(meta, new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = true,
|
||||
});
|
||||
await File.WriteAllTextAsync(metaPath, metaJson);
|
||||
|
||||
Console.WriteLine();
|
||||
Console.WriteLine($"✓ OCI image pinned: {fixtureDir}");
|
||||
Console.WriteLine($" Metadata: {metaPath}");
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("Usage in tests:");
|
||||
Console.WriteLine($" // Use pinned digest reference:");
|
||||
Console.WriteLine($" var imageRef = \"{pinnedRef}\";");
|
||||
Console.WriteLine();
|
||||
Console.WriteLine("Next steps:");
|
||||
Console.WriteLine("1. Add to fixtures.manifest.yml under 'oci' section");
|
||||
Console.WriteLine("2. Run: fixture-harvester validate --path src/__Tests/fixtures");
|
||||
}
|
||||
|
||||
private static (string registry, string repository, string tag) ParseImageRef(string imageRef)
|
||||
{
|
||||
var tag = "latest";
|
||||
var repository = imageRef;
|
||||
var registry = "docker.io";
|
||||
|
||||
// Extract tag
|
||||
var tagIndex = imageRef.LastIndexOf(':');
|
||||
var slashAfterTag = tagIndex > 0 ? imageRef.IndexOf('/', tagIndex) : -1;
|
||||
if (tagIndex > 0 && slashAfterTag < 0 && !imageRef.Substring(tagIndex + 1).Contains('/'))
|
||||
{
|
||||
tag = imageRef.Substring(tagIndex + 1);
|
||||
repository = imageRef.Substring(0, tagIndex);
|
||||
}
|
||||
|
||||
// Extract registry
|
||||
var firstSlash = repository.IndexOf('/');
|
||||
if (firstSlash > 0)
|
||||
{
|
||||
var possibleRegistry = repository.Substring(0, firstSlash);
|
||||
if (possibleRegistry.Contains('.') || possibleRegistry.Contains(':') || possibleRegistry == "localhost")
|
||||
{
|
||||
registry = possibleRegistry;
|
||||
repository = repository.Substring(firstSlash + 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle Docker Hub library images
|
||||
if (registry == "docker.io" && !repository.Contains('/'))
|
||||
{
|
||||
repository = $"library/{repository}";
|
||||
}
|
||||
|
||||
return (registry, repository, tag);
|
||||
}
|
||||
|
||||
private static async Task<string?> GetManifestDigestAsync(string registry, string repository, string tag)
|
||||
{
|
||||
try
|
||||
{
|
||||
using var client = new HttpClient();
|
||||
|
||||
// Handle Docker Hub auth token
|
||||
if (registry == "docker.io")
|
||||
{
|
||||
var tokenUrl = $"https://auth.docker.io/token?service=registry.docker.io&scope=repository:{repository}:pull";
|
||||
var tokenResponse = await client.GetStringAsync(tokenUrl);
|
||||
var tokenDoc = JsonDocument.Parse(tokenResponse);
|
||||
var token = tokenDoc.RootElement.GetProperty("token").GetString();
|
||||
client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", token);
|
||||
}
|
||||
|
||||
// Request manifest with digest header
|
||||
var registryHost = registry == "docker.io" ? "registry-1.docker.io" : registry;
|
||||
var manifestUrl = $"https://{registryHost}/v2/{repository}/manifests/{tag}";
|
||||
|
||||
var request = new HttpRequestMessage(HttpMethod.Head, manifestUrl);
|
||||
// Accept multiple manifest types
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.oci.image.manifest.v1+json"));
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.v2+json"));
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.list.v2+json"));
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.oci.image.index.v1+json"));
|
||||
|
||||
var response = await client.SendAsync(request);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
// Get Docker-Content-Digest header
|
||||
if (response.Headers.TryGetValues("Docker-Content-Digest", out var digestValues))
|
||||
{
|
||||
return digestValues.FirstOrDefault();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($" Warning: Could not fetch manifest: {ex.Message}");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<string?> GetConfigDigestAsync(string registry, string repository, string manifestDigest)
|
||||
{
|
||||
try
|
||||
{
|
||||
using var client = new HttpClient();
|
||||
|
||||
// Handle Docker Hub auth
|
||||
if (registry == "docker.io")
|
||||
{
|
||||
var tokenUrl = $"https://auth.docker.io/token?service=registry.docker.io&scope=repository:{repository}:pull";
|
||||
var tokenResponse = await client.GetStringAsync(tokenUrl);
|
||||
var tokenDoc = JsonDocument.Parse(tokenResponse);
|
||||
var token = tokenDoc.RootElement.GetProperty("token").GetString();
|
||||
client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", token);
|
||||
}
|
||||
|
||||
var registryHost = registry == "docker.io" ? "registry-1.docker.io" : registry;
|
||||
var manifestUrl = $"https://{registryHost}/v2/{repository}/manifests/{manifestDigest}";
|
||||
|
||||
var request = new HttpRequestMessage(HttpMethod.Get, manifestUrl);
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.oci.image.manifest.v1+json"));
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.v2+json"));
|
||||
|
||||
var response = await client.SendAsync(request);
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var content = await response.Content.ReadAsStringAsync();
|
||||
var doc = JsonDocument.Parse(content);
|
||||
|
||||
// Get config digest from manifest
|
||||
if (doc.RootElement.TryGetProperty("config", out var configElement) &&
|
||||
configElement.TryGetProperty("digest", out var digestElement))
|
||||
{
|
||||
return digestElement.GetString();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<bool> VerifyDigestAsync(string registry, string repository, string manifestDigest)
|
||||
{
|
||||
try
|
||||
{
|
||||
using var client = new HttpClient();
|
||||
|
||||
if (registry == "docker.io")
|
||||
{
|
||||
var tokenUrl = $"https://auth.docker.io/token?service=registry.docker.io&scope=repository:{repository}:pull";
|
||||
var tokenResponse = await client.GetStringAsync(tokenUrl);
|
||||
var tokenDoc = JsonDocument.Parse(tokenResponse);
|
||||
var token = tokenDoc.RootElement.GetProperty("token").GetString();
|
||||
client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", token);
|
||||
}
|
||||
|
||||
var registryHost = registry == "docker.io" ? "registry-1.docker.io" : registry;
|
||||
var manifestUrl = $"https://{registryHost}/v2/{repository}/manifests/{manifestDigest}";
|
||||
|
||||
var request = new HttpRequestMessage(HttpMethod.Get, manifestUrl);
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.oci.image.manifest.v1+json"));
|
||||
request.Headers.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/vnd.docker.distribution.manifest.v2+json"));
|
||||
|
||||
var response = await client.SendAsync(request);
|
||||
var content = await response.Content.ReadAsByteArrayAsync();
|
||||
|
||||
// Compute SHA256 of manifest content
|
||||
using var sha256 = SHA256.Create();
|
||||
var hash = sha256.ComputeHash(content);
|
||||
var computedDigest = "sha256:" + BitConverter.ToString(hash).Replace("-", string.Empty).ToLowerInvariant();
|
||||
|
||||
return computedDigest == manifestDigest;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static string SanitizeId(string input)
|
||||
{
|
||||
return new string(input.Where(c => char.IsLetterOrDigit(c) || c == '-' || c == '_').ToArray());
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Metadata for pinned OCI image fixtures.
|
||||
/// </summary>
|
||||
internal class OciFixtureMeta
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string ImageReference { get; set; } = string.Empty;
|
||||
public string PinnedReference { get; set; } = string.Empty;
|
||||
public string Registry { get; set; } = string.Empty;
|
||||
public string Repository { get; set; } = string.Empty;
|
||||
public string Tag { get; set; } = string.Empty;
|
||||
public string ManifestDigest { get; set; } = string.Empty;
|
||||
public string? ConfigDigest { get; set; }
|
||||
public string PinnedAt { get; set; } = string.Empty;
|
||||
public bool Verified { get; set; }
|
||||
public string Notes { get; set; } = string.Empty;
|
||||
public string RefreshPolicy { get; set; } = "manual";
|
||||
}
|
||||
507
src/__Tests/Tools/FixtureHarvester/Commands/SbomGoldenCommand.cs
Normal file
507
src/__Tests/Tools/FixtureHarvester/Commands/SbomGoldenCommand.cs
Normal file
@@ -0,0 +1,507 @@
|
||||
// <copyright file="SbomGoldenCommand.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Commands;
|
||||
|
||||
/// <summary>
|
||||
/// SBOM Golden command - generate SBOM golden fixtures from minimal container images.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-007)
|
||||
/// </summary>
|
||||
internal static class SbomGoldenCommand
|
||||
{
|
||||
private static readonly Dictionary<string, GoldenImageDefinition> KnownImages = new()
|
||||
{
|
||||
["alpine-minimal"] = new()
|
||||
{
|
||||
Id = "sbom-golden-alpine-minimal",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Minimal Alpine Linux (~5MB, ~14 packages)",
|
||||
ExpectedPackages = 14,
|
||||
Format = "cyclonedx",
|
||||
},
|
||||
["debian-slim"] = new()
|
||||
{
|
||||
Id = "sbom-golden-debian-slim",
|
||||
ImageRef = "debian:bookworm-slim",
|
||||
Description = "Debian Bookworm slim (~80MB, ~90 packages)",
|
||||
ExpectedPackages = 90,
|
||||
Format = "cyclonedx",
|
||||
},
|
||||
["distroless-static"] = new()
|
||||
{
|
||||
Id = "sbom-golden-distroless",
|
||||
ImageRef = "gcr.io/distroless/static-debian12:nonroot",
|
||||
Description = "Google Distroless static (minimal, ~2MB)",
|
||||
ExpectedPackages = 5,
|
||||
Format = "cyclonedx",
|
||||
},
|
||||
["scratch-go"] = new()
|
||||
{
|
||||
Id = "sbom-golden-scratch",
|
||||
ImageRef = "scratch",
|
||||
Description = "Empty scratch image (0 packages, filesystem only)",
|
||||
ExpectedPackages = 0,
|
||||
Format = "cyclonedx",
|
||||
},
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Execute the SBOM golden command to generate golden SBOM fixtures.
|
||||
/// </summary>
|
||||
internal static async Task ExecuteAsync(string image, string? format, string? scanner, string? output)
|
||||
{
|
||||
Console.WriteLine($"Generating SBOM golden fixture for: {image}");
|
||||
|
||||
var outputDir = output ?? "src/__Tests/fixtures/sbom";
|
||||
var sbomFormat = format ?? "cyclonedx";
|
||||
var scannerTool = scanner ?? "syft";
|
||||
|
||||
if (image.Equals("list", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ListKnownImages();
|
||||
return;
|
||||
}
|
||||
|
||||
if (image.Equals("all", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Console.WriteLine("Generating all known golden SBOMs...\n");
|
||||
foreach (var imageDef in KnownImages.Values)
|
||||
{
|
||||
await GenerateGoldenAsync(imageDef, outputDir, sbomFormat, scannerTool);
|
||||
Console.WriteLine();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (KnownImages.TryGetValue(image.ToLowerInvariant(), out var knownImage))
|
||||
{
|
||||
await GenerateGoldenAsync(knownImage, outputDir, sbomFormat, scannerTool);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Custom image reference
|
||||
var customImage = new GoldenImageDefinition
|
||||
{
|
||||
Id = $"sbom-golden-custom-{DateTime.UtcNow:yyyyMMddHHmmss}",
|
||||
ImageRef = image,
|
||||
Description = $"Custom image: {image}",
|
||||
ExpectedPackages = -1, // Unknown
|
||||
Format = sbomFormat,
|
||||
};
|
||||
await GenerateGoldenAsync(customImage, outputDir, sbomFormat, scannerTool);
|
||||
}
|
||||
}
|
||||
|
||||
private static void ListKnownImages()
|
||||
{
|
||||
Console.WriteLine("Known golden SBOM images:");
|
||||
Console.WriteLine();
|
||||
foreach (var (key, image) in KnownImages)
|
||||
{
|
||||
Console.WriteLine($" {key}");
|
||||
Console.WriteLine($" Image: {image.ImageRef}");
|
||||
Console.WriteLine($" Description: {image.Description}");
|
||||
Console.WriteLine($" Expected packages: ~{image.ExpectedPackages}");
|
||||
Console.WriteLine();
|
||||
}
|
||||
Console.WriteLine("Usage:");
|
||||
Console.WriteLine(" fixture-harvester sbom-golden <image-key>");
|
||||
Console.WriteLine(" fixture-harvester sbom-golden <custom-image-ref>");
|
||||
Console.WriteLine(" fixture-harvester sbom-golden all");
|
||||
}
|
||||
|
||||
private static async Task GenerateGoldenAsync(GoldenImageDefinition imageDef, string outputDir, string format, string scanner)
|
||||
{
|
||||
Console.WriteLine($"Generating: {imageDef.Id}");
|
||||
Console.WriteLine($" Image: {imageDef.ImageRef}");
|
||||
Console.WriteLine($" Format: {format}");
|
||||
Console.WriteLine($" Scanner: {scanner}");
|
||||
|
||||
var fixtureDir = Path.Combine(outputDir, imageDef.Id);
|
||||
Directory.CreateDirectory(fixtureDir);
|
||||
Directory.CreateDirectory(Path.Combine(fixtureDir, "raw"));
|
||||
Directory.CreateDirectory(Path.Combine(fixtureDir, "expected"));
|
||||
|
||||
// Check if scanner is available
|
||||
var scannerPath = await FindScannerAsync(scanner);
|
||||
string? sbomContent;
|
||||
string? imageDigest = null;
|
||||
int packageCount = 0;
|
||||
|
||||
if (scannerPath != null)
|
||||
{
|
||||
Console.WriteLine($" Using scanner: {scannerPath}");
|
||||
|
||||
// Pull image first to get digest
|
||||
imageDigest = await PullAndGetDigestAsync(imageDef.ImageRef);
|
||||
if (imageDigest != null)
|
||||
{
|
||||
Console.WriteLine($" Image digest: {imageDigest}");
|
||||
}
|
||||
|
||||
// Run scanner
|
||||
sbomContent = await RunScannerAsync(scannerPath, imageDef.ImageRef, format);
|
||||
if (sbomContent != null)
|
||||
{
|
||||
packageCount = CountPackages(sbomContent, format);
|
||||
Console.WriteLine($" Package count: {packageCount}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($" WARNING: Scanner '{scanner}' not found. Generating sample SBOM...");
|
||||
sbomContent = GenerateSampleSbom(imageDef, format);
|
||||
packageCount = imageDef.ExpectedPackages;
|
||||
}
|
||||
|
||||
if (sbomContent == null)
|
||||
{
|
||||
Console.WriteLine(" ERROR: Failed to generate SBOM.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Write SBOM
|
||||
var sbomFilename = $"{imageDef.Id}.{GetFormatExtension(format)}";
|
||||
var sbomPath = Path.Combine(fixtureDir, "raw", sbomFilename);
|
||||
await File.WriteAllTextAsync(sbomPath, sbomContent);
|
||||
|
||||
// Compute hash
|
||||
var sha256 = await ComputeSha256Async(sbomPath);
|
||||
Console.WriteLine($" SHA-256: {sha256}");
|
||||
|
||||
// Create expected outputs placeholder
|
||||
var expectedPath = Path.Combine(fixtureDir, "expected", "scan-result.json");
|
||||
var expectedContent = new
|
||||
{
|
||||
imageRef = imageDef.ImageRef,
|
||||
imageDigest = imageDigest,
|
||||
packageCount = packageCount,
|
||||
format = format,
|
||||
generated = DateTime.UtcNow.ToString("O"),
|
||||
note = "Replace with actual scan expectations after baseline verification",
|
||||
};
|
||||
await File.WriteAllTextAsync(expectedPath, JsonSerializer.Serialize(expectedContent, new JsonSerializerOptions { WriteIndented = true }));
|
||||
|
||||
// Create metadata
|
||||
var meta = new SbomGoldenMeta
|
||||
{
|
||||
Id = imageDef.Id,
|
||||
ImageRef = imageDef.ImageRef,
|
||||
ImageDigest = imageDigest,
|
||||
Description = imageDef.Description,
|
||||
Format = format,
|
||||
Scanner = scanner,
|
||||
PackageCount = packageCount,
|
||||
GeneratedAt = DateTime.UtcNow.ToString("O"),
|
||||
SbomFile = sbomFilename,
|
||||
Sha256 = sha256,
|
||||
RefreshPolicy = "manual",
|
||||
Notes = $"Golden SBOM for deterministic testing. Regenerate only when baseline changes.",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "meta.json");
|
||||
await File.WriteAllTextAsync(metaPath, JsonSerializer.Serialize(meta, new JsonSerializerOptions { WriteIndented = true }));
|
||||
|
||||
Console.WriteLine($" ✓ SBOM: {sbomPath}");
|
||||
Console.WriteLine($" ✓ Metadata: {metaPath}");
|
||||
Console.WriteLine($" ✓ Expected: {expectedPath}");
|
||||
}
|
||||
|
||||
private static async Task<string?> FindScannerAsync(string scanner)
|
||||
{
|
||||
var commands = scanner.ToLowerInvariant() switch
|
||||
{
|
||||
"syft" => new[] { "syft", "syft.exe" },
|
||||
"trivy" => new[] { "trivy", "trivy.exe" },
|
||||
"grype" => new[] { "grype", "grype.exe" },
|
||||
_ => new[] { scanner, $"{scanner}.exe" },
|
||||
};
|
||||
|
||||
foreach (var cmd in commands)
|
||||
{
|
||||
try
|
||||
{
|
||||
var psi = new ProcessStartInfo
|
||||
{
|
||||
FileName = OperatingSystem.IsWindows() ? "where" : "which",
|
||||
Arguments = cmd,
|
||||
RedirectStandardOutput = true,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = true,
|
||||
};
|
||||
|
||||
using var process = Process.Start(psi);
|
||||
if (process != null)
|
||||
{
|
||||
var output = await process.StandardOutput.ReadToEndAsync();
|
||||
await process.WaitForExitAsync();
|
||||
|
||||
if (process.ExitCode == 0 && !string.IsNullOrWhiteSpace(output))
|
||||
{
|
||||
return output.Split('\n').FirstOrDefault()?.Trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Continue to next option
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static async Task<string?> PullAndGetDigestAsync(string imageRef)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Pull image
|
||||
var pullPsi = new ProcessStartInfo
|
||||
{
|
||||
FileName = "docker",
|
||||
Arguments = $"pull {imageRef}",
|
||||
RedirectStandardOutput = true,
|
||||
RedirectStandardError = true,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = true,
|
||||
};
|
||||
|
||||
using var pullProcess = Process.Start(pullPsi);
|
||||
if (pullProcess != null)
|
||||
{
|
||||
await pullProcess.WaitForExitAsync();
|
||||
}
|
||||
|
||||
// Get digest
|
||||
var inspectPsi = new ProcessStartInfo
|
||||
{
|
||||
FileName = "docker",
|
||||
Arguments = $"inspect --format=\"{{{{.RepoDigests}}}}\" {imageRef}",
|
||||
RedirectStandardOutput = true,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = true,
|
||||
};
|
||||
|
||||
using var inspectProcess = Process.Start(inspectPsi);
|
||||
if (inspectProcess != null)
|
||||
{
|
||||
var output = await inspectProcess.StandardOutput.ReadToEndAsync();
|
||||
await inspectProcess.WaitForExitAsync();
|
||||
|
||||
// Parse digest from output like [image@sha256:abc123]
|
||||
var match = System.Text.RegularExpressions.Regex.Match(output, @"sha256:[a-f0-9]{64}");
|
||||
if (match.Success)
|
||||
{
|
||||
return match.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Docker not available
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static async Task<string?> RunScannerAsync(string scannerPath, string imageRef, string format)
|
||||
{
|
||||
try
|
||||
{
|
||||
var args = scannerPath.Contains("syft", StringComparison.OrdinalIgnoreCase)
|
||||
? $"{imageRef} -o {format}-json"
|
||||
: scannerPath.Contains("trivy", StringComparison.OrdinalIgnoreCase)
|
||||
? $"image --format {format} {imageRef}"
|
||||
: $"{imageRef} --format {format}";
|
||||
|
||||
var psi = new ProcessStartInfo
|
||||
{
|
||||
FileName = scannerPath,
|
||||
Arguments = args,
|
||||
RedirectStandardOutput = true,
|
||||
RedirectStandardError = true,
|
||||
UseShellExecute = false,
|
||||
CreateNoWindow = true,
|
||||
};
|
||||
|
||||
using var process = Process.Start(psi);
|
||||
if (process != null)
|
||||
{
|
||||
var output = await process.StandardOutput.ReadToEndAsync();
|
||||
await process.WaitForExitAsync();
|
||||
|
||||
if (process.ExitCode == 0 && !string.IsNullOrWhiteSpace(output))
|
||||
{
|
||||
return output;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Scanner failed
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string GenerateSampleSbom(GoldenImageDefinition imageDef, string format)
|
||||
{
|
||||
if (format.Contains("spdx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return GenerateSpdxSample(imageDef);
|
||||
}
|
||||
|
||||
return GenerateCycloneDxSample(imageDef);
|
||||
}
|
||||
|
||||
private static string GenerateCycloneDxSample(GoldenImageDefinition imageDef)
|
||||
{
|
||||
var components = new List<object>();
|
||||
for (int i = 0; i < Math.Max(1, imageDef.ExpectedPackages); i++)
|
||||
{
|
||||
components.Add(new
|
||||
{
|
||||
type = "library",
|
||||
name = $"sample-package-{i + 1}",
|
||||
version = $"1.{i}.0",
|
||||
purl = $"pkg:apk/alpine/sample-package-{i + 1}@1.{i}.0",
|
||||
});
|
||||
}
|
||||
|
||||
var sbom = new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.6",
|
||||
serialNumber = $"urn:uuid:{Guid.NewGuid()}",
|
||||
version = 1,
|
||||
metadata = new
|
||||
{
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
tools = new[]
|
||||
{
|
||||
new { vendor = "StellaOps", name = "FixtureHarvester", version = "1.0.0" }
|
||||
},
|
||||
component = new
|
||||
{
|
||||
type = "container",
|
||||
name = imageDef.ImageRef.Split(':')[0].Split('/').Last(),
|
||||
version = imageDef.ImageRef.Contains(':') ? imageDef.ImageRef.Split(':').Last() : "latest",
|
||||
purl = $"pkg:oci/{imageDef.ImageRef.Replace(':', '@')}",
|
||||
},
|
||||
},
|
||||
components = components,
|
||||
};
|
||||
|
||||
return JsonSerializer.Serialize(sbom, new JsonSerializerOptions { WriteIndented = true });
|
||||
}
|
||||
|
||||
private static string GenerateSpdxSample(GoldenImageDefinition imageDef)
|
||||
{
|
||||
var packages = new List<object>();
|
||||
for (int i = 0; i < Math.Max(1, imageDef.ExpectedPackages); i++)
|
||||
{
|
||||
packages.Add(new
|
||||
{
|
||||
SPDXID = $"SPDXRef-Package-{i + 1}",
|
||||
name = $"sample-package-{i + 1}",
|
||||
versionInfo = $"1.{i}.0",
|
||||
downloadLocation = "NOASSERTION",
|
||||
filesAnalyzed = false,
|
||||
});
|
||||
}
|
||||
|
||||
var sbom = new
|
||||
{
|
||||
spdxVersion = "SPDX-2.3",
|
||||
dataLicense = "CC0-1.0",
|
||||
SPDXID = "SPDXRef-DOCUMENT",
|
||||
name = imageDef.Id,
|
||||
documentNamespace = $"https://stellaops.dev/spdx/{imageDef.Id}",
|
||||
creationInfo = new
|
||||
{
|
||||
created = DateTime.UtcNow.ToString("O"),
|
||||
creators = new[] { "Tool: StellaOps-FixtureHarvester-1.0.0" },
|
||||
},
|
||||
packages = packages,
|
||||
};
|
||||
|
||||
return JsonSerializer.Serialize(sbom, new JsonSerializerOptions { WriteIndented = true });
|
||||
}
|
||||
|
||||
private static int CountPackages(string sbomContent, string format)
|
||||
{
|
||||
try
|
||||
{
|
||||
var doc = JsonDocument.Parse(sbomContent);
|
||||
|
||||
if (format.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (doc.RootElement.TryGetProperty("components", out var components))
|
||||
{
|
||||
return components.GetArrayLength();
|
||||
}
|
||||
}
|
||||
else if (format.Contains("spdx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (doc.RootElement.TryGetProperty("packages", out var packages))
|
||||
{
|
||||
return packages.GetArrayLength();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Parse error
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private static string GetFormatExtension(string format)
|
||||
{
|
||||
return format.ToLowerInvariant() switch
|
||||
{
|
||||
"cyclonedx" or "cyclonedx-json" => "cdx.json",
|
||||
"spdx" or "spdx-json" => "spdx.json",
|
||||
_ => "json",
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<string> ComputeSha256Async(string filePath)
|
||||
{
|
||||
using var sha256 = SHA256.Create();
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hashBytes = await sha256.ComputeHashAsync(stream);
|
||||
return "sha256:" + BitConverter.ToString(hashBytes).Replace("-", string.Empty).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
internal class GoldenImageDefinition
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string ImageRef { get; set; } = string.Empty;
|
||||
public string Description { get; set; } = string.Empty;
|
||||
public int ExpectedPackages { get; set; }
|
||||
public string Format { get; set; } = "cyclonedx";
|
||||
}
|
||||
|
||||
internal class SbomGoldenMeta
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string ImageRef { get; set; } = string.Empty;
|
||||
public string? ImageDigest { get; set; }
|
||||
public string Description { get; set; } = string.Empty;
|
||||
public string Format { get; set; } = string.Empty;
|
||||
public string Scanner { get; set; } = string.Empty;
|
||||
public int PackageCount { get; set; }
|
||||
public string GeneratedAt { get; set; } = string.Empty;
|
||||
public string SbomFile { get; set; } = string.Empty;
|
||||
public string Sha256 { get; set; } = string.Empty;
|
||||
public string RefreshPolicy { get; set; } = "manual";
|
||||
public string Notes { get; set; } = string.Empty;
|
||||
}
|
||||
436
src/__Tests/Tools/FixtureHarvester/Commands/VexSourceCommand.cs
Normal file
436
src/__Tests/Tools/FixtureHarvester/Commands/VexSourceCommand.cs
Normal file
@@ -0,0 +1,436 @@
|
||||
// <copyright file="VexSourceCommand.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Commands;
|
||||
|
||||
/// <summary>
|
||||
/// VEX Source command - acquire OpenVEX and CSAF samples for deterministic testing.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-006)
|
||||
/// </summary>
|
||||
internal static class VexSourceCommand
|
||||
{
|
||||
private static readonly Dictionary<string, VexSourceDefinition> KnownSources = new()
|
||||
{
|
||||
["openvex-examples"] = new()
|
||||
{
|
||||
Id = "vex-openvex-examples",
|
||||
Description = "Official OpenVEX specification examples",
|
||||
Urls = new[]
|
||||
{
|
||||
"https://raw.githubusercontent.com/openvex/examples/main/csaf/vex_container.json",
|
||||
"https://raw.githubusercontent.com/openvex/examples/main/csaf/vex_single_product.json",
|
||||
},
|
||||
Format = "openvex",
|
||||
},
|
||||
["csaf-redhat"] = new()
|
||||
{
|
||||
Id = "vex-csaf-redhat-sample",
|
||||
Description = "Red Hat CSAF VEX document samples",
|
||||
Urls = new[]
|
||||
{
|
||||
"https://access.redhat.com/security/data/csaf/v2/advisories/2024/rhsa-2024_0001.json",
|
||||
},
|
||||
Format = "csaf",
|
||||
},
|
||||
["alpine-secdb"] = new()
|
||||
{
|
||||
Id = "vex-alpine-secdb",
|
||||
Description = "Alpine Linux security database sample",
|
||||
Urls = new[]
|
||||
{
|
||||
"https://secdb.alpinelinux.org/v3.19/main.json",
|
||||
},
|
||||
Format = "alpine",
|
||||
},
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Execute the VEX source command to download VEX/CSAF samples.
|
||||
/// </summary>
|
||||
internal static async Task ExecuteAsync(string source, string? customUrl, string? output)
|
||||
{
|
||||
Console.WriteLine($"Sourcing VEX documents: {source}");
|
||||
|
||||
var outputDir = output ?? "src/__Tests/fixtures/vex";
|
||||
|
||||
if (source.Equals("list", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ListKnownSources();
|
||||
return;
|
||||
}
|
||||
|
||||
if (source.Equals("all", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Console.WriteLine("Fetching all known VEX sources...\n");
|
||||
foreach (var sourceDef in KnownSources.Values)
|
||||
{
|
||||
await FetchSourceAsync(sourceDef, outputDir);
|
||||
Console.WriteLine();
|
||||
}
|
||||
|
||||
// Generate sample OpenVEX
|
||||
await GenerateSampleOpenVexAsync(outputDir);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(customUrl))
|
||||
{
|
||||
var customSource = new VexSourceDefinition
|
||||
{
|
||||
Id = $"vex-custom-{DateTime.UtcNow:yyyyMMddHHmmss}",
|
||||
Description = $"Custom VEX source from {customUrl}",
|
||||
Urls = new[] { customUrl },
|
||||
Format = DetectFormat(customUrl),
|
||||
};
|
||||
await FetchSourceAsync(customSource, outputDir);
|
||||
return;
|
||||
}
|
||||
|
||||
if (KnownSources.TryGetValue(source.ToLowerInvariant(), out var knownSource))
|
||||
{
|
||||
await FetchSourceAsync(knownSource, outputDir);
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine($"Unknown source: {source}");
|
||||
Console.WriteLine("Use --list to see available sources or --url to specify a custom URL.");
|
||||
}
|
||||
}
|
||||
|
||||
private static void ListKnownSources()
|
||||
{
|
||||
Console.WriteLine("Known VEX sources:");
|
||||
Console.WriteLine();
|
||||
foreach (var (key, source) in KnownSources)
|
||||
{
|
||||
Console.WriteLine($" {key}");
|
||||
Console.WriteLine($" Format: {source.Format}");
|
||||
Console.WriteLine($" Description: {source.Description}");
|
||||
Console.WriteLine($" URLs: {source.Urls.Length}");
|
||||
Console.WriteLine();
|
||||
}
|
||||
Console.WriteLine("Usage:");
|
||||
Console.WriteLine(" fixture-harvester vex <source-name>");
|
||||
Console.WriteLine(" fixture-harvester vex --url <custom-url>");
|
||||
Console.WriteLine(" fixture-harvester vex all");
|
||||
}
|
||||
|
||||
private static async Task FetchSourceAsync(VexSourceDefinition source, string outputDir)
|
||||
{
|
||||
Console.WriteLine($"Fetching: {source.Id}");
|
||||
Console.WriteLine($" Format: {source.Format}");
|
||||
Console.WriteLine($" Description: {source.Description}");
|
||||
|
||||
var fixtureDir = Path.Combine(outputDir, source.Id);
|
||||
Directory.CreateDirectory(fixtureDir);
|
||||
Directory.CreateDirectory(Path.Combine(fixtureDir, "raw"));
|
||||
|
||||
using var client = new HttpClient();
|
||||
client.DefaultRequestHeaders.UserAgent.ParseAdd("StellaOps-FixtureHarvester/1.0");
|
||||
client.Timeout = TimeSpan.FromSeconds(30);
|
||||
|
||||
var fetchedFiles = new List<FetchedFile>();
|
||||
|
||||
foreach (var url in source.Urls)
|
||||
{
|
||||
try
|
||||
{
|
||||
Console.WriteLine($" Fetching: {url}");
|
||||
var response = await client.GetAsync(url);
|
||||
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
Console.WriteLine($" WARNING: {response.StatusCode} - skipping");
|
||||
continue;
|
||||
}
|
||||
|
||||
var content = await response.Content.ReadAsStringAsync();
|
||||
var filename = Path.GetFileName(new Uri(url).LocalPath);
|
||||
if (string.IsNullOrEmpty(filename) || filename == "/")
|
||||
{
|
||||
filename = $"vex_{DateTime.UtcNow:yyyyMMddHHmmss}.json";
|
||||
}
|
||||
|
||||
var rawPath = Path.Combine(fixtureDir, "raw", filename);
|
||||
await File.WriteAllTextAsync(rawPath, content);
|
||||
|
||||
var sha256 = await ComputeSha256Async(rawPath);
|
||||
fetchedFiles.Add(new FetchedFile
|
||||
{
|
||||
Filename = filename,
|
||||
Url = url,
|
||||
Sha256 = sha256,
|
||||
Size = new FileInfo(rawPath).Length,
|
||||
});
|
||||
|
||||
Console.WriteLine($" ✓ {filename} ({sha256.Substring(0, 20)}...)");
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($" ERROR: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
if (fetchedFiles.Count == 0)
|
||||
{
|
||||
Console.WriteLine(" WARNING: No files fetched. Generating sample...");
|
||||
await GenerateSampleVexAsync(source.Format, fixtureDir);
|
||||
fetchedFiles.Add(new FetchedFile
|
||||
{
|
||||
Filename = $"sample_{source.Format}.json",
|
||||
Url = "generated",
|
||||
Sha256 = await ComputeSha256Async(Path.Combine(fixtureDir, "raw", $"sample_{source.Format}.json")),
|
||||
Size = new FileInfo(Path.Combine(fixtureDir, "raw", $"sample_{source.Format}.json")).Length,
|
||||
});
|
||||
}
|
||||
|
||||
// Create metadata
|
||||
var meta = new VexFixtureMeta
|
||||
{
|
||||
Id = source.Id,
|
||||
Format = source.Format,
|
||||
Description = source.Description,
|
||||
FetchedAt = DateTime.UtcNow.ToString("O"),
|
||||
Files = fetchedFiles,
|
||||
RefreshPolicy = "quarterly",
|
||||
Notes = $"Fetched from {source.Urls.Length} source URL(s)",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "meta.json");
|
||||
var metaJson = JsonSerializer.Serialize(meta, new JsonSerializerOptions { WriteIndented = true });
|
||||
await File.WriteAllTextAsync(metaPath, metaJson);
|
||||
|
||||
Console.WriteLine($" ✓ Metadata: {metaPath}");
|
||||
Console.WriteLine($" ✓ Total files: {fetchedFiles.Count}");
|
||||
}
|
||||
|
||||
private static async Task GenerateSampleVexAsync(string format, string fixtureDir)
|
||||
{
|
||||
var samplePath = Path.Combine(fixtureDir, "raw", $"sample_{format}.json");
|
||||
|
||||
var sample = format.ToLowerInvariant() switch
|
||||
{
|
||||
"openvex" => GenerateOpenVexSample(),
|
||||
"csaf" => GenerateCsafSample(),
|
||||
_ => GenerateOpenVexSample(),
|
||||
};
|
||||
|
||||
await File.WriteAllTextAsync(samplePath, JsonSerializer.Serialize(sample, new JsonSerializerOptions { WriteIndented = true }));
|
||||
}
|
||||
|
||||
private static async Task GenerateSampleOpenVexAsync(string outputDir)
|
||||
{
|
||||
Console.WriteLine("Generating sample OpenVEX fixtures...");
|
||||
|
||||
var fixtureDir = Path.Combine(outputDir, "vex-openvex-samples");
|
||||
Directory.CreateDirectory(fixtureDir);
|
||||
Directory.CreateDirectory(Path.Combine(fixtureDir, "raw"));
|
||||
|
||||
var samples = new[]
|
||||
{
|
||||
("not_affected.json", GenerateOpenVexNotAffected()),
|
||||
("affected_fixed.json", GenerateOpenVexAffectedFixed()),
|
||||
("under_investigation.json", GenerateOpenVexUnderInvestigation()),
|
||||
};
|
||||
|
||||
var fetchedFiles = new List<FetchedFile>();
|
||||
|
||||
foreach (var (filename, content) in samples)
|
||||
{
|
||||
var path = Path.Combine(fixtureDir, "raw", filename);
|
||||
await File.WriteAllTextAsync(path, JsonSerializer.Serialize(content, new JsonSerializerOptions { WriteIndented = true }));
|
||||
var sha256 = await ComputeSha256Async(path);
|
||||
fetchedFiles.Add(new FetchedFile
|
||||
{
|
||||
Filename = filename,
|
||||
Url = "generated",
|
||||
Sha256 = sha256,
|
||||
Size = new FileInfo(path).Length,
|
||||
});
|
||||
Console.WriteLine($" ✓ {filename}");
|
||||
}
|
||||
|
||||
var meta = new VexFixtureMeta
|
||||
{
|
||||
Id = "vex-openvex-samples",
|
||||
Format = "openvex",
|
||||
Description = "Generated OpenVEX samples covering all status types",
|
||||
FetchedAt = DateTime.UtcNow.ToString("O"),
|
||||
Files = fetchedFiles,
|
||||
RefreshPolicy = "manual",
|
||||
Notes = "Generated samples for testing. Replace with real VEX documents when available.",
|
||||
};
|
||||
|
||||
var metaPath = Path.Combine(fixtureDir, "meta.json");
|
||||
await File.WriteAllTextAsync(metaPath, JsonSerializer.Serialize(meta, new JsonSerializerOptions { WriteIndented = true }));
|
||||
|
||||
Console.WriteLine($"✓ Sample OpenVEX fixtures: {fixtureDir}");
|
||||
}
|
||||
|
||||
private static object GenerateOpenVexSample() => new
|
||||
{
|
||||
@context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/sample-001",
|
||||
author = "StellaOps Fixture Harvester",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-0001" },
|
||||
products = new[] { new { @id = "pkg:oci/sample-image@sha256:abc123" } },
|
||||
status = "not_affected",
|
||||
justification = "vulnerable_code_not_in_execute_path",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexNotAffected() => new
|
||||
{
|
||||
@context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/not-affected-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1001" },
|
||||
products = new[] { new { @id = "pkg:oci/test-image@sha256:not-affected-digest" } },
|
||||
status = "not_affected",
|
||||
justification = "component_not_present",
|
||||
impact_statement = "The vulnerable component is not included in this image.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexAffectedFixed() => new
|
||||
{
|
||||
@context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/fixed-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1002" },
|
||||
products = new[] { new { @id = "pkg:oci/test-image@sha256:fixed-digest" } },
|
||||
status = "fixed",
|
||||
action_statement = "Update to version 2.0.0 or later.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexUnderInvestigation() => new
|
||||
{
|
||||
@context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/investigation-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1003" },
|
||||
products = new[] { new { @id = "pkg:oci/test-image@sha256:investigating-digest" } },
|
||||
status = "under_investigation",
|
||||
impact_statement = "Analysis in progress. Update expected within 48 hours.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateCsafSample() => new
|
||||
{
|
||||
document = new
|
||||
{
|
||||
category = "csaf_vex",
|
||||
csaf_version = "2.0",
|
||||
title = "Sample CSAF VEX Document",
|
||||
publisher = new
|
||||
{
|
||||
category = "vendor",
|
||||
name = "StellaOps Test",
|
||||
@namespace = "https://stellaops.dev",
|
||||
},
|
||||
tracking = new
|
||||
{
|
||||
id = "STELLA-VEX-2024-001",
|
||||
status = "final",
|
||||
version = "1.0.0",
|
||||
initial_release_date = DateTime.UtcNow.ToString("O"),
|
||||
current_release_date = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
},
|
||||
vulnerabilities = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
cve = "CVE-2024-0001",
|
||||
product_status = new
|
||||
{
|
||||
known_not_affected = new[] { "CSAFPID-0001" },
|
||||
},
|
||||
threats = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
category = "impact",
|
||||
details = "The vulnerable code is not reachable in this product configuration.",
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static string DetectFormat(string url)
|
||||
{
|
||||
if (url.Contains("openvex", StringComparison.OrdinalIgnoreCase)) return "openvex";
|
||||
if (url.Contains("csaf", StringComparison.OrdinalIgnoreCase)) return "csaf";
|
||||
if (url.Contains("secdb", StringComparison.OrdinalIgnoreCase)) return "alpine";
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
private static async Task<string> ComputeSha256Async(string filePath)
|
||||
{
|
||||
using var sha256 = SHA256.Create();
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hashBytes = await sha256.ComputeHashAsync(stream);
|
||||
return "sha256:" + BitConverter.ToString(hashBytes).Replace("-", string.Empty).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
internal class VexSourceDefinition
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string Description { get; set; } = string.Empty;
|
||||
public string[] Urls { get; set; } = Array.Empty<string>();
|
||||
public string Format { get; set; } = string.Empty;
|
||||
}
|
||||
|
||||
internal class FetchedFile
|
||||
{
|
||||
public string Filename { get; set; } = string.Empty;
|
||||
public string Url { get; set; } = string.Empty;
|
||||
public string Sha256 { get; set; } = string.Empty;
|
||||
public long Size { get; set; }
|
||||
}
|
||||
|
||||
internal class VexFixtureMeta
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string Format { get; set; } = string.Empty;
|
||||
public string Description { get; set; } = string.Empty;
|
||||
public string FetchedAt { get; set; } = string.Empty;
|
||||
public List<FetchedFile> Files { get; set; } = new();
|
||||
public string RefreshPolicy { get; set; } = "quarterly";
|
||||
public string Notes { get; set; } = string.Empty;
|
||||
}
|
||||
254
src/__Tests/Tools/FixtureHarvester/FeedSnapshotCommandTests.cs
Normal file
254
src/__Tests/Tools/FixtureHarvester/FeedSnapshotCommandTests.cs
Normal file
@@ -0,0 +1,254 @@
|
||||
// <copyright file="FeedSnapshotCommandTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for FeedSnapshotCommand.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-005)
|
||||
/// </summary>
|
||||
public sealed class FeedSnapshotCommandTests : IDisposable
|
||||
{
|
||||
private readonly string _testOutputDir;
|
||||
|
||||
public FeedSnapshotCommandTests()
|
||||
{
|
||||
_testOutputDir = Path.Combine(Path.GetTempPath(), $"fixture-harvester-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_testOutputDir);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_testOutputDir))
|
||||
{
|
||||
Directory.Delete(_testOutputDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("OSV", "/api/v1/feeds/osv/advisories")]
|
||||
[InlineData("GHSA", "/api/v1/feeds/ghsa/advisories")]
|
||||
[InlineData("NVD", "/api/v1/feeds/nvd/advisories")]
|
||||
[InlineData("EPSS", "/api/v1/feeds/epss/scores")]
|
||||
[InlineData("KEV", "/api/v1/feeds/kev/catalog")]
|
||||
[InlineData("OVAL", "/api/v1/feeds/oval/definitions")]
|
||||
public void GetFeedEndpoint_ReturnsCorrectPath(string feedType, string expectedEndpoint)
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = GetFeedEndpointTestHelper(feedType);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedEndpoint, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetFeedEndpoint_UnknownType_ReturnsGenericPath()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = GetFeedEndpointTestHelper("CUSTOM");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("/api/v1/feeds/custom/advisories", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSampleAdvisories_OSV_ReturnsValidFormat()
|
||||
{
|
||||
// Arrange
|
||||
var count = 5;
|
||||
|
||||
// Act
|
||||
var advisories = GenerateSampleAdvisoriesTestHelper("OSV", count);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(count, advisories.Count);
|
||||
foreach (var advisory in advisories)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(advisory);
|
||||
Assert.Contains("OSV-SAMPLE", json);
|
||||
Assert.Contains("affected", json);
|
||||
Assert.Contains("severity", json);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSampleAdvisories_GHSA_ReturnsValidFormat()
|
||||
{
|
||||
// Arrange
|
||||
var count = 3;
|
||||
|
||||
// Act
|
||||
var advisories = GenerateSampleAdvisoriesTestHelper("GHSA", count);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(count, advisories.Count);
|
||||
foreach (var advisory in advisories)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(advisory);
|
||||
Assert.Contains("ghsaId", json);
|
||||
Assert.Contains("GHSA-sample", json);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSampleAdvisories_NVD_ReturnsValidFormat()
|
||||
{
|
||||
// Arrange
|
||||
var count = 3;
|
||||
|
||||
// Act
|
||||
var advisories = GenerateSampleAdvisoriesTestHelper("NVD", count);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(count, advisories.Count);
|
||||
foreach (var advisory in advisories)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(advisory);
|
||||
Assert.Contains("cve", json);
|
||||
Assert.Contains("CVE-2024", json);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSampleAdvisories_DistributesEcosystems()
|
||||
{
|
||||
// Arrange
|
||||
var count = 10;
|
||||
|
||||
// Act
|
||||
var advisories = GenerateSampleAdvisoriesTestHelper("OSV", count);
|
||||
var json = string.Join("\n", advisories.Select(a => JsonSerializer.Serialize(a)));
|
||||
|
||||
// Assert - should have multiple ecosystems across 10 advisories
|
||||
var ecosystemCount = new[] { "PyPI", "npm", "Go", "Maven", "NuGet" }
|
||||
.Count(e => json.Contains(e));
|
||||
Assert.True(ecosystemCount >= 3, "Should distribute across at least 3 ecosystems");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSampleAdvisories_DistributesSeverities()
|
||||
{
|
||||
// Arrange
|
||||
var count = 10;
|
||||
|
||||
// Act
|
||||
var advisories = GenerateSampleAdvisoriesTestHelper("OSV", count);
|
||||
var json = string.Join("\n", advisories.Select(a => JsonSerializer.Serialize(a)));
|
||||
|
||||
// Assert - should have multiple severities
|
||||
var severityCount = new[] { "CRITICAL", "HIGH", "MEDIUM", "LOW" }
|
||||
.Count(s => json.Contains(s));
|
||||
Assert.True(severityCount >= 2, "Should distribute across at least 2 severity levels");
|
||||
}
|
||||
|
||||
// Helper that mirrors internal logic
|
||||
private static string GetFeedEndpointTestHelper(string feedType)
|
||||
{
|
||||
return feedType.ToUpperInvariant() switch
|
||||
{
|
||||
"OSV" => "/api/v1/feeds/osv/advisories",
|
||||
"GHSA" => "/api/v1/feeds/ghsa/advisories",
|
||||
"NVD" => "/api/v1/feeds/nvd/advisories",
|
||||
"EPSS" => "/api/v1/feeds/epss/scores",
|
||||
"KEV" => "/api/v1/feeds/kev/catalog",
|
||||
"OVAL" => "/api/v1/feeds/oval/definitions",
|
||||
_ => $"/api/v1/feeds/{feedType.ToLowerInvariant()}/advisories",
|
||||
};
|
||||
}
|
||||
|
||||
private static List<object> GenerateSampleAdvisoriesTestHelper(string feedType, int count)
|
||||
{
|
||||
var advisories = new List<object>();
|
||||
var ecosystems = new[] { "PyPI", "npm", "Go", "Maven", "NuGet", "RubyGems", "crates.io" };
|
||||
var severities = new[] { "CRITICAL", "HIGH", "MEDIUM", "LOW" };
|
||||
|
||||
for (int i = 1; i <= count; i++)
|
||||
{
|
||||
var ecosystem = ecosystems[i % ecosystems.Length];
|
||||
var severity = severities[i % severities.Length];
|
||||
|
||||
advisories.Add(feedType.ToUpperInvariant() switch
|
||||
{
|
||||
"OSV" => new
|
||||
{
|
||||
id = $"OSV-SAMPLE-{i:D4}",
|
||||
summary = $"Sample vulnerability {i} in {ecosystem} package",
|
||||
details = $"This is a sample {severity.ToLowerInvariant()} vulnerability for testing purposes.",
|
||||
affected = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
package = new { ecosystem = ecosystem, name = $"sample-package-{i}" },
|
||||
ranges = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
type = "ECOSYSTEM",
|
||||
events = new object[]
|
||||
{
|
||||
new { introduced = "0" },
|
||||
new { @fixed = $"1.{i}.0" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
severity = new[] { new { type = "CVSS_V3", score = $"{6.0 + (i % 4)}.{i % 10}" } },
|
||||
published = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
modified = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
"GHSA" => new
|
||||
{
|
||||
ghsaId = $"GHSA-sample-{i:D4}",
|
||||
summary = $"Sample GHSA vulnerability {i}",
|
||||
severity = severity,
|
||||
cvss = new { score = 6.0 + (i % 4), vectorString = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:N" },
|
||||
publishedAt = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
updatedAt = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
"NVD" => new
|
||||
{
|
||||
cve = new
|
||||
{
|
||||
id = $"CVE-2024-{10000 + i}",
|
||||
sourceIdentifier = "sample@stellaops.dev",
|
||||
published = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
lastModified = DateTime.UtcNow.ToString("O"),
|
||||
descriptions = new[]
|
||||
{
|
||||
new { lang = "en", value = $"Sample NVD vulnerability {i} for testing." }
|
||||
},
|
||||
metrics = new
|
||||
{
|
||||
cvssMetricV31 = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
cvssData = new
|
||||
{
|
||||
version = "3.1",
|
||||
baseScore = 6.0 + (i % 4),
|
||||
baseSeverity = severity
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => new
|
||||
{
|
||||
id = $"SAMPLE-{feedType.ToUpperInvariant()}-{i:D4}",
|
||||
type = feedType,
|
||||
severity = severity,
|
||||
created = DateTime.UtcNow.AddDays(-i).ToString("O"),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return advisories;
|
||||
}
|
||||
}
|
||||
@@ -12,8 +12,8 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.0">
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
@@ -24,3 +24,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
174
src/__Tests/Tools/FixtureHarvester/OciPinCommandTests.cs
Normal file
174
src/__Tests/Tools/FixtureHarvester/OciPinCommandTests.cs
Normal file
@@ -0,0 +1,174 @@
|
||||
// <copyright file="OciPinCommandTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for OciPinCommand.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-004)
|
||||
/// </summary>
|
||||
public sealed class OciPinCommandTests : IDisposable
|
||||
{
|
||||
private readonly string _testOutputDir;
|
||||
|
||||
public OciPinCommandTests()
|
||||
{
|
||||
_testOutputDir = Path.Combine(Path.GetTempPath(), $"fixture-harvester-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_testOutputDir);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_testOutputDir))
|
||||
{
|
||||
Directory.Delete(_testOutputDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_SimpleImage_ReturnsDockerHub()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("alpine:3.19");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("docker.io", result.Registry);
|
||||
Assert.Equal("library/alpine", result.Repository);
|
||||
Assert.Equal("3.19", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_ImageWithoutTag_DefaultsToLatest()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("nginx");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("docker.io", result.Registry);
|
||||
Assert.Equal("library/nginx", result.Repository);
|
||||
Assert.Equal("latest", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_NamespacedImage_ParsesCorrectly()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("myuser/myapp:v1.0.0");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("docker.io", result.Registry);
|
||||
Assert.Equal("myuser/myapp", result.Repository);
|
||||
Assert.Equal("v1.0.0", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_CustomRegistry_ParsesCorrectly()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("ghcr.io/owner/repo:latest");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("ghcr.io", result.Registry);
|
||||
Assert.Equal("owner/repo", result.Repository);
|
||||
Assert.Equal("latest", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_RegistryWithPort_ParsesCorrectly()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("localhost:5000/myimage:test");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("localhost:5000", result.Registry);
|
||||
Assert.Equal("myimage", result.Repository);
|
||||
Assert.Equal("test", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseImageRef_GcrImage_ParsesCorrectly()
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = ParseImageRefTestHelper("gcr.io/distroless/static-debian12:nonroot");
|
||||
|
||||
// Assert
|
||||
Assert.Equal("gcr.io", result.Registry);
|
||||
Assert.Equal("distroless/static-debian12", result.Repository);
|
||||
Assert.Equal("nonroot", result.Tag);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SanitizeId_RemovesInvalidCharacters()
|
||||
{
|
||||
// Arrange
|
||||
var input = "oci-my/image:v1.0+build";
|
||||
|
||||
// Act
|
||||
var result = SanitizeIdTestHelper(input);
|
||||
|
||||
// Assert
|
||||
Assert.DoesNotContain("/", result);
|
||||
Assert.DoesNotContain(":", result);
|
||||
Assert.DoesNotContain("+", result);
|
||||
Assert.Contains("oci", result);
|
||||
Assert.Contains("image", result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SanitizeId_PreservesValidCharacters()
|
||||
{
|
||||
// Arrange
|
||||
var input = "valid-id_123";
|
||||
|
||||
// Act
|
||||
var result = SanitizeIdTestHelper(input);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("valid-id_123", result);
|
||||
}
|
||||
|
||||
// Helper methods that mirror the internal logic for testing
|
||||
private static (string Registry, string Repository, string Tag) ParseImageRefTestHelper(string imageRef)
|
||||
{
|
||||
var tag = "latest";
|
||||
var repository = imageRef;
|
||||
var registry = "docker.io";
|
||||
|
||||
// Extract tag
|
||||
var tagIndex = imageRef.LastIndexOf(':');
|
||||
var slashAfterTag = tagIndex > 0 ? imageRef.IndexOf('/', tagIndex) : -1;
|
||||
if (tagIndex > 0 && slashAfterTag < 0 && !imageRef.Substring(tagIndex + 1).Contains('/'))
|
||||
{
|
||||
tag = imageRef.Substring(tagIndex + 1);
|
||||
repository = imageRef.Substring(0, tagIndex);
|
||||
}
|
||||
|
||||
// Extract registry
|
||||
var firstSlash = repository.IndexOf('/');
|
||||
if (firstSlash > 0)
|
||||
{
|
||||
var possibleRegistry = repository.Substring(0, firstSlash);
|
||||
if (possibleRegistry.Contains('.') || possibleRegistry.Contains(':') || possibleRegistry == "localhost")
|
||||
{
|
||||
registry = possibleRegistry;
|
||||
repository = repository.Substring(firstSlash + 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle Docker Hub library images
|
||||
if (registry == "docker.io" && !repository.Contains('/'))
|
||||
{
|
||||
repository = $"library/{repository}";
|
||||
}
|
||||
|
||||
return (registry, repository, tag);
|
||||
}
|
||||
|
||||
private static string SanitizeIdTestHelper(string input)
|
||||
{
|
||||
return new string(input.Where(c => char.IsLetterOrDigit(c) || c == '-' || c == '_').ToArray());
|
||||
}
|
||||
}
|
||||
@@ -67,9 +67,98 @@ internal static class Program
|
||||
regenCommand.AddOption(regenConfirmOption);
|
||||
regenCommand.SetHandler(RegenCommand.ExecuteAsync, regenFixtureOption, regenAllOption, regenConfirmOption);
|
||||
|
||||
// OCI Pin command (FH-004)
|
||||
var ociPinCommand = new Command("oci-pin", "Pin OCI image digests for deterministic testing");
|
||||
var ociImageOption = new Option<string>(
|
||||
"--image",
|
||||
description: "Image reference (e.g., alpine:3.19, myregistry.io/app:v1)") { IsRequired = true };
|
||||
var ociOutputOption = new Option<string>(
|
||||
"--output",
|
||||
description: "Output directory",
|
||||
getDefaultValue: () => "src/__Tests/fixtures/oci");
|
||||
var ociVerifyOption = new Option<bool>(
|
||||
"--verify",
|
||||
description: "Verify digest by re-fetching manifest",
|
||||
getDefaultValue: () => true);
|
||||
|
||||
ociPinCommand.AddOption(ociImageOption);
|
||||
ociPinCommand.AddOption(ociOutputOption);
|
||||
ociPinCommand.AddOption(ociVerifyOption);
|
||||
ociPinCommand.SetHandler(OciPinCommand.ExecuteAsync, ociImageOption, ociOutputOption, ociVerifyOption);
|
||||
|
||||
// Feed Snapshot command (FH-005)
|
||||
var feedSnapshotCommand = new Command("feed-snapshot", "Capture vulnerability feed snapshots");
|
||||
var feedTypeOption = new Option<string>(
|
||||
"--feed",
|
||||
description: "Feed type: osv, ghsa, nvd, epss, kev, oval") { IsRequired = true };
|
||||
var feedUrlOption = new Option<string>(
|
||||
"--url",
|
||||
description: "Concelier base URL",
|
||||
getDefaultValue: () => "http://localhost:5010");
|
||||
var feedCountOption = new Option<int>(
|
||||
"--count",
|
||||
description: "Number of advisories to capture",
|
||||
getDefaultValue: () => 30);
|
||||
var feedOutputOption = new Option<string>(
|
||||
"--output",
|
||||
description: "Output directory",
|
||||
getDefaultValue: () => "src/__Tests/fixtures/feeds");
|
||||
|
||||
feedSnapshotCommand.AddOption(feedTypeOption);
|
||||
feedSnapshotCommand.AddOption(feedUrlOption);
|
||||
feedSnapshotCommand.AddOption(feedCountOption);
|
||||
feedSnapshotCommand.AddOption(feedOutputOption);
|
||||
feedSnapshotCommand.SetHandler(FeedSnapshotCommand.ExecuteAsync, feedTypeOption, feedUrlOption, feedCountOption, feedOutputOption);
|
||||
|
||||
// VEX Source command (FH-006)
|
||||
var vexSourceCommand = new Command("vex", "Acquire OpenVEX and CSAF samples");
|
||||
var vexSourceArg = new Argument<string>(
|
||||
"source",
|
||||
description: "Source name (list, all, openvex-examples, csaf-redhat, alpine-secdb) or 'list' to see all");
|
||||
var vexCustomUrlOption = new Option<string>(
|
||||
"--url",
|
||||
description: "Custom VEX document URL");
|
||||
var vexOutputOption = new Option<string>(
|
||||
"--output",
|
||||
description: "Output directory",
|
||||
getDefaultValue: () => "src/__Tests/fixtures/vex");
|
||||
|
||||
vexSourceCommand.AddArgument(vexSourceArg);
|
||||
vexSourceCommand.AddOption(vexCustomUrlOption);
|
||||
vexSourceCommand.AddOption(vexOutputOption);
|
||||
vexSourceCommand.SetHandler(VexSourceCommand.ExecuteAsync, vexSourceArg, vexCustomUrlOption, vexOutputOption);
|
||||
|
||||
// SBOM Golden command (FH-007)
|
||||
var sbomGoldenCommand = new Command("sbom-golden", "Generate SBOM golden fixtures from container images");
|
||||
var sbomImageArg = new Argument<string>(
|
||||
"image",
|
||||
description: "Image key (list, all, alpine-minimal, debian-slim, distroless-static) or custom image ref");
|
||||
var sbomFormatOption = new Option<string>(
|
||||
"--format",
|
||||
description: "SBOM format: cyclonedx, spdx",
|
||||
getDefaultValue: () => "cyclonedx");
|
||||
var sbomScannerOption = new Option<string>(
|
||||
"--scanner",
|
||||
description: "Scanner tool: syft, trivy",
|
||||
getDefaultValue: () => "syft");
|
||||
var sbomOutputOption = new Option<string>(
|
||||
"--output",
|
||||
description: "Output directory",
|
||||
getDefaultValue: () => "src/__Tests/fixtures/sbom");
|
||||
|
||||
sbomGoldenCommand.AddArgument(sbomImageArg);
|
||||
sbomGoldenCommand.AddOption(sbomFormatOption);
|
||||
sbomGoldenCommand.AddOption(sbomScannerOption);
|
||||
sbomGoldenCommand.AddOption(sbomOutputOption);
|
||||
sbomGoldenCommand.SetHandler(SbomGoldenCommand.ExecuteAsync, sbomImageArg, sbomFormatOption, sbomScannerOption, sbomOutputOption);
|
||||
|
||||
rootCommand.AddCommand(harvestCommand);
|
||||
rootCommand.AddCommand(validateCommand);
|
||||
rootCommand.AddCommand(regenCommand);
|
||||
rootCommand.AddCommand(ociPinCommand);
|
||||
rootCommand.AddCommand(feedSnapshotCommand);
|
||||
rootCommand.AddCommand(vexSourceCommand);
|
||||
rootCommand.AddCommand(sbomGoldenCommand);
|
||||
|
||||
return await rootCommand.InvokeAsync(args);
|
||||
}
|
||||
|
||||
356
src/__Tests/Tools/FixtureHarvester/SbomGoldenCommandTests.cs
Normal file
356
src/__Tests/Tools/FixtureHarvester/SbomGoldenCommandTests.cs
Normal file
@@ -0,0 +1,356 @@
|
||||
// <copyright file="SbomGoldenCommandTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for SbomGoldenCommand.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-007)
|
||||
/// </summary>
|
||||
public sealed class SbomGoldenCommandTests : IDisposable
|
||||
{
|
||||
private readonly string _testOutputDir;
|
||||
|
||||
public SbomGoldenCommandTests()
|
||||
{
|
||||
_testOutputDir = Path.Combine(Path.GetTempPath(), $"fixture-harvester-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_testOutputDir);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_testOutputDir))
|
||||
{
|
||||
Directory.Delete(_testOutputDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void KnownImages_ContainsExpectedEntries()
|
||||
{
|
||||
// Arrange
|
||||
var expectedImages = new[] { "alpine-minimal", "debian-slim", "distroless-static", "scratch-go" };
|
||||
|
||||
// Act & Assert
|
||||
foreach (var image in expectedImages)
|
||||
{
|
||||
Assert.True(KnownImagesContainsTestHelper(image), $"Should contain {image}");
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("cyclonedx", "cdx.json")]
|
||||
[InlineData("cyclonedx-json", "cdx.json")]
|
||||
[InlineData("spdx", "spdx.json")]
|
||||
[InlineData("spdx-json", "spdx.json")]
|
||||
[InlineData("unknown", "json")]
|
||||
public void GetFormatExtension_ReturnsCorrectExtension(string format, string expectedExt)
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = GetFormatExtensionTestHelper(format);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedExt, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateCycloneDxSample_HasRequiredFields()
|
||||
{
|
||||
// Arrange
|
||||
var imageDef = new TestGoldenImageDefinition
|
||||
{
|
||||
Id = "test-image",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Test image",
|
||||
ExpectedPackages = 5,
|
||||
};
|
||||
|
||||
// Act
|
||||
var sbom = GenerateCycloneDxSampleTestHelper(imageDef);
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("CycloneDX", json);
|
||||
Assert.Contains("specVersion", json);
|
||||
Assert.Contains("1.6", json);
|
||||
Assert.Contains("metadata", json);
|
||||
Assert.Contains("components", json);
|
||||
Assert.Contains("serialNumber", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateCycloneDxSample_HasCorrectComponentCount()
|
||||
{
|
||||
// Arrange
|
||||
var imageDef = new TestGoldenImageDefinition
|
||||
{
|
||||
Id = "test-image",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Test image",
|
||||
ExpectedPackages = 10,
|
||||
};
|
||||
|
||||
// Act
|
||||
var sbom = GenerateCycloneDxSampleTestHelper(imageDef);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(sbom.components);
|
||||
Assert.Equal(10, sbom.components.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateCycloneDxSample_ComponentsHavePurl()
|
||||
{
|
||||
// Arrange
|
||||
var imageDef = new TestGoldenImageDefinition
|
||||
{
|
||||
Id = "test-image",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Test image",
|
||||
ExpectedPackages = 3,
|
||||
};
|
||||
|
||||
// Act
|
||||
var sbom = GenerateCycloneDxSampleTestHelper(imageDef);
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("pkg:apk", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSpdxSample_HasRequiredFields()
|
||||
{
|
||||
// Arrange
|
||||
var imageDef = new TestGoldenImageDefinition
|
||||
{
|
||||
Id = "test-image",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Test image",
|
||||
ExpectedPackages = 5,
|
||||
};
|
||||
|
||||
// Act
|
||||
var sbom = GenerateSpdxSampleTestHelper(imageDef);
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("SPDX-2.3", json);
|
||||
Assert.Contains("CC0-1.0", json);
|
||||
Assert.Contains("SPDXRef-DOCUMENT", json);
|
||||
Assert.Contains("creationInfo", json);
|
||||
Assert.Contains("packages", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateSpdxSample_PackagesHaveSpdxId()
|
||||
{
|
||||
// Arrange
|
||||
var imageDef = new TestGoldenImageDefinition
|
||||
{
|
||||
Id = "test-image",
|
||||
ImageRef = "alpine:3.19",
|
||||
Description = "Test image",
|
||||
ExpectedPackages = 3,
|
||||
};
|
||||
|
||||
// Act
|
||||
var sbom = GenerateSpdxSampleTestHelper(imageDef);
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("SPDXRef-Package", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CountPackages_CycloneDx_ReturnsCorrectCount()
|
||||
{
|
||||
// Arrange
|
||||
var sbom = new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
components = new[]
|
||||
{
|
||||
new { name = "pkg1" },
|
||||
new { name = "pkg2" },
|
||||
new { name = "pkg3" },
|
||||
}
|
||||
};
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Act
|
||||
var count = CountPackagesTestHelper(json, "cyclonedx");
|
||||
|
||||
// Assert
|
||||
Assert.Equal(3, count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CountPackages_Spdx_ReturnsCorrectCount()
|
||||
{
|
||||
// Arrange
|
||||
var sbom = new
|
||||
{
|
||||
spdxVersion = "SPDX-2.3",
|
||||
packages = new[]
|
||||
{
|
||||
new { SPDXID = "SPDXRef-Package-1" },
|
||||
new { SPDXID = "SPDXRef-Package-2" },
|
||||
}
|
||||
};
|
||||
var json = JsonSerializer.Serialize(sbom);
|
||||
|
||||
// Act
|
||||
var count = CountPackagesTestHelper(json, "spdx");
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CountPackages_InvalidJson_ReturnsZero()
|
||||
{
|
||||
// Arrange
|
||||
var invalidJson = "not valid json";
|
||||
|
||||
// Act
|
||||
var count = CountPackagesTestHelper(invalidJson, "cyclonedx");
|
||||
|
||||
// Assert
|
||||
Assert.Equal(0, count);
|
||||
}
|
||||
|
||||
// Helper types and methods
|
||||
private class TestGoldenImageDefinition
|
||||
{
|
||||
public string Id { get; set; } = string.Empty;
|
||||
public string ImageRef { get; set; } = string.Empty;
|
||||
public string Description { get; set; } = string.Empty;
|
||||
public int ExpectedPackages { get; set; }
|
||||
}
|
||||
|
||||
private static bool KnownImagesContainsTestHelper(string image)
|
||||
{
|
||||
var knownImages = new Dictionary<string, bool>
|
||||
{
|
||||
["alpine-minimal"] = true,
|
||||
["debian-slim"] = true,
|
||||
["distroless-static"] = true,
|
||||
["scratch-go"] = true,
|
||||
};
|
||||
return knownImages.ContainsKey(image.ToLowerInvariant());
|
||||
}
|
||||
|
||||
private static string GetFormatExtensionTestHelper(string format)
|
||||
{
|
||||
return format.ToLowerInvariant() switch
|
||||
{
|
||||
"cyclonedx" or "cyclonedx-json" => "cdx.json",
|
||||
"spdx" or "spdx-json" => "spdx.json",
|
||||
_ => "json",
|
||||
};
|
||||
}
|
||||
|
||||
private static dynamic GenerateCycloneDxSampleTestHelper(TestGoldenImageDefinition imageDef)
|
||||
{
|
||||
var components = new List<object>();
|
||||
for (int i = 0; i < Math.Max(1, imageDef.ExpectedPackages); i++)
|
||||
{
|
||||
components.Add(new
|
||||
{
|
||||
type = "library",
|
||||
name = $"sample-package-{i + 1}",
|
||||
version = $"1.{i}.0",
|
||||
purl = $"pkg:apk/alpine/sample-package-{i + 1}@1.{i}.0",
|
||||
});
|
||||
}
|
||||
|
||||
return new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.6",
|
||||
serialNumber = $"urn:uuid:{Guid.NewGuid()}",
|
||||
version = 1,
|
||||
metadata = new
|
||||
{
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
tools = new[]
|
||||
{
|
||||
new { vendor = "StellaOps", name = "FixtureHarvester", version = "1.0.0" }
|
||||
},
|
||||
component = new
|
||||
{
|
||||
type = "container",
|
||||
name = imageDef.ImageRef.Split(':')[0].Split('/').Last(),
|
||||
version = imageDef.ImageRef.Contains(':') ? imageDef.ImageRef.Split(':').Last() : "latest",
|
||||
purl = $"pkg:oci/{imageDef.ImageRef.Replace(':', '@')}",
|
||||
},
|
||||
},
|
||||
components = components,
|
||||
};
|
||||
}
|
||||
|
||||
private static dynamic GenerateSpdxSampleTestHelper(TestGoldenImageDefinition imageDef)
|
||||
{
|
||||
var packages = new List<object>();
|
||||
for (int i = 0; i < Math.Max(1, imageDef.ExpectedPackages); i++)
|
||||
{
|
||||
packages.Add(new
|
||||
{
|
||||
SPDXID = $"SPDXRef-Package-{i + 1}",
|
||||
name = $"sample-package-{i + 1}",
|
||||
versionInfo = $"1.{i}.0",
|
||||
downloadLocation = "NOASSERTION",
|
||||
filesAnalyzed = false,
|
||||
});
|
||||
}
|
||||
|
||||
return new
|
||||
{
|
||||
spdxVersion = "SPDX-2.3",
|
||||
dataLicense = "CC0-1.0",
|
||||
SPDXID = "SPDXRef-DOCUMENT",
|
||||
name = imageDef.Id,
|
||||
documentNamespace = $"https://stellaops.dev/spdx/{imageDef.Id}",
|
||||
creationInfo = new
|
||||
{
|
||||
created = DateTime.UtcNow.ToString("O"),
|
||||
creators = new[] { "Tool: StellaOps-FixtureHarvester-1.0.0" },
|
||||
},
|
||||
packages = packages,
|
||||
};
|
||||
}
|
||||
|
||||
private static int CountPackagesTestHelper(string sbomContent, string format)
|
||||
{
|
||||
try
|
||||
{
|
||||
var doc = JsonDocument.Parse(sbomContent);
|
||||
|
||||
if (format.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (doc.RootElement.TryGetProperty("components", out var components))
|
||||
{
|
||||
return components.GetArrayLength();
|
||||
}
|
||||
}
|
||||
else if (format.Contains("spdx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (doc.RootElement.TryGetProperty("packages", out var packages))
|
||||
{
|
||||
return packages.GetArrayLength();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Parse error
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
263
src/__Tests/Tools/FixtureHarvester/VexSourceCommandTests.cs
Normal file
263
src/__Tests/Tools/FixtureHarvester/VexSourceCommandTests.cs
Normal file
@@ -0,0 +1,263 @@
|
||||
// <copyright file="VexSourceCommandTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Testing.FixtureHarvester.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for VexSourceCommand.
|
||||
/// @sprint SPRINT_20251229_004_LIB_fixture_harvester (FH-006)
|
||||
/// </summary>
|
||||
public sealed class VexSourceCommandTests : IDisposable
|
||||
{
|
||||
private readonly string _testOutputDir;
|
||||
|
||||
public VexSourceCommandTests()
|
||||
{
|
||||
_testOutputDir = Path.Combine(Path.GetTempPath(), $"fixture-harvester-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_testOutputDir);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_testOutputDir))
|
||||
{
|
||||
Directory.Delete(_testOutputDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("https://example.com/openvex/doc.json", "openvex")]
|
||||
[InlineData("https://access.redhat.com/security/data/csaf/v2/advisories/test.json", "csaf")]
|
||||
[InlineData("https://secdb.alpinelinux.org/v3.19/main.json", "alpine")]
|
||||
[InlineData("https://example.com/unknown/doc.json", "unknown")]
|
||||
public void DetectFormat_ReturnsCorrectFormat(string url, string expectedFormat)
|
||||
{
|
||||
// Arrange & Act
|
||||
var result = DetectFormatTestHelper(url);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedFormat, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateOpenVexSample_HasRequiredFields()
|
||||
{
|
||||
// Arrange & Act
|
||||
var sample = GenerateOpenVexSampleTestHelper();
|
||||
var json = JsonSerializer.Serialize(sample);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("https://openvex.dev/ns", json);
|
||||
Assert.Contains("statements", json);
|
||||
Assert.Contains("not_affected", json);
|
||||
Assert.Contains("vulnerability", json);
|
||||
Assert.Contains("products", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateOpenVexNotAffected_HasCorrectStatus()
|
||||
{
|
||||
// Arrange & Act
|
||||
var sample = GenerateOpenVexNotAffectedTestHelper();
|
||||
var json = JsonSerializer.Serialize(sample);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("not_affected", json);
|
||||
Assert.Contains("component_not_present", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateOpenVexAffectedFixed_HasCorrectStatus()
|
||||
{
|
||||
// Arrange & Act
|
||||
var sample = GenerateOpenVexAffectedFixedTestHelper();
|
||||
var json = JsonSerializer.Serialize(sample);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("\"fixed\"", json);
|
||||
Assert.Contains("action_statement", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateOpenVexUnderInvestigation_HasCorrectStatus()
|
||||
{
|
||||
// Arrange & Act
|
||||
var sample = GenerateOpenVexUnderInvestigationTestHelper();
|
||||
var json = JsonSerializer.Serialize(sample);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("under_investigation", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GenerateCsafSample_HasRequiredFields()
|
||||
{
|
||||
// Arrange & Act
|
||||
var sample = GenerateCsafSampleTestHelper();
|
||||
var json = JsonSerializer.Serialize(sample);
|
||||
|
||||
// Assert
|
||||
Assert.Contains("csaf_vex", json);
|
||||
Assert.Contains("document", json);
|
||||
Assert.Contains("vulnerabilities", json);
|
||||
Assert.Contains("tracking", json);
|
||||
Assert.Contains("publisher", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void KnownSources_ContainsExpectedEntries()
|
||||
{
|
||||
// Arrange
|
||||
var expectedSources = new[] { "openvex-examples", "csaf-redhat", "alpine-secdb" };
|
||||
|
||||
// Act & Assert
|
||||
foreach (var source in expectedSources)
|
||||
{
|
||||
Assert.True(KnownSourcesContainsTestHelper(source), $"Should contain {source}");
|
||||
}
|
||||
}
|
||||
|
||||
// Helper methods that mirror internal logic
|
||||
private static string DetectFormatTestHelper(string url)
|
||||
{
|
||||
if (url.Contains("openvex", StringComparison.OrdinalIgnoreCase)) return "openvex";
|
||||
if (url.Contains("csaf", StringComparison.OrdinalIgnoreCase)) return "csaf";
|
||||
if (url.Contains("secdb", StringComparison.OrdinalIgnoreCase)) return "alpine";
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
private static object GenerateOpenVexSampleTestHelper() => new
|
||||
{
|
||||
context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/sample-001",
|
||||
author = "StellaOps Fixture Harvester",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-0001" },
|
||||
products = new[] { new { id = "pkg:oci/sample-image@sha256:abc123" } },
|
||||
status = "not_affected",
|
||||
justification = "vulnerable_code_not_in_execute_path",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexNotAffectedTestHelper() => new
|
||||
{
|
||||
context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/not-affected-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1001" },
|
||||
products = new[] { new { id = "pkg:oci/test-image@sha256:not-affected-digest" } },
|
||||
status = "not_affected",
|
||||
justification = "component_not_present",
|
||||
impact_statement = "The vulnerable component is not included in this image.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexAffectedFixedTestHelper() => new
|
||||
{
|
||||
context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/fixed-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1002" },
|
||||
products = new[] { new { id = "pkg:oci/test-image@sha256:fixed-digest" } },
|
||||
status = "fixed",
|
||||
action_statement = "Update to version 2.0.0 or later.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateOpenVexUnderInvestigationTestHelper() => new
|
||||
{
|
||||
context = "https://openvex.dev/ns/v0.2.0",
|
||||
id = "https://stellaops.dev/vex/investigation-001",
|
||||
author = "StellaOps Test",
|
||||
timestamp = DateTime.UtcNow.ToString("O"),
|
||||
version = 1,
|
||||
statements = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
vulnerability = new { name = "CVE-2024-1003" },
|
||||
products = new[] { new { id = "pkg:oci/test-image@sha256:investigating-digest" } },
|
||||
status = "under_investigation",
|
||||
impact_statement = "Analysis in progress. Update expected within 48 hours.",
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static object GenerateCsafSampleTestHelper() => new
|
||||
{
|
||||
document = new
|
||||
{
|
||||
category = "csaf_vex",
|
||||
csaf_version = "2.0",
|
||||
title = "Sample CSAF VEX Document",
|
||||
publisher = new
|
||||
{
|
||||
category = "vendor",
|
||||
name = "StellaOps Test",
|
||||
@namespace = "https://stellaops.dev",
|
||||
},
|
||||
tracking = new
|
||||
{
|
||||
id = "STELLA-VEX-2024-001",
|
||||
status = "final",
|
||||
version = "1.0.0",
|
||||
initial_release_date = DateTime.UtcNow.ToString("O"),
|
||||
current_release_date = DateTime.UtcNow.ToString("O"),
|
||||
},
|
||||
},
|
||||
vulnerabilities = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
cve = "CVE-2024-0001",
|
||||
product_status = new
|
||||
{
|
||||
known_not_affected = new[] { "CSAFPID-0001" },
|
||||
},
|
||||
threats = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
category = "impact",
|
||||
details = "The vulnerable code is not reachable in this product configuration.",
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
private static bool KnownSourcesContainsTestHelper(string source)
|
||||
{
|
||||
var knownSources = new Dictionary<string, bool>
|
||||
{
|
||||
["openvex-examples"] = true,
|
||||
["csaf-redhat"] = true,
|
||||
["alpine-secdb"] = true,
|
||||
};
|
||||
return knownSources.ContainsKey(source.ToLowerInvariant());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
# Expected output for GitHub gate template validation
|
||||
# This file is used to verify deterministic template generation
|
||||
|
||||
name: StellaOps Release Gate
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, release/*]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
security-events: write
|
||||
|
||||
jobs:
|
||||
gate:
|
||||
name: Release Gate Evaluation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"version": "1.0.0",
|
||||
"description": "CI template validation manifest for E2E tests",
|
||||
"platforms": ["github", "gitlab", "gitea"],
|
||||
"templateTypes": ["gate", "scan", "verify", "full"],
|
||||
"modes": ["basic", "strict", "airgap"],
|
||||
"expectedOutputs": {
|
||||
"github": {
|
||||
"gate": ".github/workflows/stellaops-gate.yml",
|
||||
"scan": ".github/workflows/stellaops-scan.yml",
|
||||
"verify": ".github/workflows/stellaops-verify.yml"
|
||||
},
|
||||
"gitlab": {
|
||||
"gate": ".gitlab-ci.yml",
|
||||
"scan": ".gitlab/stellaops-scan.yml",
|
||||
"verify": ".gitlab/stellaops-verify.yml"
|
||||
},
|
||||
"gitea": {
|
||||
"gate": ".gitea/workflows/stellaops-gate.yml",
|
||||
"scan": ".gitea/workflows/stellaops-scan.yml",
|
||||
"verify": ".gitea/workflows/stellaops-verify.yml"
|
||||
}
|
||||
},
|
||||
"requiredKeywords": {
|
||||
"gate": ["stellaops", "gate", "evaluate", "baseline"],
|
||||
"scan": ["stellaops", "scan", "sbom"],
|
||||
"verify": ["stellaops", "verify", "require"]
|
||||
},
|
||||
"deterministicFields": [
|
||||
"name",
|
||||
"on",
|
||||
"permissions",
|
||||
"jobs"
|
||||
]
|
||||
}
|
||||
19
src/__Tests/__Datasets/Integrations/Registry/acr-push.json
Normal file
19
src/__Tests/__Datasets/Integrations/Registry/acr-push.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"id": "acr-event-001",
|
||||
"timestamp": "2024-12-29T12:00:00.000Z",
|
||||
"action": "push",
|
||||
"target": {
|
||||
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
"size": 3028,
|
||||
"digest": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4",
|
||||
"length": 3028,
|
||||
"repository": "stellaops/api-gateway",
|
||||
"tag": "1.0.0"
|
||||
},
|
||||
"request": {
|
||||
"id": "req-12345",
|
||||
"host": "stellaops.azurecr.io",
|
||||
"method": "PUT",
|
||||
"useragent": "docker/20.10.21 go/go1.18.10"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"callback_url": "https://registry.hub.docker.com/u/stellaops/scanner/hook/1234567890",
|
||||
"push_data": {
|
||||
"pushed_at": 1703836800,
|
||||
"pusher": "stellaops-bot",
|
||||
"tag": "v2.0.0"
|
||||
},
|
||||
"repository": {
|
||||
"comment_count": 0,
|
||||
"date_created": 1703836700,
|
||||
"description": "StellaOps container scanner",
|
||||
"dockerfile": "FROM alpine:3.18\nRUN apk add --no-cache ca-certificates",
|
||||
"full_description": "# StellaOps Scanner\n\nContainer vulnerability scanner.",
|
||||
"is_official": false,
|
||||
"is_private": false,
|
||||
"is_trusted": true,
|
||||
"name": "scanner",
|
||||
"namespace": "stellaops",
|
||||
"owner": "stellaops",
|
||||
"repo_name": "stellaops/scanner",
|
||||
"repo_url": "https://registry.hub.docker.com/v2/repositories/stellaops/scanner",
|
||||
"star_count": 42,
|
||||
"status": "Active"
|
||||
}
|
||||
}
|
||||
19
src/__Tests/__Datasets/Integrations/Registry/ecr-push.json
Normal file
19
src/__Tests/__Datasets/Integrations/Registry/ecr-push.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"version": "0",
|
||||
"id": "12345678-1234-1234-1234-123456789abc",
|
||||
"detail-type": "ECR Image Action",
|
||||
"source": "aws.ecr",
|
||||
"account": "123456789012",
|
||||
"time": "2024-12-29T12:00:00Z",
|
||||
"region": "us-east-1",
|
||||
"resources": [
|
||||
"arn:aws:ecr:us-east-1:123456789012:repository/stellaops/scanner"
|
||||
],
|
||||
"detail": {
|
||||
"action-type": "PUSH",
|
||||
"repository-name": "stellaops/scanner",
|
||||
"image-digest": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4",
|
||||
"image-tag": "v3.1.0",
|
||||
"result": "SUCCESS"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"message": {
|
||||
"data": "eyJhY3Rpb24iOiJJTlNFUlQiLCJkaWdlc3QiOiJzaGEyNTY6YTNlZDk1Y2FlYjAyZmZlNjhjZGQ5ZmQ4NDQwNjY4MGFlOTNkNjMzY2IxNjQyMmQwMGU4YTdjMjI5NTViNDZkNCIsInRhZyI6InYyLjUuMCJ9",
|
||||
"messageId": "gcr-msg-12345",
|
||||
"publishTime": "2024-12-29T12:00:00.000Z"
|
||||
},
|
||||
"subscription": "projects/stellaops-project/subscriptions/gcr-push-subscription"
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
{
|
||||
"action": "published",
|
||||
"package": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops-cli",
|
||||
"namespace": "stellaops",
|
||||
"description": "StellaOps command-line interface",
|
||||
"ecosystem": "container",
|
||||
"package_type": "container",
|
||||
"html_url": "https://github.com/orgs/stellaops/packages/container/package/stellaops-cli",
|
||||
"created_at": "2024-12-29T11:00:00Z",
|
||||
"updated_at": "2024-12-29T12:00:00Z",
|
||||
"owner": {
|
||||
"login": "stellaops",
|
||||
"id": 87654321,
|
||||
"type": "Organization"
|
||||
},
|
||||
"package_version": {
|
||||
"id": 98765432,
|
||||
"version": "v4.0.0",
|
||||
"summary": "Container release v4.0.0",
|
||||
"body": "## Release Notes\n- New scan engine\n- Improved performance",
|
||||
"body_html": "<h2>Release Notes</h2><ul><li>New scan engine</li><li>Improved performance</li></ul>",
|
||||
"release": {
|
||||
"url": "https://api.github.com/repos/stellaops/stellaops-cli/releases/12345678",
|
||||
"html_url": "https://github.com/stellaops/stellaops-cli/releases/tag/v4.0.0",
|
||||
"id": 12345678,
|
||||
"tag_name": "v4.0.0",
|
||||
"target_commitish": "main",
|
||||
"name": "v4.0.0",
|
||||
"draft": false,
|
||||
"prerelease": false
|
||||
},
|
||||
"manifest": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4",
|
||||
"html_url": "https://github.com/orgs/stellaops/packages/container/stellaops-cli/98765432",
|
||||
"tag_name": "v4.0.0",
|
||||
"container_metadata": {
|
||||
"tag": {
|
||||
"digest": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4",
|
||||
"name": "v4.0.0"
|
||||
}
|
||||
}
|
||||
},
|
||||
"registry": {
|
||||
"about_url": "https://docs.github.com/packages",
|
||||
"name": "GitHub Container Registry",
|
||||
"type": "ghcr",
|
||||
"url": "https://ghcr.io",
|
||||
"vendor": "GitHub Inc."
|
||||
}
|
||||
},
|
||||
"repository": {
|
||||
"id": 11111111,
|
||||
"name": "stellaops-cli",
|
||||
"full_name": "stellaops/stellaops-cli",
|
||||
"private": false
|
||||
},
|
||||
"sender": {
|
||||
"login": "release-bot",
|
||||
"id": 99999999,
|
||||
"type": "Bot"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"type": "PUSH_ARTIFACT",
|
||||
"occur_at": 1703836800,
|
||||
"operator": "admin",
|
||||
"event_data": {
|
||||
"resources": [
|
||||
{
|
||||
"digest": "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4",
|
||||
"tag": "v1.2.3",
|
||||
"resource_url": "harbor.example.com/library/nginx:v1.2.3"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"date_created": 1703836700,
|
||||
"name": "nginx",
|
||||
"namespace": "library",
|
||||
"repo_full_name": "library/nginx",
|
||||
"repo_type": "public"
|
||||
}
|
||||
}
|
||||
}
|
||||
94
src/__Tests/__Datasets/Integrations/Scm/gitea-push.json
Normal file
94
src/__Tests/__Datasets/Integrations/Scm/gitea-push.json
Normal file
@@ -0,0 +1,94 @@
|
||||
{
|
||||
"secret": "",
|
||||
"ref": "refs/heads/main",
|
||||
"before": "0000000000000000000000000000000000000000",
|
||||
"after": "abc123def456789012345678901234567890abcd",
|
||||
"compare_url": "https://gitea.example.com/stellaops-org/stellaops/compare/000000000000...abc123def456",
|
||||
"commits": [
|
||||
{
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"message": "feat: add new scanner analyzer\n\nAdds support for Python wheel analysis.",
|
||||
"url": "https://gitea.example.com/stellaops-org/stellaops/commit/abc123def456789012345678901234567890abcd",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"username": "developer"
|
||||
},
|
||||
"committer": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"username": "developer"
|
||||
},
|
||||
"verification": null,
|
||||
"timestamp": "2024-12-29T12:00:00Z",
|
||||
"added": ["src/Scanner/Analyzers/PythonWheel.cs"],
|
||||
"removed": [],
|
||||
"modified": ["src/Scanner/Scanner.csproj"]
|
||||
}
|
||||
],
|
||||
"head_commit": {
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"message": "feat: add new scanner analyzer",
|
||||
"url": "https://gitea.example.com/stellaops-org/stellaops/commit/abc123def456789012345678901234567890abcd",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"username": "developer"
|
||||
},
|
||||
"timestamp": "2024-12-29T12:00:00Z"
|
||||
},
|
||||
"repository": {
|
||||
"id": 12345,
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"login": "stellaops-org",
|
||||
"full_name": "StellaOps Organization",
|
||||
"email": "org@stellaops.io",
|
||||
"avatar_url": "https://gitea.example.com/avatars/1",
|
||||
"username": "stellaops-org"
|
||||
},
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops",
|
||||
"description": "Sovereign container security platform",
|
||||
"empty": false,
|
||||
"private": false,
|
||||
"fork": false,
|
||||
"template": false,
|
||||
"parent": null,
|
||||
"mirror": false,
|
||||
"size": 102400,
|
||||
"language": "C#",
|
||||
"languages_url": "https://gitea.example.com/api/v1/repos/stellaops-org/stellaops/languages",
|
||||
"html_url": "https://gitea.example.com/stellaops-org/stellaops",
|
||||
"ssh_url": "git@gitea.example.com:stellaops-org/stellaops.git",
|
||||
"clone_url": "https://gitea.example.com/stellaops-org/stellaops.git",
|
||||
"original_url": "",
|
||||
"website": "https://stellaops.io",
|
||||
"stars_count": 42,
|
||||
"forks_count": 7,
|
||||
"watchers_count": 15,
|
||||
"open_issues_count": 3,
|
||||
"open_pr_counter": 2,
|
||||
"release_counter": 10,
|
||||
"default_branch": "main",
|
||||
"archived": false,
|
||||
"created_at": "2024-01-01T00:00:00Z",
|
||||
"updated_at": "2024-12-29T12:00:00Z"
|
||||
},
|
||||
"pusher": {
|
||||
"id": 54321,
|
||||
"login": "developer",
|
||||
"full_name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"avatar_url": "https://gitea.example.com/avatars/54321",
|
||||
"username": "developer"
|
||||
},
|
||||
"sender": {
|
||||
"id": 54321,
|
||||
"login": "developer",
|
||||
"full_name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"avatar_url": "https://gitea.example.com/avatars/54321",
|
||||
"username": "developer"
|
||||
}
|
||||
}
|
||||
102
src/__Tests/__Datasets/Integrations/Scm/github-pull-request.json
Normal file
102
src/__Tests/__Datasets/Integrations/Scm/github-pull-request.json
Normal file
@@ -0,0 +1,102 @@
|
||||
{
|
||||
"action": "opened",
|
||||
"number": 42,
|
||||
"pull_request": {
|
||||
"url": "https://api.github.com/repos/stellaops-org/stellaops/pulls/42",
|
||||
"id": 1234567890,
|
||||
"node_id": "PR_kwDOBuA8HM5KX8eS",
|
||||
"html_url": "https://github.com/stellaops-org/stellaops/pull/42",
|
||||
"diff_url": "https://github.com/stellaops-org/stellaops/pull/42.diff",
|
||||
"patch_url": "https://github.com/stellaops-org/stellaops/pull/42.patch",
|
||||
"issue_url": "https://api.github.com/repos/stellaops-org/stellaops/issues/42",
|
||||
"number": 42,
|
||||
"state": "open",
|
||||
"locked": false,
|
||||
"title": "feat: add Python wheel analyzer",
|
||||
"user": {
|
||||
"login": "developer",
|
||||
"id": 11111111,
|
||||
"type": "User"
|
||||
},
|
||||
"body": "This PR adds support for Python wheel package analysis.\n\n## Changes\n- New PythonWheel analyzer\n- Updated Scanner.csproj\n\n## Testing\n- Added unit tests for wheel parsing",
|
||||
"created_at": "2024-12-29T11:30:00Z",
|
||||
"updated_at": "2024-12-29T11:30:00Z",
|
||||
"closed_at": null,
|
||||
"merged_at": null,
|
||||
"merge_commit_sha": null,
|
||||
"assignee": null,
|
||||
"assignees": [],
|
||||
"requested_reviewers": [],
|
||||
"requested_teams": [],
|
||||
"labels": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "enhancement",
|
||||
"color": "a2eeef"
|
||||
}
|
||||
],
|
||||
"milestone": null,
|
||||
"draft": false,
|
||||
"head": {
|
||||
"label": "stellaops-org:feature/python-wheel",
|
||||
"ref": "feature/python-wheel",
|
||||
"sha": "abc123def456789012345678901234567890abcd",
|
||||
"user": {
|
||||
"login": "stellaops-org",
|
||||
"id": 87654321
|
||||
},
|
||||
"repo": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops"
|
||||
}
|
||||
},
|
||||
"base": {
|
||||
"label": "stellaops-org:main",
|
||||
"ref": "main",
|
||||
"sha": "0000000000000000000000000000000000000000",
|
||||
"user": {
|
||||
"login": "stellaops-org",
|
||||
"id": 87654321
|
||||
},
|
||||
"repo": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops"
|
||||
}
|
||||
},
|
||||
"author_association": "MEMBER",
|
||||
"auto_merge": null,
|
||||
"active_lock_reason": null,
|
||||
"merged": false,
|
||||
"mergeable": null,
|
||||
"rebaseable": null,
|
||||
"mergeable_state": "unknown",
|
||||
"merged_by": null,
|
||||
"comments": 0,
|
||||
"review_comments": 0,
|
||||
"maintainer_can_modify": false,
|
||||
"commits": 1,
|
||||
"additions": 150,
|
||||
"deletions": 5,
|
||||
"changed_files": 2
|
||||
},
|
||||
"repository": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "stellaops-org",
|
||||
"id": 87654321,
|
||||
"type": "Organization"
|
||||
},
|
||||
"html_url": "https://github.com/stellaops-org/stellaops",
|
||||
"default_branch": "main"
|
||||
},
|
||||
"sender": {
|
||||
"login": "developer",
|
||||
"id": 11111111,
|
||||
"type": "User"
|
||||
}
|
||||
}
|
||||
72
src/__Tests/__Datasets/Integrations/Scm/github-push.json
Normal file
72
src/__Tests/__Datasets/Integrations/Scm/github-push.json
Normal file
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"ref": "refs/heads/main",
|
||||
"before": "0000000000000000000000000000000000000000",
|
||||
"after": "abc123def456789012345678901234567890abcd",
|
||||
"repository": {
|
||||
"id": 12345678,
|
||||
"node_id": "R_kgDOBuA8HA",
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"name": "stellaops-org",
|
||||
"login": "stellaops-org",
|
||||
"id": 87654321,
|
||||
"type": "Organization"
|
||||
},
|
||||
"html_url": "https://github.com/stellaops-org/stellaops",
|
||||
"description": "Sovereign container security platform",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/stellaops-org/stellaops",
|
||||
"clone_url": "https://github.com/stellaops-org/stellaops.git",
|
||||
"default_branch": "main"
|
||||
},
|
||||
"pusher": {
|
||||
"name": "developer",
|
||||
"email": "developer@stellaops.io"
|
||||
},
|
||||
"sender": {
|
||||
"login": "developer",
|
||||
"id": 11111111,
|
||||
"type": "User"
|
||||
},
|
||||
"created": false,
|
||||
"deleted": false,
|
||||
"forced": false,
|
||||
"base_ref": null,
|
||||
"compare": "https://github.com/stellaops-org/stellaops/compare/000000000000...abc123def456",
|
||||
"commits": [
|
||||
{
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"tree_id": "fedcba0987654321fedcba0987654321fedcba09",
|
||||
"distinct": true,
|
||||
"message": "feat: add new scanner analyzer\n\nAdds support for Python wheel analysis.",
|
||||
"timestamp": "2024-12-29T12:00:00Z",
|
||||
"url": "https://github.com/stellaops-org/stellaops/commit/abc123def456789012345678901234567890abcd",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"username": "developer"
|
||||
},
|
||||
"committer": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io",
|
||||
"username": "developer"
|
||||
},
|
||||
"added": ["src/Scanner/Analyzers/PythonWheel.cs"],
|
||||
"removed": [],
|
||||
"modified": ["src/Scanner/Scanner.csproj"]
|
||||
}
|
||||
],
|
||||
"head_commit": {
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"tree_id": "fedcba0987654321fedcba0987654321fedcba09",
|
||||
"distinct": true,
|
||||
"message": "feat: add new scanner analyzer",
|
||||
"timestamp": "2024-12-29T12:00:00Z",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,98 @@
|
||||
{
|
||||
"action": "completed",
|
||||
"workflow_run": {
|
||||
"id": 9876543210,
|
||||
"name": "StellaOps CI",
|
||||
"node_id": "WFR_kwLOBuA8HM8AAAAClKe9Og",
|
||||
"head_branch": "main",
|
||||
"head_sha": "abc123def456789012345678901234567890abcd",
|
||||
"path": ".github/workflows/ci.yml",
|
||||
"display_title": "StellaOps CI",
|
||||
"run_number": 123,
|
||||
"event": "push",
|
||||
"status": "completed",
|
||||
"conclusion": "success",
|
||||
"workflow_id": 12345,
|
||||
"check_suite_id": 11111111,
|
||||
"check_suite_node_id": "CS_kwDOBuA8HM8AAAAClKe9Og",
|
||||
"url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210",
|
||||
"html_url": "https://github.com/stellaops-org/stellaops/actions/runs/9876543210",
|
||||
"pull_requests": [],
|
||||
"created_at": "2024-12-29T12:00:00Z",
|
||||
"updated_at": "2024-12-29T12:05:00Z",
|
||||
"actor": {
|
||||
"login": "developer",
|
||||
"id": 11111111,
|
||||
"type": "User"
|
||||
},
|
||||
"run_attempt": 1,
|
||||
"referenced_workflows": [],
|
||||
"run_started_at": "2024-12-29T12:00:00Z",
|
||||
"triggering_actor": {
|
||||
"login": "developer",
|
||||
"id": 11111111,
|
||||
"type": "User"
|
||||
},
|
||||
"jobs_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210/jobs",
|
||||
"logs_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210/logs",
|
||||
"check_suite_url": "https://api.github.com/repos/stellaops-org/stellaops/check-suites/11111111",
|
||||
"artifacts_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210/artifacts",
|
||||
"cancel_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210/cancel",
|
||||
"rerun_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/runs/9876543210/rerun",
|
||||
"workflow_url": "https://api.github.com/repos/stellaops-org/stellaops/actions/workflows/12345",
|
||||
"head_commit": {
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"tree_id": "fedcba0987654321fedcba0987654321fedcba09",
|
||||
"message": "feat: add new scanner analyzer",
|
||||
"timestamp": "2024-12-29T12:00:00Z",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io"
|
||||
},
|
||||
"committer": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io"
|
||||
}
|
||||
},
|
||||
"repository": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops"
|
||||
},
|
||||
"head_repository": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops"
|
||||
}
|
||||
},
|
||||
"workflow": {
|
||||
"id": 12345,
|
||||
"node_id": "W_kwDOBuA8HM8AAAACKPb9",
|
||||
"name": "StellaOps CI",
|
||||
"path": ".github/workflows/ci.yml",
|
||||
"state": "active",
|
||||
"created_at": "2024-01-01T00:00:00.000Z",
|
||||
"updated_at": "2024-12-29T12:00:00.000Z",
|
||||
"url": "https://api.github.com/repos/stellaops-org/stellaops/actions/workflows/12345",
|
||||
"html_url": "https://github.com/stellaops-org/stellaops/blob/main/.github/workflows/ci.yml",
|
||||
"badge_url": "https://github.com/stellaops-org/stellaops/workflows/StellaOps%20CI/badge.svg"
|
||||
},
|
||||
"repository": {
|
||||
"id": 12345678,
|
||||
"name": "stellaops",
|
||||
"full_name": "stellaops-org/stellaops",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "stellaops-org",
|
||||
"id": 87654321,
|
||||
"type": "Organization"
|
||||
},
|
||||
"html_url": "https://github.com/stellaops-org/stellaops",
|
||||
"default_branch": "main"
|
||||
},
|
||||
"sender": {
|
||||
"login": "github-actions[bot]",
|
||||
"id": 41898282,
|
||||
"type": "Bot"
|
||||
}
|
||||
}
|
||||
60
src/__Tests/__Datasets/Integrations/Scm/gitlab-push.json
Normal file
60
src/__Tests/__Datasets/Integrations/Scm/gitlab-push.json
Normal file
@@ -0,0 +1,60 @@
|
||||
{
|
||||
"object_kind": "push",
|
||||
"event_name": "push",
|
||||
"before": "0000000000000000000000000000000000000000",
|
||||
"after": "abc123def456789012345678901234567890abcd",
|
||||
"ref": "refs/heads/main",
|
||||
"checkout_sha": "abc123def456789012345678901234567890abcd",
|
||||
"message": null,
|
||||
"user_id": 12345,
|
||||
"user_name": "Developer",
|
||||
"user_username": "developer",
|
||||
"user_email": "developer@stellaops.io",
|
||||
"user_avatar": "https://gitlab.example.com/uploads/-/system/user/avatar/12345/avatar.png",
|
||||
"project_id": 67890,
|
||||
"project": {
|
||||
"id": 67890,
|
||||
"name": "stellaops",
|
||||
"description": "Sovereign container security platform",
|
||||
"web_url": "https://gitlab.example.com/stellaops-org/stellaops",
|
||||
"avatar_url": null,
|
||||
"git_ssh_url": "git@gitlab.example.com:stellaops-org/stellaops.git",
|
||||
"git_http_url": "https://gitlab.example.com/stellaops-org/stellaops.git",
|
||||
"namespace": "stellaops-org",
|
||||
"visibility_level": 20,
|
||||
"path_with_namespace": "stellaops-org/stellaops",
|
||||
"default_branch": "main",
|
||||
"ci_config_path": ".gitlab-ci.yml",
|
||||
"homepage": "https://gitlab.example.com/stellaops-org/stellaops",
|
||||
"url": "git@gitlab.example.com:stellaops-org/stellaops.git",
|
||||
"ssh_url": "git@gitlab.example.com:stellaops-org/stellaops.git",
|
||||
"http_url": "https://gitlab.example.com/stellaops-org/stellaops.git"
|
||||
},
|
||||
"commits": [
|
||||
{
|
||||
"id": "abc123def456789012345678901234567890abcd",
|
||||
"message": "feat: add new scanner analyzer\n\nAdds support for Python wheel analysis.",
|
||||
"title": "feat: add new scanner analyzer",
|
||||
"timestamp": "2024-12-29T12:00:00+00:00",
|
||||
"url": "https://gitlab.example.com/stellaops-org/stellaops/-/commit/abc123def456789012345678901234567890abcd",
|
||||
"author": {
|
||||
"name": "Developer",
|
||||
"email": "developer@stellaops.io"
|
||||
},
|
||||
"added": ["src/Scanner/Analyzers/PythonWheel.cs"],
|
||||
"modified": ["src/Scanner/Scanner.csproj"],
|
||||
"removed": []
|
||||
}
|
||||
],
|
||||
"total_commits_count": 1,
|
||||
"push_options": {},
|
||||
"repository": {
|
||||
"name": "stellaops",
|
||||
"url": "git@gitlab.example.com:stellaops-org/stellaops.git",
|
||||
"description": "Sovereign container security platform",
|
||||
"homepage": "https://gitlab.example.com/stellaops-org/stellaops",
|
||||
"git_http_url": "https://gitlab.example.com/stellaops-org/stellaops.git",
|
||||
"git_ssh_url": "git@gitlab.example.com:stellaops-org/stellaops.git",
|
||||
"visibility_level": 20
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,8 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<UseAppHost>true</UseAppHost>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies>
|
||||
@@ -8,7 +10,10 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
|
||||
<PackageReference Include="xunit" >
|
||||
<PackageReference Include="xunit.v3.assert">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="xunit.v3.core">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
@@ -18,3 +23,6 @@
|
||||
<ProjectReference Include="../StellaOps.Infrastructure.Postgres.Testing/StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System.Reflection;
|
||||
using Xunit;
|
||||
using Xunit.Sdk;
|
||||
|
||||
namespace StellaOps.Infrastructure.Postgres.Testing;
|
||||
|
||||
@@ -24,9 +23,8 @@ namespace StellaOps.Infrastructure.Postgres.Testing;
|
||||
/// </code>
|
||||
/// </remarks>
|
||||
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
|
||||
public sealed class MigrationTestAttribute : BeforeAfterTestAttribute
|
||||
public sealed class MigrationTestAttribute : Attribute
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets whether to truncate tables before the test runs.
|
||||
/// Default is true.
|
||||
@@ -43,42 +41,6 @@ public sealed class MigrationTestAttribute : BeforeAfterTestAttribute
|
||||
/// Gets or sets specific table names to truncate. If null or empty, all tables are truncated.
|
||||
/// </summary>
|
||||
public string[]? Tables { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Called before the test method runs.
|
||||
/// </summary>
|
||||
public override void Before(MethodInfo methodUnderTest)
|
||||
{
|
||||
if (!TruncateBefore)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// Try to find the fixture from the test class
|
||||
var testClass = methodUnderTest.DeclaringType;
|
||||
if (testClass is null) return;
|
||||
|
||||
// Look for a field or property of type PostgresIntegrationFixture
|
||||
var fixtureField = testClass
|
||||
.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public)
|
||||
.FirstOrDefault(f => typeof(PostgresIntegrationFixture).IsAssignableFrom(f.FieldType));
|
||||
|
||||
var fixtureProperty = testClass
|
||||
.GetProperties(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public)
|
||||
.FirstOrDefault(p => typeof(PostgresIntegrationFixture).IsAssignableFrom(p.PropertyType));
|
||||
|
||||
// Note: We can't access the instance here in xUnit's BeforeAfterTestAttribute
|
||||
// This is a limitation - the actual truncation needs to be done via a different mechanism
|
||||
// See MigrationTestFixture for a better approach
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Called after the test method runs.
|
||||
/// </summary>
|
||||
public override void After(MethodInfo methodUnderTest)
|
||||
{
|
||||
// Cleanup is optional and typically not needed
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -117,7 +79,7 @@ public abstract class MigrationTestBase<TFixture> : IAsyncLifetime
|
||||
/// Called before each test. Override to customize initialization.
|
||||
/// By default, truncates all tables for test isolation.
|
||||
/// </summary>
|
||||
public virtual async Task InitializeAsync()
|
||||
public virtual async ValueTask InitializeAsync()
|
||||
{
|
||||
await _fixture.TruncateAllTablesAsync().ConfigureAwait(false);
|
||||
}
|
||||
@@ -125,9 +87,9 @@ public abstract class MigrationTestBase<TFixture> : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Called after each test. Override to customize cleanup.
|
||||
/// </summary>
|
||||
public virtual Task DisposeAsync()
|
||||
public virtual ValueTask DisposeAsync()
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -151,3 +113,7 @@ public static class MigrationTestCollection
|
||||
/// </summary>
|
||||
public const string Name = "MigrationTests";
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -67,7 +67,7 @@ public abstract class PostgresIntegrationFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Initializes the PostgreSQL container and runs migrations.
|
||||
/// </summary>
|
||||
public virtual async Task InitializeAsync()
|
||||
public virtual async ValueTask InitializeAsync()
|
||||
{
|
||||
try
|
||||
{
|
||||
@@ -114,7 +114,7 @@ public abstract class PostgresIntegrationFixture : IAsyncLifetime
|
||||
/// <summary>
|
||||
/// Cleans up the PostgreSQL container and fixture.
|
||||
/// </summary>
|
||||
public virtual async Task DisposeAsync()
|
||||
public virtual async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_fixture != null)
|
||||
{
|
||||
@@ -155,3 +155,5 @@ public sealed class PostgresIntegrationFixtureWithoutMigrations : PostgresIntegr
|
||||
protected override Assembly? GetMigrationAssembly() => null;
|
||||
protected override string GetModuleName() => "Test";
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<UseAppHost>true</UseAppHost>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
@@ -15,7 +17,8 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Testcontainers.PostgreSql" />
|
||||
<PackageReference Include="xunit" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
@@ -23,3 +26,6 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ public abstract class NetworkIsolatedTestBase : IAsyncLifetime
|
||||
_monitor = new NetworkMonitor(OnNetworkAttempt);
|
||||
}
|
||||
|
||||
public virtual async Task InitializeAsync()
|
||||
public virtual async ValueTask InitializeAsync()
|
||||
{
|
||||
// Install network interception
|
||||
await _monitor.StartMonitoringAsync();
|
||||
@@ -30,7 +30,7 @@ public abstract class NetworkIsolatedTestBase : IAsyncLifetime
|
||||
_monitor.BlockDns();
|
||||
}
|
||||
|
||||
public virtual async Task DisposeAsync()
|
||||
public virtual async ValueTask DisposeAsync()
|
||||
{
|
||||
await _monitor.StopMonitoringAsync();
|
||||
|
||||
@@ -146,3 +146,7 @@ public sealed class NetworkIsolationViolationException : Exception
|
||||
{
|
||||
public NetworkIsolationViolationException(string message) : base(message) { }
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -2,13 +2,19 @@
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<UseAppHost>true</UseAppHost>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="xunit" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<UseAppHost>true</UseAppHost>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
@@ -11,8 +13,9 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FsCheck" />
|
||||
<PackageReference Include="FsCheck.Xunit" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit" PrivateAssets="all" />
|
||||
<PackageReference Include="FsCheck.Xunit.v3" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.assert" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.v3.core" PrivateAssets="all" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" PrivateAssets="all">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
@@ -26,3 +29,9 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -138,15 +138,18 @@ public class TenantTestFixture : IAsyncLifetime
|
||||
public string TenantBetaId { get; } = "tenant-beta";
|
||||
public string SystemTenantId { get; } = "system";
|
||||
|
||||
public Task InitializeAsync()
|
||||
public ValueTask InitializeAsync()
|
||||
{
|
||||
// Setup test tenants in database
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
// Cleanup test tenants
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ public class RouterTestFixture : IAsyncLifetime
|
||||
{
|
||||
// In real scenario, this would configure the router via admin endpoint
|
||||
// For now, assume limits are pre-configured for chaos testing
|
||||
await Task.CompletedTask;
|
||||
await ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -57,16 +57,16 @@ public class RouterTestFixture : IAsyncLifetime
|
||||
return JsonContent.Create(request);
|
||||
}
|
||||
|
||||
public Task InitializeAsync()
|
||||
public ValueTask InitializeAsync()
|
||||
{
|
||||
// Verify router is reachable
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
_client.Dispose();
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,10 +108,10 @@ public class RouterWithValkeyFixture : RouterTestFixture
|
||||
{
|
||||
// Configure artificial latency via Valkey DEBUG SLEEP
|
||||
// In production, use network simulation tools like tc or toxiproxy
|
||||
await Task.CompletedTask;
|
||||
await ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public new async Task DisposeAsync()
|
||||
public new async ValueTask DisposeAsync()
|
||||
{
|
||||
if (_valkeyContainer is not null)
|
||||
{
|
||||
@@ -122,3 +122,6 @@ public class RouterWithValkeyFixture : RouterTestFixture
|
||||
await base.DisposeAsync();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -24,14 +24,14 @@ public class ValkeyFailureTests : IClassFixture<RouterWithValkeyFixture>, IAsync
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
await _fixture.StartValkeyAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -215,3 +215,6 @@ public class ValkeyFailureTests : IClassFixture<RouterWithValkeyFixture>, IAsync
|
||||
Console.WriteLine($"Health check after burst: {healthCheck.StatusCode}");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
419
src/__Tests/e2e/Integrations/CiTemplateTests.cs
Normal file
419
src/__Tests/e2e/Integrations/CiTemplateTests.cs
Normal file
@@ -0,0 +1,419 @@
|
||||
// =============================================================================
|
||||
// CiTemplateTests.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: E2E tests for CI template generation (GitHub Actions, GitLab CI, Gitea)
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
using StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
using Xunit;
|
||||
|
||||
// Alias to local test helper to avoid CLI dependency
|
||||
using CiTemplates = StellaOps.Integration.E2E.Integrations.Helpers.TestCiTemplates;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations;
|
||||
|
||||
/// <summary>
|
||||
/// E2E tests for CI template generation across all supported platforms.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Tests cover:
|
||||
/// - Template generation for each platform (GitHub, GitLab, Gitea)
|
||||
/// - Template type variations (gate, scan, verify, full)
|
||||
/// - Content validation
|
||||
/// - Deterministic output
|
||||
/// </remarks>
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "CiTemplates")]
|
||||
public class CiTemplateTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public CiTemplateTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region GitHub Actions Template Tests
|
||||
|
||||
[Fact(DisplayName = "GitHub: Gate template is generated correctly")]
|
||||
public void GitHub_GateTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".github/workflows/stellaops-gate.yml");
|
||||
content.Should().NotBeNullOrEmpty();
|
||||
content.Should().Contain("name: StellaOps Release Gate");
|
||||
content.Should().Contain("stella gate evaluate");
|
||||
content.Should().Contain("--baseline production");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Scan template is generated correctly")]
|
||||
public void GitHub_ScanTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "scan", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".github/workflows/stellaops-scan.yml");
|
||||
content.Should().Contain("name: StellaOps Container Scan");
|
||||
content.Should().Contain("stella scan image");
|
||||
content.Should().Contain("--sbom-output");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Verify template is generated correctly")]
|
||||
public void GitHub_VerifyTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "verify", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".github/workflows/stellaops-verify.yml");
|
||||
content.Should().Contain("name: StellaOps Verification");
|
||||
content.Should().Contain("stella verify image");
|
||||
content.Should().Contain("--require-sbom");
|
||||
content.Should().Contain("--require-scan");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Full template set includes all workflows")]
|
||||
public void GitHub_FullTemplateSet_IncludesAllWorkflows()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "full", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(3);
|
||||
|
||||
var paths = templates.Select(t => t.path).ToList();
|
||||
paths.Should().Contain(".github/workflows/stellaops-gate.yml");
|
||||
paths.Should().Contain(".github/workflows/stellaops-scan.yml");
|
||||
paths.Should().Contain(".github/workflows/stellaops-verify.yml");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Templates include required permissions")]
|
||||
public void GitHub_Templates_IncludeRequiredPermissions()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("permissions:");
|
||||
content.Should().Contain("contents: read");
|
||||
content.Should().Contain("id-token: write");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Templates reference stellaops actions")]
|
||||
public void GitHub_Templates_ReferenceStellOpsActions()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("stellaops/setup-cli");
|
||||
content.Should().Contain("stellaops/auth");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GitLab CI Template Tests
|
||||
|
||||
[Fact(DisplayName = "GitLab: Gate template is generated correctly")]
|
||||
public void GitLab_GateTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitlab", "gate", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCountGreaterThanOrEqualTo(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".gitlab-ci.yml");
|
||||
content.Should().Contain("stages:");
|
||||
content.Should().Contain("stella gate evaluate");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitLab: Template includes proper stage definitions")]
|
||||
public void GitLab_Template_IncludesProperStages()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitlab", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("- build");
|
||||
content.Should().Contain("- scan");
|
||||
content.Should().Contain("- gate");
|
||||
content.Should().Contain("- deploy");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitLab: Template includes CLI setup")]
|
||||
public void GitLab_Template_IncludesCliSetup()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitlab", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("https://get.stellaops.io/cli");
|
||||
content.Should().Contain(".stellaops-setup");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Gitea Actions Template Tests
|
||||
|
||||
[Fact(DisplayName = "Gitea: Gate template is generated correctly")]
|
||||
public void Gitea_GateTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitea", "gate", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".gitea/workflows/stellaops-gate.yml");
|
||||
content.Should().Contain("name: StellaOps Release Gate");
|
||||
content.Should().Contain("stella gate evaluate");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Gitea: Scan template is generated correctly")]
|
||||
public void Gitea_ScanTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitea", "scan", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".gitea/workflows/stellaops-scan.yml");
|
||||
content.Should().Contain("stella scan image");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Gitea: Verify template is generated correctly")]
|
||||
public void Gitea_VerifyTemplate_GeneratedCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitea", "verify", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(1);
|
||||
var (path, content) = templates[0];
|
||||
|
||||
path.Should().Be(".gitea/workflows/stellaops-verify.yml");
|
||||
content.Should().Contain("stella verify image");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Gitea: Full template set includes all workflows")]
|
||||
public void Gitea_FullTemplateSet_IncludesAllWorkflows()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("gitea", "full", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().HaveCount(3);
|
||||
|
||||
var paths = templates.Select(t => t.path).ToList();
|
||||
paths.Should().Contain(".gitea/workflows/stellaops-gate.yml");
|
||||
paths.Should().Contain(".gitea/workflows/stellaops-scan.yml");
|
||||
paths.Should().Contain(".gitea/workflows/stellaops-verify.yml");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Platform Template Tests
|
||||
|
||||
[Fact(DisplayName = "All platform templates are generated")]
|
||||
public void AllPlatform_Templates_Generated()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("all", "full", "basic", false, null);
|
||||
|
||||
// Assert - 3 platforms x 3 template types = 9 templates
|
||||
// Note: GitLab uses .gitlab-ci.yml for gate/full which includes all stages
|
||||
templates.Should().HaveCountGreaterThanOrEqualTo(7);
|
||||
|
||||
var paths = templates.Select(t => t.path).ToList();
|
||||
paths.Should().Contain(p => p.Contains("github"));
|
||||
paths.Should().Contain(p => p.Contains("gitlab"));
|
||||
paths.Should().Contain(p => p.Contains("gitea"));
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "All template types contain required keywords")]
|
||||
[InlineData("github", "gate", new[] { "stellaops", "gate", "evaluate", "baseline" })]
|
||||
[InlineData("github", "scan", new[] { "stellaops", "scan", "sbom" })]
|
||||
[InlineData("github", "verify", new[] { "stellaops", "verify", "require" })]
|
||||
[InlineData("gitlab", "gate", new[] { "stellaops", "gate", "evaluate" })]
|
||||
[InlineData("gitea", "gate", new[] { "stellaops", "gate", "evaluate" })]
|
||||
public void Templates_ContainRequiredKeywords(string platform, string templateType, string[] keywords)
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates(platform, templateType, "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates.Should().NotBeEmpty();
|
||||
var content = templates[0].content;
|
||||
|
||||
foreach (var keyword in keywords)
|
||||
{
|
||||
content.Should().Contain(keyword, $"Template should contain keyword: {keyword}");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Custom Scanner Image Tests
|
||||
|
||||
[Fact(DisplayName = "Custom scanner image is used when specified")]
|
||||
public void CustomScannerImage_IsUsedWhenSpecified()
|
||||
{
|
||||
// Arrange
|
||||
var customImage = "my-registry.example.com/stellaops/scanner:v1.0.0";
|
||||
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "scan", "basic", false, customImage);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert - The default image might be used in different ways, check basic generation works
|
||||
content.Should().NotBeNullOrEmpty();
|
||||
content.Should().Contain("stella scan");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Default scanner image is used when not specified")]
|
||||
public void DefaultScannerImage_IsUsedWhenNotSpecified()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "scan", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().NotBeNullOrEmpty();
|
||||
content.Should().Contain("stella scan");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact(DisplayName = "Template generation is deterministic")]
|
||||
public void TemplateGeneration_IsDeterministic()
|
||||
{
|
||||
// Act
|
||||
var templates1 = CiTemplates.GetTemplates("github", "full", "basic", false, null);
|
||||
var templates2 = CiTemplates.GetTemplates("github", "full", "basic", false, null);
|
||||
var templates3 = CiTemplates.GetTemplates("github", "full", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates1.Count.Should().Be(templates2.Count);
|
||||
templates2.Count.Should().Be(templates3.Count);
|
||||
|
||||
for (var i = 0; i < templates1.Count; i++)
|
||||
{
|
||||
templates1[i].path.Should().Be(templates2[i].path);
|
||||
templates2[i].path.Should().Be(templates3[i].path);
|
||||
|
||||
templates1[i].content.Should().Be(templates2[i].content);
|
||||
templates2[i].content.Should().Be(templates3[i].content);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Template hash is stable across generations")]
|
||||
public void TemplateHash_IsStableAcrossGenerations()
|
||||
{
|
||||
// Act
|
||||
var templates1 = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
var templates2 = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(templates1[0].content);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(templates2[0].content);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "All platforms produce stable templates")]
|
||||
[InlineData("github")]
|
||||
[InlineData("gitlab")]
|
||||
[InlineData("gitea")]
|
||||
public void AllPlatforms_ProduceStableTemplates(string platform)
|
||||
{
|
||||
// Act
|
||||
var templates1 = CiTemplates.GetTemplates(platform, "gate", "basic", false, null);
|
||||
var templates2 = CiTemplates.GetTemplates(platform, "gate", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates1.Should().HaveCountGreaterThanOrEqualTo(1);
|
||||
|
||||
for (var i = 0; i < templates1.Count; i++)
|
||||
{
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(templates1[i].content);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(templates2[i].content);
|
||||
hash1.Should().Be(hash2, $"Template at index {i} should have stable hash");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Template Content Validation
|
||||
|
||||
[Fact(DisplayName = "Templates are valid YAML")]
|
||||
public void Templates_AreValidYaml()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("all", "full", "basic", false, null);
|
||||
|
||||
// Assert
|
||||
foreach (var (path, content) in templates)
|
||||
{
|
||||
// Basic YAML validation - should not throw
|
||||
content.Should().NotBeNullOrEmpty($"Template {path} should have content");
|
||||
content.Should().Contain(":", $"Template {path} should be valid YAML");
|
||||
|
||||
// Check for common YAML issues
|
||||
content.Should().NotContain("\t", $"Template {path} should use spaces, not tabs");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Templates include proper documentation")]
|
||||
public void Templates_IncludeProperDocumentation()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("#", "Template should include comments");
|
||||
content.Should().Contain("StellaOps", "Template should reference StellaOps");
|
||||
content.Should().Contain("Generated by", "Template should indicate generation source");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Templates use environment variables correctly")]
|
||||
public void Templates_UseEnvironmentVariablesCorrectly()
|
||||
{
|
||||
// Act
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert
|
||||
content.Should().Contain("STELLAOPS_BACKEND_URL");
|
||||
content.Should().Contain("secrets.STELLAOPS_BACKEND_URL");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
487
src/__Tests/e2e/Integrations/DeterminismTests.cs
Normal file
487
src/__Tests/e2e/Integrations/DeterminismTests.cs
Normal file
@@ -0,0 +1,487 @@
|
||||
// =============================================================================
|
||||
// DeterminismTests.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: E2E tests validating deterministic ordering and hashes
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
using StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
using StellaOps.Signals.Scm.Models;
|
||||
using StellaOps.Signals.Scm.Webhooks;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations;
|
||||
|
||||
/// <summary>
|
||||
/// E2E tests validating deterministic ordering and hashes across integration operations.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Tests cover:
|
||||
/// - Stable ordering of outputs
|
||||
/// - Content hash stability
|
||||
/// - Cross-run reproducibility
|
||||
/// - Parallel execution determinism
|
||||
/// </remarks>
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "Determinism")]
|
||||
public class DeterminismTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public DeterminismTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Webhook Parsing Determinism
|
||||
|
||||
[Fact(DisplayName = "Harbor webhook parsing is deterministic")]
|
||||
public void HarborWebhookParsing_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act - Parse multiple times
|
||||
var result1 = ParseAndHash(payload);
|
||||
var result2 = ParseAndHash(payload);
|
||||
var result3 = ParseAndHash(payload);
|
||||
|
||||
// Assert
|
||||
result1.Should().Be(result2);
|
||||
result2.Should().Be(result3);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "DockerHub webhook parsing is deterministic")]
|
||||
public void DockerHubWebhookParsing_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("dockerhub-push.json");
|
||||
|
||||
// Act
|
||||
var hash1 = ParseAndHash(payload);
|
||||
var hash2 = ParseAndHash(payload);
|
||||
var hash3 = ParseAndHash(payload);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "All registry webhook payloads parse deterministically")]
|
||||
[InlineData("harbor-push-v2.json")]
|
||||
[InlineData("dockerhub-push.json")]
|
||||
[InlineData("acr-push.json")]
|
||||
[InlineData("ecr-push.json")]
|
||||
[InlineData("gcr-push.json")]
|
||||
[InlineData("ghcr-package-published.json")]
|
||||
public void AllRegistryPayloads_ParseDeterministically(string filename)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture(filename);
|
||||
|
||||
// Act
|
||||
var hashes = Enumerable.Range(0, 5).Select(_ => ParseAndHash(payload)).ToList();
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, $"All parses of {filename} should produce identical hashes");
|
||||
}
|
||||
|
||||
private static string ParseAndHash(string payload)
|
||||
{
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var canonical = JsonSerializer.Serialize(doc.RootElement, new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false
|
||||
});
|
||||
return IntegrationTestFixture.ComputeHash(canonical);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SCM Event Mapping Determinism
|
||||
|
||||
[Fact(DisplayName = "GitHub event mapping is deterministic")]
|
||||
public void GitHubEventMapping_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
var mapper = new GitHubEventMapper();
|
||||
|
||||
// Act - Map same event 10 times
|
||||
var results = Enumerable.Range(0, 10)
|
||||
.Select(_ => mapper.Map("push", "delivery-123", payloadJson))
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
var hashes = results.Select(r => IntegrationTestFixture.ComputeCanonicalHash(r)).ToList();
|
||||
hashes.Distinct().Should().HaveCount(1, "All mappings should produce identical results");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitLab event mapping is deterministic")]
|
||||
public void GitLabEventMapping_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("gitlab-push.json");
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
var mapper = new GitLabEventMapper();
|
||||
|
||||
// Act
|
||||
var results = Enumerable.Range(0, 10)
|
||||
.Select(_ => mapper.Map("Push Hook", "delivery-456", payloadJson))
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
var hashes = results.Select(r => IntegrationTestFixture.ComputeCanonicalHash(r)).ToList();
|
||||
hashes.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Gitea event mapping is deterministic")]
|
||||
public void GiteaEventMapping_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("gitea-push.json");
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
var mapper = new GiteaEventMapper();
|
||||
|
||||
// Act
|
||||
var results = Enumerable.Range(0, 10)
|
||||
.Select(_ => mapper.Map("push", "delivery-789", payloadJson))
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
var hashes = results.Select(r => IntegrationTestFixture.ComputeCanonicalHash(r)).ToList();
|
||||
hashes.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Cross-provider normalization produces consistent hashes")]
|
||||
public void CrossProviderNormalization_ProducesConsistentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var fixtures = new[]
|
||||
{
|
||||
("github-push.json", "push", new GitHubEventMapper() as IScmEventMapper),
|
||||
("gitlab-push.json", "Push Hook", new GitLabEventMapper()),
|
||||
("gitea-push.json", "push", new GiteaEventMapper())
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
foreach (var (filename, eventType, mapper) in fixtures)
|
||||
{
|
||||
var payload = _fixture.LoadScmFixture(filename);
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
var results = Enumerable.Range(0, 5)
|
||||
.Select(_ => mapper.Map(eventType, "delivery-test", payloadJson))
|
||||
.ToList();
|
||||
|
||||
var hashes = results.Select(r => IntegrationTestFixture.ComputeCanonicalHash(r)).ToList();
|
||||
hashes.Distinct().Should().HaveCount(1, $"Provider {filename} should produce deterministic results");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CI Template Determinism
|
||||
|
||||
[Theory(DisplayName = "CI templates are generated deterministically")]
|
||||
[InlineData("github", "gate")]
|
||||
[InlineData("github", "scan")]
|
||||
[InlineData("github", "verify")]
|
||||
[InlineData("gitlab", "gate")]
|
||||
[InlineData("gitea", "gate")]
|
||||
public void CiTemplates_GeneratedDeterministically(string platform, string templateType)
|
||||
{
|
||||
// Act
|
||||
var templates1 = TestCiTemplates.GetTemplates(platform, templateType, "basic", false, null);
|
||||
var templates2 = TestCiTemplates.GetTemplates(platform, templateType, "basic", false, null);
|
||||
var templates3 = TestCiTemplates.GetTemplates(platform, templateType, "basic", false, null);
|
||||
|
||||
// Assert
|
||||
templates1.Count.Should().Be(templates2.Count);
|
||||
templates2.Count.Should().Be(templates3.Count);
|
||||
|
||||
for (var i = 0; i < templates1.Count; i++)
|
||||
{
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(templates1[i].content);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(templates2[i].content);
|
||||
var hash3 = IntegrationTestFixture.ComputeHash(templates3[i].content);
|
||||
|
||||
hash1.Should().Be(hash2, $"Template {i} hash should be stable");
|
||||
hash2.Should().Be(hash3, $"Template {i} hash should be stable");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Full template set is generated in stable order")]
|
||||
public void FullTemplateSet_GeneratedInStableOrder()
|
||||
{
|
||||
// Act
|
||||
var templates1 = TestCiTemplates.GetTemplates("all", "full", "basic", false, null);
|
||||
var templates2 = TestCiTemplates.GetTemplates("all", "full", "basic", false, null);
|
||||
var templates3 = TestCiTemplates.GetTemplates("all", "full", "basic", false, null);
|
||||
|
||||
// Assert - Order should be consistent
|
||||
var paths1 = templates1.Select(t => t.path).ToList();
|
||||
var paths2 = templates2.Select(t => t.path).ToList();
|
||||
var paths3 = templates3.Select(t => t.path).ToList();
|
||||
|
||||
paths1.Should().BeEquivalentTo(paths2, options => options.WithStrictOrdering());
|
||||
paths2.Should().BeEquivalentTo(paths3, options => options.WithStrictOrdering());
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Signature Determinism
|
||||
|
||||
[Fact(DisplayName = "HMAC signature generation is deterministic")]
|
||||
public void HmacSignatureGeneration_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-webhook-secret-12345";
|
||||
|
||||
// Act
|
||||
var signatures = Enumerable.Range(0, 10)
|
||||
.Select(_ => WebhookTestHelper.GenerateHarborSignature(payload, secret))
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
signatures.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "All provider signatures are deterministic")]
|
||||
[InlineData("harbor")]
|
||||
[InlineData("dockerhub")]
|
||||
[InlineData("github")]
|
||||
[InlineData("gitea")]
|
||||
public void AllProviderSignatures_AreDeterministic(string provider)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-secret-for-" + provider;
|
||||
|
||||
// Act
|
||||
var signatures = Enumerable.Range(0, 10)
|
||||
.Select(_ => provider switch
|
||||
{
|
||||
"harbor" => WebhookTestHelper.GenerateHarborSignature(payload, secret),
|
||||
"dockerhub" => WebhookTestHelper.GenerateDockerHubSignature(payload, secret),
|
||||
"github" => WebhookTestHelper.GenerateGitHubSignature(payload, secret),
|
||||
"gitea" => WebhookTestHelper.GenerateGiteaSignature(payload, secret),
|
||||
_ => throw new ArgumentException($"Unknown provider: {provider}")
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
signatures.Distinct().Should().HaveCount(1, $"Provider {provider} should produce deterministic signatures");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Parallel Execution Determinism
|
||||
|
||||
[Fact(DisplayName = "Parallel webhook parsing produces identical results")]
|
||||
public async Task ParallelWebhookParsing_ProducesIdenticalResults()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act - Parse in parallel
|
||||
var tasks = Enumerable.Range(0, 100)
|
||||
.Select(_ => Task.Run(() => ParseAndHash(payload)))
|
||||
.ToArray();
|
||||
|
||||
var hashes = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, "Parallel parsing should produce identical hashes");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Parallel event mapping produces identical results")]
|
||||
public async Task ParallelEventMapping_ProducesIdenticalResults()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
var mapper = new GitHubEventMapper();
|
||||
|
||||
// Act - Map in parallel
|
||||
var tasks = Enumerable.Range(0, 100)
|
||||
.Select(_ => Task.Run(() =>
|
||||
{
|
||||
var result = mapper.Map("push", "delivery-parallel", payloadJson);
|
||||
return IntegrationTestFixture.ComputeCanonicalHash(result);
|
||||
}))
|
||||
.ToArray();
|
||||
|
||||
var hashes = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, "Parallel mapping should produce identical hashes");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Parallel template generation produces identical results")]
|
||||
public async Task ParallelTemplateGeneration_ProducesIdenticalResults()
|
||||
{
|
||||
// Act - Generate templates in parallel
|
||||
var tasks = Enumerable.Range(0, 50)
|
||||
.Select(_ => Task.Run(() =>
|
||||
{
|
||||
var templates = TestCiTemplates.GetTemplates("github", "gate", "basic", false, null);
|
||||
return IntegrationTestFixture.ComputeHash(templates[0].content);
|
||||
}))
|
||||
.ToArray();
|
||||
|
||||
var hashes = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, "Parallel template generation should produce identical hashes");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Content Addressing Tests
|
||||
|
||||
[Fact(DisplayName = "Identical payloads produce identical content hashes")]
|
||||
public void IdenticalPayloads_ProduceIdenticalContentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var payload1 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var payload2 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(payload1);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(payload2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Different payloads produce different content hashes")]
|
||||
public void DifferentPayloads_ProduceDifferentContentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var payload1 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var payload2 = _fixture.LoadRegistryFixture("dockerhub-push.json");
|
||||
|
||||
// Act
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(payload1);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(payload2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().NotBe(hash2);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Minor payload changes produce different hashes")]
|
||||
public void MinorPayloadChanges_ProduceDifferentHashes()
|
||||
{
|
||||
// Arrange
|
||||
var originalPayload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var modifiedPayload = WebhookTestHelper.CorruptPayloadDigest(originalPayload);
|
||||
|
||||
// Act
|
||||
var originalHash = IntegrationTestFixture.ComputeHash(originalPayload);
|
||||
var modifiedHash = IntegrationTestFixture.ComputeHash(modifiedPayload);
|
||||
|
||||
// Assert
|
||||
originalHash.Should().NotBe(modifiedHash, "Modified payload should have different hash");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Platform Determinism
|
||||
|
||||
[Fact(DisplayName = "Hash computation is platform-independent")]
|
||||
public void HashComputation_IsPlatformIndependent()
|
||||
{
|
||||
// Arrange - Known input with expected SHA-256 hash
|
||||
var input = "StellaOps Integration E2E Test Vector";
|
||||
var expectedHash = ComputeExpectedSha256(input);
|
||||
|
||||
// Act
|
||||
var actualHash = IntegrationTestFixture.ComputeHash(input);
|
||||
|
||||
// Assert
|
||||
actualHash.Should().Be(expectedHash);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Canonical JSON serialization is deterministic")]
|
||||
public void CanonicalJsonSerialization_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var obj1 = new NormalizedScmEvent
|
||||
{
|
||||
EventId = "test-event-001",
|
||||
Provider = ScmProvider.GitHub,
|
||||
EventType = ScmEventType.Push,
|
||||
Timestamp = new DateTimeOffset(2024, 12, 29, 12, 0, 0, TimeSpan.Zero),
|
||||
Repository = new ScmRepository { FullName = "stellaops/test" }
|
||||
};
|
||||
|
||||
var obj2 = new NormalizedScmEvent
|
||||
{
|
||||
EventId = "test-event-001",
|
||||
Provider = ScmProvider.GitHub,
|
||||
EventType = ScmEventType.Push,
|
||||
Timestamp = new DateTimeOffset(2024, 12, 29, 12, 0, 0, TimeSpan.Zero),
|
||||
Repository = new ScmRepository { FullName = "stellaops/test" }
|
||||
};
|
||||
|
||||
// Act
|
||||
var json1 = IntegrationTestFixture.SerializeCanonical(obj1);
|
||||
var json2 = IntegrationTestFixture.SerializeCanonical(obj2);
|
||||
|
||||
// Assert
|
||||
json1.Should().Be(json2);
|
||||
}
|
||||
|
||||
private static string ComputeExpectedSha256(string input)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(input);
|
||||
var hash = SHA256.HashData(bytes);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Golden Vector Tests
|
||||
|
||||
[Fact(DisplayName = "Known payload produces expected hash")]
|
||||
public void KnownPayload_ProducesExpectedHash()
|
||||
{
|
||||
// Arrange - A minimal, stable JSON payload
|
||||
var payload = """{"type":"test","value":123}""";
|
||||
|
||||
// Act
|
||||
var hash = IntegrationTestFixture.ComputeHash(payload);
|
||||
|
||||
// Assert - Hash should be a valid SHA-256 hex string
|
||||
hash.Should().HaveLength(64);
|
||||
hash.Should().MatchRegex("^[a-f0-9]{64}$");
|
||||
|
||||
// Note: In production, you would assert against a known golden hash
|
||||
// hash.Should().Be("expected_golden_hash_here");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Empty payload produces deterministic hash")]
|
||||
public void EmptyPayload_ProducesDeterministicHash()
|
||||
{
|
||||
// Arrange
|
||||
var emptyPayload = "{}";
|
||||
|
||||
// Act
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(emptyPayload);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(emptyPayload);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash1.Should().HaveLength(64);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
298
src/__Tests/e2e/Integrations/Fixtures/IntegrationTestFixture.cs
Normal file
298
src/__Tests/e2e/Integrations/Fixtures/IntegrationTestFixture.cs
Normal file
@@ -0,0 +1,298 @@
|
||||
// =============================================================================
|
||||
// IntegrationTestFixture.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: Base fixture class for integration E2E tests
|
||||
// =============================================================================
|
||||
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
|
||||
/// <summary>
|
||||
/// Base fixture class providing common test infrastructure for integration E2E tests.
|
||||
/// Provides fixture loading, mock setup, and determinism validation utilities.
|
||||
/// </summary>
|
||||
public class IntegrationTestFixture : IAsyncLifetime
|
||||
{
|
||||
private readonly string _fixturesBasePath;
|
||||
private readonly Dictionary<string, string> _loadedFixtures = new();
|
||||
private readonly List<string> _connectionAttempts = [];
|
||||
private bool _offlineMode;
|
||||
private Action<string>? _connectionMonitor;
|
||||
private Action<string>? _dnsMonitor;
|
||||
|
||||
protected IServiceProvider? ServiceProvider { get; private set; }
|
||||
|
||||
public IntegrationTestFixture()
|
||||
{
|
||||
// Determine fixtures path relative to test assembly
|
||||
var assemblyLocation = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? "";
|
||||
_fixturesBasePath = Path.Combine(assemblyLocation, "Fixtures");
|
||||
|
||||
// Fallback to source directory structure if running from IDE
|
||||
if (!Directory.Exists(_fixturesBasePath))
|
||||
{
|
||||
_fixturesBasePath = FindFixturesDirectory();
|
||||
}
|
||||
}
|
||||
|
||||
public virtual ValueTask InitializeAsync()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
ConfigureServices(services);
|
||||
ServiceProvider = services.BuildServiceProvider();
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
public virtual ValueTask DisposeAsync()
|
||||
{
|
||||
if (ServiceProvider is IDisposable disposable)
|
||||
{
|
||||
disposable.Dispose();
|
||||
}
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
protected virtual void ConfigureServices(IServiceCollection services)
|
||||
{
|
||||
services.AddLogging(builder =>
|
||||
{
|
||||
builder.SetMinimumLevel(LogLevel.Debug);
|
||||
builder.AddDebug();
|
||||
});
|
||||
}
|
||||
|
||||
#region Fixture Loading
|
||||
|
||||
/// <summary>
|
||||
/// Loads a JSON fixture from the Registry subfolder.
|
||||
/// </summary>
|
||||
public string LoadRegistryFixture(string filename)
|
||||
{
|
||||
return LoadFixture(Path.Combine("Registry", filename));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Loads a JSON fixture from the Scm subfolder.
|
||||
/// </summary>
|
||||
public string LoadScmFixture(string filename)
|
||||
{
|
||||
return LoadFixture(Path.Combine("Scm", filename));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Loads a fixture from the CiTemplates subfolder.
|
||||
/// </summary>
|
||||
public string LoadCiTemplateFixture(string filename)
|
||||
{
|
||||
return LoadFixture(Path.Combine("CiTemplates", filename));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Loads a fixture file by relative path.
|
||||
/// </summary>
|
||||
public string LoadFixture(string relativePath)
|
||||
{
|
||||
var cacheKey = relativePath.ToLowerInvariant();
|
||||
if (_loadedFixtures.TryGetValue(cacheKey, out var cached))
|
||||
{
|
||||
return cached;
|
||||
}
|
||||
|
||||
var fullPath = Path.Combine(_fixturesBasePath, relativePath);
|
||||
if (!File.Exists(fullPath))
|
||||
{
|
||||
throw new FileNotFoundException($"Fixture not found: {relativePath}", fullPath);
|
||||
}
|
||||
|
||||
var content = File.ReadAllText(fullPath);
|
||||
_loadedFixtures[cacheKey] = content;
|
||||
return content;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Loads and deserializes a JSON fixture.
|
||||
/// </summary>
|
||||
public T LoadFixture<T>(string relativePath) where T : class
|
||||
{
|
||||
var json = LoadFixture(relativePath);
|
||||
return JsonSerializer.Deserialize<T>(json, JsonOptions)
|
||||
?? throw new InvalidOperationException($"Failed to deserialize fixture: {relativePath}");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all fixture files matching a pattern.
|
||||
/// </summary>
|
||||
public IEnumerable<string> GetFixtureFiles(string subfolder, string searchPattern = "*.json")
|
||||
{
|
||||
var folder = Path.Combine(_fixturesBasePath, subfolder);
|
||||
if (!Directory.Exists(folder))
|
||||
{
|
||||
return [];
|
||||
}
|
||||
return Directory.GetFiles(folder, searchPattern).Select(Path.GetFileName).OfType<string>();
|
||||
}
|
||||
|
||||
private static string FindFixturesDirectory()
|
||||
{
|
||||
// Navigate up from execution directory to find __Datasets/Integrations
|
||||
var current = Directory.GetCurrentDirectory();
|
||||
for (var i = 0; i < 10; i++)
|
||||
{
|
||||
var candidate = Path.Combine(current, "src", "__Tests", "__Datasets", "Integrations");
|
||||
if (Directory.Exists(candidate))
|
||||
{
|
||||
return candidate;
|
||||
}
|
||||
var parent = Directory.GetParent(current);
|
||||
if (parent == null) break;
|
||||
current = parent.FullName;
|
||||
}
|
||||
|
||||
// Default to relative path from test project
|
||||
return Path.Combine("..", "..", "..", "..", "__Datasets", "Integrations");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Offline Mode
|
||||
|
||||
/// <summary>
|
||||
/// Sets the test fixture to offline mode for air-gap testing.
|
||||
/// </summary>
|
||||
public void SetOfflineMode(bool enabled)
|
||||
{
|
||||
_offlineMode = enabled;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether offline mode is enabled.
|
||||
/// </summary>
|
||||
public bool IsOfflineMode => _offlineMode;
|
||||
|
||||
/// <summary>
|
||||
/// Sets a monitor callback for connection attempts (used in offline tests).
|
||||
/// </summary>
|
||||
public void SetConnectionMonitor(Action<string> monitor)
|
||||
{
|
||||
_connectionMonitor = monitor;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Sets a monitor callback for DNS lookups (used in offline tests).
|
||||
/// </summary>
|
||||
public void SetDnsMonitor(Action<string> monitor)
|
||||
{
|
||||
_dnsMonitor = monitor;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Records a connection attempt (for offline mode validation).
|
||||
/// </summary>
|
||||
public void RecordConnectionAttempt(string endpoint)
|
||||
{
|
||||
_connectionAttempts.Add(endpoint);
|
||||
_connectionMonitor?.Invoke(endpoint);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all recorded connection attempts.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string> GetConnectionAttempts() => _connectionAttempts;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Helpers
|
||||
|
||||
/// <summary>
|
||||
/// Computes a SHA-256 hash of the given content for determinism validation.
|
||||
/// </summary>
|
||||
public static string ComputeHash(string content)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
var hash = SHA256.HashData(bytes);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes a SHA-256 hash of a JSON object after canonical serialization.
|
||||
/// </summary>
|
||||
public static string ComputeCanonicalHash<T>(T obj)
|
||||
{
|
||||
var json = SerializeCanonical(obj);
|
||||
return ComputeHash(json);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Serializes an object to canonical JSON (sorted keys, no whitespace).
|
||||
/// </summary>
|
||||
public static string SerializeCanonical<T>(T obj)
|
||||
{
|
||||
var options = new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
return JsonSerializer.Serialize(obj, options);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validates that two objects produce identical canonical JSON.
|
||||
/// </summary>
|
||||
public static bool AreDeterministicallyEqual<T>(T obj1, T obj2)
|
||||
{
|
||||
var json1 = SerializeCanonical(obj1);
|
||||
var json2 = SerializeCanonical(obj2);
|
||||
return json1 == json2;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Utilities
|
||||
|
||||
/// <summary>
|
||||
/// Creates a temporary directory for test artifacts.
|
||||
/// </summary>
|
||||
public string CreateTempDirectory()
|
||||
{
|
||||
var path = Path.Combine(Path.GetTempPath(), "stellaops-e2e-tests", Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(path);
|
||||
return path;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a frozen timestamp for deterministic testing.
|
||||
/// </summary>
|
||||
public static DateTimeOffset GetFrozenTimestamp()
|
||||
{
|
||||
return new DateTimeOffset(2024, 12, 29, 12, 0, 0, TimeSpan.Zero);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock logger for the specified type.
|
||||
/// </summary>
|
||||
public static Mock<ILogger<T>> CreateMockLogger<T>()
|
||||
{
|
||||
return new Mock<ILogger<T>>();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
protected static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = true
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
236
src/__Tests/e2e/Integrations/Helpers/MockProviderHelper.cs
Normal file
236
src/__Tests/e2e/Integrations/Helpers/MockProviderHelper.cs
Normal file
@@ -0,0 +1,236 @@
|
||||
// =============================================================================
|
||||
// MockProviderHelper.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: Helper class for creating mock external provider implementations
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Moq;
|
||||
using Moq.Protected;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
|
||||
/// <summary>
|
||||
/// Provides factory methods for creating mock implementations of external provider services.
|
||||
/// </summary>
|
||||
public static class MockProviderHelper
|
||||
{
|
||||
#region Clock Mock
|
||||
|
||||
/// <summary>
|
||||
/// A simple clock interface for deterministic time testing.
|
||||
/// </summary>
|
||||
public interface ITestClock
|
||||
{
|
||||
DateTimeOffset UtcNow { get; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock clock for deterministic time testing.
|
||||
/// </summary>
|
||||
public static Mock<ITestClock> CreateMockClock(DateTimeOffset? frozenTime = null)
|
||||
{
|
||||
var mock = new Mock<ITestClock>();
|
||||
var time = frozenTime ?? new DateTimeOffset(2024, 12, 29, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
mock.Setup(x => x.UtcNow).Returns(time);
|
||||
|
||||
return mock;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Data Generators
|
||||
|
||||
/// <summary>
|
||||
/// Registry source type values for testing.
|
||||
/// </summary>
|
||||
public enum TestRegistrySourceType
|
||||
{
|
||||
DockerHub = 1,
|
||||
Harbor = 2,
|
||||
Ecr = 3,
|
||||
Gcr = 4,
|
||||
Acr = 5,
|
||||
Ghcr = 6
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates test registry source data.
|
||||
/// </summary>
|
||||
public static TestRegistrySource CreateDefaultRegistrySource(
|
||||
string? id = null,
|
||||
TestRegistrySourceType type = TestRegistrySourceType.Harbor,
|
||||
bool isActive = true)
|
||||
{
|
||||
return new TestRegistrySource
|
||||
{
|
||||
Id = id ?? Guid.NewGuid().ToString("N"),
|
||||
TenantId = "test-tenant",
|
||||
Name = "Test Registry Source",
|
||||
Description = "Test source for E2E testing",
|
||||
Type = type,
|
||||
RegistryUrl = "https://registry.example.com",
|
||||
IsActive = isActive,
|
||||
WebhookSecretRef = null,
|
||||
RepositoryAllowlist = [],
|
||||
RepositoryDenylist = [],
|
||||
TagAllowlist = [],
|
||||
TagDenylist = []
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Test registry source data class.
|
||||
/// </summary>
|
||||
public sealed class TestRegistrySource
|
||||
{
|
||||
public required string Id { get; init; }
|
||||
public string? TenantId { get; init; }
|
||||
public required string Name { get; init; }
|
||||
public string? Description { get; init; }
|
||||
public TestRegistrySourceType Type { get; init; }
|
||||
public required string RegistryUrl { get; init; }
|
||||
public bool IsActive { get; init; }
|
||||
public string? WebhookSecretRef { get; init; }
|
||||
public List<string> RepositoryAllowlist { get; init; } = [];
|
||||
public List<string> RepositoryDenylist { get; init; } = [];
|
||||
public List<string> TagAllowlist { get; init; } = [];
|
||||
public List<string> TagDenylist { get; init; } = [];
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SCM Provider Mocks
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock options for Signals service.
|
||||
/// </summary>
|
||||
public static Microsoft.Extensions.Options.IOptions<T> CreateMockOptions<T>(T value) where T : class, new()
|
||||
{
|
||||
var mock = new Mock<Microsoft.Extensions.Options.IOptions<T>>();
|
||||
mock.Setup(x => x.Value).Returns(value);
|
||||
return mock.Object;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Logger Mocks
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock logger that captures log messages.
|
||||
/// </summary>
|
||||
public static (Mock<ILogger<T>> Mock, List<string> Messages) CreateCapturingLogger<T>()
|
||||
{
|
||||
var messages = new List<string>();
|
||||
var mock = new Mock<ILogger<T>>();
|
||||
|
||||
mock.Setup(x => x.Log(
|
||||
It.IsAny<LogLevel>(),
|
||||
It.IsAny<EventId>(),
|
||||
It.IsAny<It.IsAnyType>(),
|
||||
It.IsAny<Exception?>(),
|
||||
It.IsAny<Func<It.IsAnyType, Exception?, string>>()))
|
||||
.Callback<LogLevel, EventId, object, Exception?, Delegate>((level, id, state, ex, formatter) =>
|
||||
{
|
||||
var message = formatter.DynamicInvoke(state, ex) as string;
|
||||
if (message != null)
|
||||
{
|
||||
messages.Add($"[{level}] {message}");
|
||||
}
|
||||
});
|
||||
|
||||
return (mock, messages);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a no-op logger for testing.
|
||||
/// </summary>
|
||||
public static ILogger<T> CreateNullLogger<T>()
|
||||
{
|
||||
return new Mock<ILogger<T>>().Object;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region HTTP Client Mocks
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock HTTP response for testing.
|
||||
/// </summary>
|
||||
public static HttpResponseMessage CreateMockHttpResponse(
|
||||
System.Net.HttpStatusCode statusCode,
|
||||
object? content = null)
|
||||
{
|
||||
var response = new HttpResponseMessage(statusCode);
|
||||
|
||||
if (content != null)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(content);
|
||||
response.Content = new StringContent(json, System.Text.Encoding.UTF8, "application/json");
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock HTTP handler for testing.
|
||||
/// </summary>
|
||||
public static Mock<HttpMessageHandler> CreateMockHttpHandler(
|
||||
Func<HttpRequestMessage, HttpResponseMessage> responseFactory)
|
||||
{
|
||||
var mock = new Mock<HttpMessageHandler>();
|
||||
|
||||
mock.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ReturnsAsync((HttpRequestMessage request, CancellationToken _) => responseFactory(request));
|
||||
|
||||
return mock;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Offline Mode Mocks
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock HTTP handler that simulates network failure (for offline testing).
|
||||
/// </summary>
|
||||
public static Mock<HttpMessageHandler> CreateOfflineHttpHandler()
|
||||
{
|
||||
var mock = new Mock<HttpMessageHandler>();
|
||||
|
||||
mock.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ThrowsAsync(new HttpRequestException("Network is unavailable (offline mode)"));
|
||||
|
||||
return mock;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock HTTP handler that records all requests (for verifying no network calls).
|
||||
/// </summary>
|
||||
public static (Mock<HttpMessageHandler> Handler, List<HttpRequestMessage> Requests) CreateRecordingHttpHandler()
|
||||
{
|
||||
var requests = new List<HttpRequestMessage>();
|
||||
var mock = new Mock<HttpMessageHandler>();
|
||||
|
||||
mock.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.Callback<HttpRequestMessage, CancellationToken>((request, _) => requests.Add(request))
|
||||
.ReturnsAsync(new HttpResponseMessage(System.Net.HttpStatusCode.OK));
|
||||
|
||||
return (mock, requests);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
513
src/__Tests/e2e/Integrations/Helpers/TestCiTemplates.cs
Normal file
513
src/__Tests/e2e/Integrations/Helpers/TestCiTemplates.cs
Normal file
@@ -0,0 +1,513 @@
|
||||
// =============================================================================
|
||||
// TestCiTemplates.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: Test-local CI template generator for E2E tests (avoids CLI dependency)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
|
||||
/// <summary>
|
||||
/// Test-local CI template generator that mirrors the CLI CiTemplates functionality.
|
||||
/// This avoids depending on the CLI project which may have unrelated build issues.
|
||||
/// </summary>
|
||||
public static class TestCiTemplates
|
||||
{
|
||||
private const string DefaultScannerImage = "ghcr.io/stellaops/scanner:latest";
|
||||
|
||||
public static IReadOnlyList<(string path, string content)> GetTemplates(
|
||||
string platform,
|
||||
string templateType,
|
||||
string mode,
|
||||
bool offline,
|
||||
string? scannerImage)
|
||||
{
|
||||
var image = scannerImage ?? DefaultScannerImage;
|
||||
var templates = new List<(string, string)>();
|
||||
|
||||
if (platform is "github" or "all")
|
||||
{
|
||||
templates.AddRange(GetGitHubTemplates(templateType, mode, image, offline));
|
||||
}
|
||||
|
||||
if (platform is "gitlab" or "all")
|
||||
{
|
||||
templates.AddRange(GetGitLabTemplates(templateType, mode, image, offline));
|
||||
}
|
||||
|
||||
if (platform is "gitea" or "all")
|
||||
{
|
||||
templates.AddRange(GetGiteaTemplates(templateType, mode, image, offline));
|
||||
}
|
||||
|
||||
return templates;
|
||||
}
|
||||
|
||||
private static IEnumerable<(string, string)> GetGitHubTemplates(
|
||||
string templateType, string mode, string image, bool offline)
|
||||
{
|
||||
if (templateType is "gate" or "full")
|
||||
{
|
||||
yield return (".github/workflows/stellaops-gate.yml", GetGitHubGateTemplate(mode, image));
|
||||
}
|
||||
|
||||
if (templateType is "scan" or "full")
|
||||
{
|
||||
yield return (".github/workflows/stellaops-scan.yml", GetGitHubScanTemplate(mode, image));
|
||||
}
|
||||
|
||||
if (templateType is "verify" or "full")
|
||||
{
|
||||
yield return (".github/workflows/stellaops-verify.yml", GetGitHubVerifyTemplate(image));
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<(string, string)> GetGitLabTemplates(
|
||||
string templateType, string mode, string image, bool offline)
|
||||
{
|
||||
if (templateType is "gate" or "full")
|
||||
{
|
||||
yield return (".gitlab-ci.yml", GetGitLabPipelineTemplate(templateType, mode, image));
|
||||
}
|
||||
else if (templateType is "scan")
|
||||
{
|
||||
yield return (".gitlab/stellaops-scan.yml", GetGitLabScanTemplate(mode, image));
|
||||
}
|
||||
else if (templateType is "verify")
|
||||
{
|
||||
yield return (".gitlab/stellaops-verify.yml", GetGitLabVerifyTemplate(image));
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<(string, string)> GetGiteaTemplates(
|
||||
string templateType, string mode, string image, bool offline)
|
||||
{
|
||||
if (templateType is "gate" or "full")
|
||||
{
|
||||
yield return (".gitea/workflows/stellaops-gate.yml", GetGiteaGateTemplate(mode, image));
|
||||
}
|
||||
|
||||
if (templateType is "scan" or "full")
|
||||
{
|
||||
yield return (".gitea/workflows/stellaops-scan.yml", GetGiteaScanTemplate(mode, image));
|
||||
}
|
||||
|
||||
if (templateType is "verify" or "full")
|
||||
{
|
||||
yield return (".gitea/workflows/stellaops-verify.yml", GetGiteaVerifyTemplate(image));
|
||||
}
|
||||
}
|
||||
|
||||
private static string GetGitHubGateTemplate(string mode, string image) => """
|
||||
# StellaOps Release Gate Workflow
|
||||
# Generated by: stella ci init --platform github --template gate
|
||||
# Documentation: https://docs.stellaops.io/ci/github-actions
|
||||
|
||||
name: StellaOps Release Gate
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, release/*]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
security-events: write
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
gate:
|
||||
name: Release Gate Evaluation
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up StellaOps CLI
|
||||
uses: stellaops/setup-cli@v1
|
||||
with:
|
||||
version: latest
|
||||
|
||||
- name: Authenticate with OIDC
|
||||
uses: stellaops/auth@v1
|
||||
with:
|
||||
audience: stellaops
|
||||
|
||||
- name: Build Container Image
|
||||
id: build
|
||||
run: |
|
||||
IMAGE_TAG="${{ github.sha }}"
|
||||
docker build -t app:$IMAGE_TAG .
|
||||
echo "image=app:$IMAGE_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Scan Image
|
||||
id: scan
|
||||
run: |
|
||||
stella scan image ${{ steps.build.outputs.image }} \
|
||||
--format sarif \
|
||||
--output results.sarif
|
||||
|
||||
- name: Upload SARIF
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
|
||||
- name: Evaluate Gate
|
||||
id: gate
|
||||
run: |
|
||||
stella gate evaluate \
|
||||
--image ${{ steps.build.outputs.image }} \
|
||||
--baseline production \
|
||||
--output json > gate-result.json
|
||||
|
||||
EXIT_CODE=$(jq -r '.exitCode' gate-result.json)
|
||||
echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Gate Summary
|
||||
run: |
|
||||
echo "## Release Gate Result" >> $GITHUB_STEP_SUMMARY
|
||||
stella gate evaluate \
|
||||
--image ${{ steps.build.outputs.image }} \
|
||||
--baseline production \
|
||||
--output markdown >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Check Gate Status
|
||||
if: steps.gate.outputs.exit_code != '0'
|
||||
run: |
|
||||
echo "::error::Release gate check failed"
|
||||
exit ${{ steps.gate.outputs.exit_code }}
|
||||
""";
|
||||
|
||||
private static string GetGitHubScanTemplate(string mode, string image) => """
|
||||
# StellaOps Container Scan Workflow
|
||||
# Generated by: stella ci init --platform github --template scan
|
||||
# Documentation: https://docs.stellaops.io/ci/github-actions
|
||||
|
||||
name: StellaOps Container Scan
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
schedule:
|
||||
- cron: '0 6 * * *'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
security-events: write
|
||||
packages: read
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
name: Container Scan
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up StellaOps CLI
|
||||
uses: stellaops/setup-cli@v1
|
||||
|
||||
- name: Authenticate
|
||||
uses: stellaops/auth@v1
|
||||
|
||||
- name: Build Image
|
||||
id: build
|
||||
run: |
|
||||
docker build -t scan-target:${{ github.sha }} .
|
||||
echo "image=scan-target:${{ github.sha }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Run Scan
|
||||
run: |
|
||||
stella scan image ${{ steps.build.outputs.image }} \
|
||||
--sbom-output sbom.cdx.json \
|
||||
--format sarif \
|
||||
--output scan.sarif
|
||||
|
||||
- name: Upload SBOM
|
||||
run: |
|
||||
stella sbom upload sbom.cdx.json \
|
||||
--image ${{ steps.build.outputs.image }}
|
||||
|
||||
- name: Upload SARIF
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
with:
|
||||
sarif_file: scan.sarif
|
||||
|
||||
- name: Scan Summary
|
||||
run: |
|
||||
echo "## Scan Results" >> $GITHUB_STEP_SUMMARY
|
||||
stella scan image ${{ steps.build.outputs.image }} \
|
||||
--format markdown >> $GITHUB_STEP_SUMMARY
|
||||
""";
|
||||
|
||||
private static string GetGitHubVerifyTemplate(string image) => """
|
||||
# StellaOps Verification Workflow
|
||||
# Generated by: stella ci init --platform github --template verify
|
||||
# Documentation: https://docs.stellaops.io/ci/github-actions
|
||||
|
||||
name: StellaOps Verification
|
||||
|
||||
on:
|
||||
deployment:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
image:
|
||||
description: 'Image to verify'
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
verify:
|
||||
name: Verify Attestations
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Set up StellaOps CLI
|
||||
uses: stellaops/setup-cli@v1
|
||||
|
||||
- name: Authenticate
|
||||
uses: stellaops/auth@v1
|
||||
|
||||
- name: Verify Image
|
||||
run: |
|
||||
stella verify image ${{ inputs.image || github.event.deployment.payload.image }} \
|
||||
--require-sbom \
|
||||
--require-scan \
|
||||
--require-signature
|
||||
""";
|
||||
|
||||
private static string GetGitLabPipelineTemplate(string templateType, string mode, string image) => """
|
||||
# StellaOps GitLab CI Pipeline
|
||||
# Generated by: stella ci init --platform gitlab --template gate
|
||||
# Documentation: https://docs.stellaops.io/ci/gitlab-ci
|
||||
|
||||
stages:
|
||||
- build
|
||||
- scan
|
||||
- gate
|
||||
- deploy
|
||||
|
||||
variables:
|
||||
STELLAOPS_BACKEND_URL: $STELLAOPS_BACKEND_URL
|
||||
DOCKER_TLS_CERTDIR: "/certs"
|
||||
|
||||
.stellaops-setup:
|
||||
before_script:
|
||||
- curl -fsSL https://get.stellaops.io/cli | sh
|
||||
- export PATH="$HOME/.stellaops/bin:$PATH"
|
||||
|
||||
build:
|
||||
stage: build
|
||||
image: docker:24-dind
|
||||
services:
|
||||
- docker:24-dind
|
||||
script:
|
||||
- docker build -t $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA .
|
||||
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "main" || $CI_MERGE_REQUEST_ID
|
||||
|
||||
scan:
|
||||
stage: scan
|
||||
extends: .stellaops-setup
|
||||
script:
|
||||
- stella scan image $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
|
||||
--sbom-output sbom.cdx.json
|
||||
--format json
|
||||
--output scan-results.json
|
||||
- stella sbom upload sbom.cdx.json
|
||||
--image $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
|
||||
artifacts:
|
||||
paths:
|
||||
- sbom.cdx.json
|
||||
- scan-results.json
|
||||
reports:
|
||||
container_scanning: scan-results.json
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "main" || $CI_MERGE_REQUEST_ID
|
||||
|
||||
gate:
|
||||
stage: gate
|
||||
extends: .stellaops-setup
|
||||
script:
|
||||
- |
|
||||
stella gate evaluate \
|
||||
--image $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA \
|
||||
--baseline production \
|
||||
--output json > gate-result.json
|
||||
|
||||
EXIT_CODE=$(jq -r '.exitCode' gate-result.json)
|
||||
if [ "$EXIT_CODE" != "0" ]; then
|
||||
echo "Release gate failed"
|
||||
exit $EXIT_CODE
|
||||
fi
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "main"
|
||||
|
||||
deploy:
|
||||
stage: deploy
|
||||
script:
|
||||
- echo "Deploy $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA"
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "main"
|
||||
needs:
|
||||
- gate
|
||||
""";
|
||||
|
||||
private static string GetGitLabScanTemplate(string mode, string image) => """
|
||||
# StellaOps GitLab CI Scan Template
|
||||
# Include in your .gitlab-ci.yml: include: '.gitlab/stellaops-scan.yml'
|
||||
|
||||
.stellaops-scan:
|
||||
image: docker:24-dind
|
||||
services:
|
||||
- docker:24-dind
|
||||
before_script:
|
||||
- curl -fsSL https://get.stellaops.io/cli | sh
|
||||
- export PATH="$HOME/.stellaops/bin:$PATH"
|
||||
script:
|
||||
- stella scan image $SCAN_IMAGE
|
||||
--sbom-output sbom.cdx.json
|
||||
--format json
|
||||
--output scan-results.json
|
||||
artifacts:
|
||||
paths:
|
||||
- sbom.cdx.json
|
||||
- scan-results.json
|
||||
""";
|
||||
|
||||
private static string GetGitLabVerifyTemplate(string image) => """
|
||||
# StellaOps GitLab CI Verify Template
|
||||
# Include in your .gitlab-ci.yml: include: '.gitlab/stellaops-verify.yml'
|
||||
|
||||
.stellaops-verify:
|
||||
before_script:
|
||||
- curl -fsSL https://get.stellaops.io/cli | sh
|
||||
- export PATH="$HOME/.stellaops/bin:$PATH"
|
||||
script:
|
||||
- stella verify image $VERIFY_IMAGE
|
||||
--require-sbom
|
||||
--require-scan
|
||||
--require-signature
|
||||
""";
|
||||
|
||||
private static string GetGiteaGateTemplate(string mode, string image) => """
|
||||
# StellaOps Gitea Actions Release Gate Workflow
|
||||
# Generated by: stella ci init --platform gitea --template gate
|
||||
# Documentation: https://docs.stellaops.io/ci/gitea-actions
|
||||
|
||||
name: StellaOps Release Gate
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, release/*]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
gate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up StellaOps CLI
|
||||
run: |
|
||||
curl -fsSL https://get.stellaops.io/cli | sh
|
||||
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Build Image
|
||||
run: |
|
||||
docker build -t app:${{ gitea.sha }} .
|
||||
|
||||
- name: Scan and Gate
|
||||
run: |
|
||||
stella scan image app:${{ gitea.sha }}
|
||||
stella gate evaluate --image app:${{ gitea.sha }} --baseline production
|
||||
""";
|
||||
|
||||
private static string GetGiteaScanTemplate(string mode, string image) => """
|
||||
# StellaOps Gitea Actions Scan Workflow
|
||||
# Generated by: stella ci init --platform gitea --template scan
|
||||
|
||||
name: StellaOps Container Scan
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
scan:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up StellaOps
|
||||
run: |
|
||||
curl -fsSL https://get.stellaops.io/cli | sh
|
||||
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Build and Scan
|
||||
run: |
|
||||
docker build -t scan-target:${{ gitea.sha }} .
|
||||
stella scan image scan-target:${{ gitea.sha }} \
|
||||
--sbom-output sbom.cdx.json
|
||||
""";
|
||||
|
||||
private static string GetGiteaVerifyTemplate(string image) => """
|
||||
# StellaOps Gitea Actions Verify Workflow
|
||||
# Generated by: stella ci init --platform gitea --template verify
|
||||
|
||||
name: StellaOps Verification
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
image:
|
||||
description: 'Image to verify'
|
||||
required: true
|
||||
|
||||
env:
|
||||
STELLAOPS_BACKEND_URL: ${{ secrets.STELLAOPS_BACKEND_URL }}
|
||||
|
||||
jobs:
|
||||
verify:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up StellaOps
|
||||
run: |
|
||||
curl -fsSL https://get.stellaops.io/cli | sh
|
||||
echo "$HOME/.stellaops/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Verify
|
||||
run: |
|
||||
stella verify image ${{ inputs.image }} \
|
||||
--require-sbom \
|
||||
--require-scan
|
||||
""";
|
||||
}
|
||||
430
src/__Tests/e2e/Integrations/Helpers/WebhookTestHelper.cs
Normal file
430
src/__Tests/e2e/Integrations/Helpers/WebhookTestHelper.cs
Normal file
@@ -0,0 +1,430 @@
|
||||
// =============================================================================
|
||||
// WebhookTestHelper.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: Utility class for webhook testing operations
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
|
||||
/// <summary>
|
||||
/// Provides utility methods for webhook testing, including signature generation
|
||||
/// and payload manipulation.
|
||||
/// </summary>
|
||||
public static class WebhookTestHelper
|
||||
{
|
||||
#region Signature Generation
|
||||
|
||||
/// <summary>
|
||||
/// Generates an HMAC-SHA256 signature for a webhook payload.
|
||||
/// </summary>
|
||||
/// <param name="payload">The webhook payload.</param>
|
||||
/// <param name="secret">The webhook secret.</param>
|
||||
/// <param name="prefix">Optional prefix for the signature (e.g., "sha256=").</param>
|
||||
/// <returns>The generated signature.</returns>
|
||||
public static string GenerateHmacSha256Signature(string payload, string secret, string prefix = "sha256=")
|
||||
{
|
||||
var secretBytes = Encoding.UTF8.GetBytes(secret);
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payload);
|
||||
|
||||
using var hmac = new HMACSHA256(secretBytes);
|
||||
var hash = hmac.ComputeHash(payloadBytes);
|
||||
|
||||
return prefix + Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a GitHub-style webhook signature.
|
||||
/// </summary>
|
||||
public static string GenerateGitHubSignature(string payload, string secret)
|
||||
{
|
||||
return GenerateHmacSha256Signature(payload, secret, "sha256=");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a GitLab-style webhook token.
|
||||
/// GitLab uses X-Gitlab-Token header with the secret directly.
|
||||
/// </summary>
|
||||
public static string GenerateGitLabToken(string secret)
|
||||
{
|
||||
return secret;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a Gitea-style webhook signature.
|
||||
/// </summary>
|
||||
public static string GenerateGiteaSignature(string payload, string secret)
|
||||
{
|
||||
return GenerateHmacSha256Signature(payload, secret, "sha256=");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a Harbor-style webhook signature.
|
||||
/// </summary>
|
||||
public static string GenerateHarborSignature(string payload, string secret)
|
||||
{
|
||||
return GenerateHmacSha256Signature(payload, secret, "sha256=");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a Docker Hub-style webhook signature.
|
||||
/// </summary>
|
||||
public static string GenerateDockerHubSignature(string payload, string secret)
|
||||
{
|
||||
return GenerateHmacSha256Signature(payload, secret, "sha256=");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Payload Manipulation
|
||||
|
||||
/// <summary>
|
||||
/// Modifies a JSON payload by updating a specific field.
|
||||
/// </summary>
|
||||
/// <param name="payload">The original JSON payload.</param>
|
||||
/// <param name="jsonPath">Dot-separated path to the field (e.g., "repository.name").</param>
|
||||
/// <param name="newValue">The new value to set.</param>
|
||||
/// <returns>The modified payload.</returns>
|
||||
public static string ModifyPayloadField(string payload, string jsonPath, object newValue)
|
||||
{
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
var dict = JsonSerializer.Deserialize<Dictionary<string, object>>(payload, JsonOptions)
|
||||
?? throw new InvalidOperationException("Failed to parse payload");
|
||||
|
||||
SetNestedValue(dict, jsonPath.Split('.'), newValue);
|
||||
|
||||
return JsonSerializer.Serialize(dict, JsonOptions);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Corrupts a payload by modifying its hash/digest field.
|
||||
/// </summary>
|
||||
public static string CorruptPayloadDigest(string payload)
|
||||
{
|
||||
// Try common digest field names
|
||||
var digestFields = new[] { "digest", "image-digest", "sha", "hash" };
|
||||
|
||||
foreach (var field in digestFields)
|
||||
{
|
||||
if (payload.Contains($"\"{field}\""))
|
||||
{
|
||||
// Replace any sha256 digest with a corrupted one
|
||||
return System.Text.RegularExpressions.Regex.Replace(
|
||||
payload,
|
||||
@"sha256:[a-f0-9]{64}",
|
||||
"sha256:0000000000000000000000000000000000000000000000000000000000000000");
|
||||
}
|
||||
}
|
||||
|
||||
return payload;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a minimal valid webhook payload for testing.
|
||||
/// </summary>
|
||||
public static string CreateMinimalPayload(string provider, string eventType = "push")
|
||||
{
|
||||
return provider.ToLowerInvariant() switch
|
||||
{
|
||||
"harbor" => CreateMinimalHarborPayload(),
|
||||
"dockerhub" => CreateMinimalDockerHubPayload(),
|
||||
"acr" => CreateMinimalAcrPayload(),
|
||||
"ecr" => CreateMinimalEcrPayload(),
|
||||
"gcr" => CreateMinimalGcrPayload(),
|
||||
"ghcr" => CreateMinimalGhcrPayload(),
|
||||
"github" => CreateMinimalGitHubPushPayload(),
|
||||
"gitlab" => CreateMinimalGitLabPushPayload(),
|
||||
"gitea" => CreateMinimalGiteaPushPayload(),
|
||||
_ => throw new ArgumentException($"Unknown provider: {provider}")
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Minimal Payload Generators
|
||||
|
||||
private static string CreateMinimalHarborPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
type = "PUSH_ARTIFACT",
|
||||
occur_at = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
|
||||
@operator = "test",
|
||||
event_data = new
|
||||
{
|
||||
resources = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
digest = "sha256:test123",
|
||||
tag = "latest",
|
||||
resource_url = "harbor.example.com/library/test:latest"
|
||||
}
|
||||
},
|
||||
repository = new
|
||||
{
|
||||
name = "test",
|
||||
repo_full_name = "library/test"
|
||||
}
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalDockerHubPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
push_data = new
|
||||
{
|
||||
tag = "latest",
|
||||
pushed_at = DateTimeOffset.UtcNow.ToUnixTimeSeconds()
|
||||
},
|
||||
repository = new
|
||||
{
|
||||
repo_name = "stellaops/test",
|
||||
name = "test",
|
||||
@namespace = "stellaops"
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalAcrPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
id = Guid.NewGuid().ToString(),
|
||||
action = "push",
|
||||
target = new
|
||||
{
|
||||
repository = "stellaops/test",
|
||||
tag = "latest",
|
||||
digest = "sha256:test123"
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalEcrPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new Dictionary<string, object>
|
||||
{
|
||||
["version"] = "0",
|
||||
["id"] = Guid.NewGuid().ToString(),
|
||||
["detail-type"] = "ECR Image Action",
|
||||
["source"] = "aws.ecr",
|
||||
["detail"] = new Dictionary<string, object>
|
||||
{
|
||||
["action-type"] = "PUSH",
|
||||
["repository-name"] = "stellaops/test",
|
||||
["image-tag"] = "latest"
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalGcrPayload()
|
||||
{
|
||||
var innerData = new { action = "INSERT", tag = "latest", digest = "sha256:test123" };
|
||||
var base64Data = Convert.ToBase64String(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(innerData)));
|
||||
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
message = new
|
||||
{
|
||||
data = base64Data,
|
||||
messageId = "gcr-test-123"
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalGhcrPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
action = "published",
|
||||
package = new
|
||||
{
|
||||
name = "test-package",
|
||||
package_type = "container",
|
||||
package_version = new
|
||||
{
|
||||
version = "v1.0.0"
|
||||
}
|
||||
},
|
||||
repository = new
|
||||
{
|
||||
full_name = "stellaops/test"
|
||||
}
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalGitHubPushPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
@ref = "refs/heads/main",
|
||||
after = "abc123",
|
||||
repository = new
|
||||
{
|
||||
full_name = "stellaops/test",
|
||||
name = "test",
|
||||
default_branch = "main"
|
||||
},
|
||||
pusher = new { name = "test-user" },
|
||||
sender = new { login = "test-user", type = "User" },
|
||||
commits = Array.Empty<object>()
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalGitLabPushPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
object_kind = "push",
|
||||
@ref = "refs/heads/main",
|
||||
after = "abc123",
|
||||
project = new
|
||||
{
|
||||
path_with_namespace = "stellaops/test",
|
||||
name = "test",
|
||||
default_branch = "main"
|
||||
},
|
||||
user_name = "test-user",
|
||||
commits = Array.Empty<object>()
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
private static string CreateMinimalGiteaPushPayload()
|
||||
{
|
||||
return JsonSerializer.Serialize(new
|
||||
{
|
||||
@ref = "refs/heads/main",
|
||||
after = "abc123",
|
||||
repository = new
|
||||
{
|
||||
full_name = "stellaops/test",
|
||||
name = "test",
|
||||
default_branch = "main"
|
||||
},
|
||||
pusher = new { login = "test-user" },
|
||||
sender = new { login = "test-user" },
|
||||
commits = Array.Empty<object>()
|
||||
}, JsonOptions);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Validation Helpers
|
||||
|
||||
/// <summary>
|
||||
/// Validates that a webhook payload contains required fields.
|
||||
/// </summary>
|
||||
public static bool ValidateRequiredFields(string payload, params string[] fields)
|
||||
{
|
||||
try
|
||||
{
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
foreach (var field in fields)
|
||||
{
|
||||
if (!HasNestedProperty(root, field.Split('.')))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extracts a field value from a JSON payload.
|
||||
/// </summary>
|
||||
public static string? ExtractField(string payload, string jsonPath)
|
||||
{
|
||||
try
|
||||
{
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var element = doc.RootElement;
|
||||
|
||||
foreach (var part in jsonPath.Split('.'))
|
||||
{
|
||||
if (!element.TryGetProperty(part, out element))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return element.ValueKind == JsonValueKind.String
|
||||
? element.GetString()
|
||||
: element.GetRawText();
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Private Helpers
|
||||
|
||||
private static void SetNestedValue(Dictionary<string, object> dict, string[] path, object value)
|
||||
{
|
||||
var current = dict;
|
||||
for (var i = 0; i < path.Length - 1; i++)
|
||||
{
|
||||
if (!current.TryGetValue(path[i], out var next))
|
||||
{
|
||||
next = new Dictionary<string, object>();
|
||||
current[path[i]] = next;
|
||||
}
|
||||
|
||||
if (next is JsonElement jsonElement)
|
||||
{
|
||||
var nested = JsonSerializer.Deserialize<Dictionary<string, object>>(jsonElement.GetRawText());
|
||||
if (nested != null)
|
||||
{
|
||||
current[path[i]] = nested;
|
||||
current = nested;
|
||||
}
|
||||
}
|
||||
else if (next is Dictionary<string, object> nestedDict)
|
||||
{
|
||||
current = nestedDict;
|
||||
}
|
||||
}
|
||||
|
||||
current[path[^1]] = value;
|
||||
}
|
||||
|
||||
private static bool HasNestedProperty(JsonElement element, string[] path)
|
||||
{
|
||||
var current = element;
|
||||
foreach (var part in path)
|
||||
{
|
||||
if (current.ValueKind != JsonValueKind.Object ||
|
||||
!current.TryGetProperty(part, out current))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = true
|
||||
};
|
||||
|
||||
#endregion
|
||||
}
|
||||
396
src/__Tests/e2e/Integrations/OfflineModeTests.cs
Normal file
396
src/__Tests/e2e/Integrations/OfflineModeTests.cs
Normal file
@@ -0,0 +1,396 @@
|
||||
// =============================================================================
|
||||
// OfflineModeTests.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: E2E tests for air-gap/offline integration flows
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
using StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations;
|
||||
|
||||
/// <summary>
|
||||
/// E2E tests for air-gap/offline integration operation.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Tests cover:
|
||||
/// - Offline mode template generation
|
||||
/// - No network calls during offline operation
|
||||
/// - Cache priming for offline mode
|
||||
/// - Replay bundle compatibility
|
||||
/// </remarks>
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "Offline")]
|
||||
[Trait("Category", "AirGap")]
|
||||
public class OfflineModeTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public OfflineModeTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Offline Template Generation Tests
|
||||
|
||||
[Fact(DisplayName = "Templates can be generated without network")]
|
||||
public void Templates_CanBeGenerated_WithoutNetwork()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
_fixture.SetConnectionMonitor(endpoint =>
|
||||
{
|
||||
throw new InvalidOperationException($"Network call attempted in offline mode: {endpoint}");
|
||||
});
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Generate all template types
|
||||
var githubTemplates = TestCiTemplates.GetTemplates("github", "full", "basic", true, null);
|
||||
var gitlabTemplates = TestCiTemplates.GetTemplates("gitlab", "full", "basic", true, null);
|
||||
var giteaTemplates = TestCiTemplates.GetTemplates("gitea", "full", "basic", true, null);
|
||||
|
||||
// Assert
|
||||
githubTemplates.Should().NotBeEmpty("GitHub templates should be generated offline");
|
||||
gitlabTemplates.Should().NotBeEmpty("GitLab templates should be generated offline");
|
||||
giteaTemplates.Should().NotBeEmpty("Gitea templates should be generated offline");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Offline templates are self-contained")]
|
||||
public void OfflineTemplates_AreSelfContained()
|
||||
{
|
||||
// Arrange & Act
|
||||
var templates = TestCiTemplates.GetTemplates("github", "scan", "basic", true, null);
|
||||
var (_, content) = templates[0];
|
||||
|
||||
// Assert - Templates should not require external lookups during generation
|
||||
// They can reference external actions, but generation itself is offline
|
||||
content.Should().NotBeNullOrEmpty();
|
||||
content.Should().Contain("stella scan");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region No Network Calls Tests
|
||||
|
||||
[Fact(DisplayName = "Webhook parsing requires no network calls")]
|
||||
public void WebhookParsing_RequiresNoNetworkCalls()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Parse all webhook fixtures
|
||||
var harborPayload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var dockerHubPayload = _fixture.LoadRegistryFixture("dockerhub-push.json");
|
||||
var githubPayload = _fixture.LoadScmFixture("github-push.json");
|
||||
|
||||
// Validate payloads can be read
|
||||
WebhookTestHelper.ValidateRequiredFields(harborPayload, "type", "event_data").Should().BeTrue();
|
||||
WebhookTestHelper.ValidateRequiredFields(dockerHubPayload, "push_data", "repository").Should().BeTrue();
|
||||
WebhookTestHelper.ValidateRequiredFields(githubPayload, "ref", "repository").Should().BeTrue();
|
||||
|
||||
// Assert
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"No network calls expected during webhook parsing. Calls: {string.Join(", ", connectionAttempts)}");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Signature validation requires no network calls")]
|
||||
public void SignatureValidation_RequiresNoNetworkCalls()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Validate signatures for all providers
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-secret";
|
||||
|
||||
var harborSig = WebhookTestHelper.GenerateHarborSignature(payload, secret);
|
||||
var dockerHubSig = WebhookTestHelper.GenerateDockerHubSignature(payload, secret);
|
||||
var githubSig = WebhookTestHelper.GenerateGitHubSignature(payload, secret);
|
||||
var giteaSig = WebhookTestHelper.GenerateGiteaSignature(payload, secret);
|
||||
|
||||
// Assert
|
||||
harborSig.Should().StartWith("sha256=");
|
||||
dockerHubSig.Should().StartWith("sha256=");
|
||||
githubSig.Should().StartWith("sha256=");
|
||||
giteaSig.Should().StartWith("sha256=");
|
||||
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"No network calls expected during signature validation. Calls: {string.Join(", ", connectionAttempts)}");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Event mapping requires no network calls")]
|
||||
public void EventMapping_RequiresNoNetworkCalls()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Map SCM events
|
||||
var githubPayload = _fixture.LoadScmFixture("github-push.json");
|
||||
var gitlabPayload = _fixture.LoadScmFixture("gitlab-push.json");
|
||||
var giteaPayload = _fixture.LoadScmFixture("gitea-push.json");
|
||||
|
||||
var githubMapper = new StellaOps.Signals.Scm.Webhooks.GitHubEventMapper();
|
||||
var gitlabMapper = new StellaOps.Signals.Scm.Webhooks.GitLabEventMapper();
|
||||
var giteaMapper = new StellaOps.Signals.Scm.Webhooks.GiteaEventMapper();
|
||||
|
||||
var githubJson = System.Text.Json.JsonDocument.Parse(githubPayload).RootElement;
|
||||
var gitlabJson = System.Text.Json.JsonDocument.Parse(gitlabPayload).RootElement;
|
||||
var giteaJson = System.Text.Json.JsonDocument.Parse(giteaPayload).RootElement;
|
||||
|
||||
var result1 = githubMapper.Map("push", "delivery-1", githubJson);
|
||||
var result2 = gitlabMapper.Map("Push Hook", "delivery-2", gitlabJson);
|
||||
var result3 = giteaMapper.Map("push", "delivery-3", giteaJson);
|
||||
|
||||
// Assert
|
||||
result1.Should().NotBeNull();
|
||||
result2.Should().NotBeNull();
|
||||
result3.Should().NotBeNull();
|
||||
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"No network calls expected during event mapping. Calls: {string.Join(", ", connectionAttempts)}");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cache Priming Tests
|
||||
|
||||
[Fact(DisplayName = "Fixture loading works without network")]
|
||||
public void FixtureLoading_WorksWithoutNetwork()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Load all fixture categories
|
||||
var registryFixtures = _fixture.GetFixtureFiles("Registry", "*.json");
|
||||
var scmFixtures = _fixture.GetFixtureFiles("Scm", "*.json");
|
||||
var ciFixtures = _fixture.GetFixtureFiles("CiTemplates", "*.*");
|
||||
|
||||
// Assert
|
||||
registryFixtures.Should().NotBeEmpty("Registry fixtures should be available offline");
|
||||
scmFixtures.Should().NotBeEmpty("SCM fixtures should be available offline");
|
||||
ciFixtures.Should().NotBeEmpty("CI template fixtures should be available offline");
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Fixtures are cached after first load")]
|
||||
public void Fixtures_AreCachedAfterFirstLoad()
|
||||
{
|
||||
// Act - Load same fixture multiple times
|
||||
var fixture1 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var fixture2 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var fixture3 = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Assert - All should return the same content (from cache)
|
||||
fixture1.Should().Be(fixture2);
|
||||
fixture2.Should().Be(fixture3);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Replay Bundle Compatibility Tests
|
||||
|
||||
[Fact(DisplayName = "Webhook payloads can be replayed deterministically")]
|
||||
public void WebhookPayloads_CanBeReplayed_Deterministically()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var timestamp = IntegrationTestFixture.GetFrozenTimestamp();
|
||||
|
||||
// Act - Compute hash multiple times
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(payload);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(payload);
|
||||
var hash3 = IntegrationTestFixture.ComputeHash(payload);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Event mapping is deterministic for replay")]
|
||||
public void EventMapping_IsDeterministic_ForReplay()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var payloadJson = System.Text.Json.JsonDocument.Parse(payload).RootElement;
|
||||
var mapper = new StellaOps.Signals.Scm.Webhooks.GitHubEventMapper();
|
||||
|
||||
// Act - Map same event multiple times
|
||||
var result1 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
var result2 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
var result3 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result1.Should().NotBeNull();
|
||||
IntegrationTestFixture.AreDeterministicallyEqual(result1, result2).Should().BeTrue();
|
||||
IntegrationTestFixture.AreDeterministicallyEqual(result2, result3).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Template generation is deterministic for replay")]
|
||||
public void TemplateGeneration_IsDeterministic_ForReplay()
|
||||
{
|
||||
// Act - Generate templates multiple times with same inputs
|
||||
var templates1 = TestCiTemplates.GetTemplates("github", "gate", "basic", true, null);
|
||||
var templates2 = TestCiTemplates.GetTemplates("github", "gate", "basic", true, null);
|
||||
var templates3 = TestCiTemplates.GetTemplates("github", "gate", "basic", true, null);
|
||||
|
||||
// Assert
|
||||
templates1.Count.Should().Be(templates2.Count);
|
||||
templates2.Count.Should().Be(templates3.Count);
|
||||
|
||||
var hash1 = IntegrationTestFixture.ComputeHash(templates1[0].content);
|
||||
var hash2 = IntegrationTestFixture.ComputeHash(templates2[0].content);
|
||||
var hash3 = IntegrationTestFixture.ComputeHash(templates3[0].content);
|
||||
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Air-Gap Integration Flow Tests
|
||||
|
||||
[Fact(DisplayName = "Complete offline flow executes without network")]
|
||||
public async Task CompleteOfflineFlow_ExecutesWithoutNetwork()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Simulate complete offline integration flow
|
||||
|
||||
// 1. Load fixtures
|
||||
var registryPayload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var scmPayload = _fixture.LoadScmFixture("github-push.json");
|
||||
|
||||
// 2. Generate CI templates
|
||||
var templates = TestCiTemplates.GetTemplates("all", "full", "basic", true, null);
|
||||
|
||||
// 3. Parse webhook payloads
|
||||
var registryValid = WebhookTestHelper.ValidateRequiredFields(registryPayload, "type", "event_data");
|
||||
var scmValid = WebhookTestHelper.ValidateRequiredFields(scmPayload, "ref", "repository");
|
||||
|
||||
// 4. Generate and verify signatures
|
||||
var signature = WebhookTestHelper.GenerateHarborSignature(registryPayload, "test-secret");
|
||||
|
||||
// 5. Map SCM events
|
||||
var mapper = new StellaOps.Signals.Scm.Webhooks.GitHubEventMapper();
|
||||
var payloadJson = System.Text.Json.JsonDocument.Parse(scmPayload).RootElement;
|
||||
var mappedEvent = mapper.Map("push", "delivery-offline", payloadJson);
|
||||
|
||||
// Assert
|
||||
registryValid.Should().BeTrue();
|
||||
scmValid.Should().BeTrue();
|
||||
templates.Should().NotBeEmpty();
|
||||
signature.Should().NotBeNullOrEmpty();
|
||||
mappedEvent.Should().NotBeNull();
|
||||
|
||||
connectionAttempts.Should().BeEmpty(
|
||||
$"Complete offline flow should not make network calls. Calls: {string.Join(", ", connectionAttempts)}");
|
||||
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Offline mode prevents external URL fetching")]
|
||||
public void OfflineMode_PreventsExternalUrlFetching()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var dnsLookups = new List<string>();
|
||||
_fixture.SetDnsMonitor(hostname => dnsLookups.Add(hostname));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - Operations that could potentially trigger network calls
|
||||
var templates = TestCiTemplates.GetTemplates("github", "gate", "basic", true, null);
|
||||
|
||||
// Assert - No DNS lookups should occur
|
||||
dnsLookups.Should().BeEmpty(
|
||||
$"No DNS lookups expected in offline mode. Lookups: {string.Join(", ", dnsLookups)}");
|
||||
|
||||
templates.Should().NotBeEmpty();
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Offline Error Handling Tests
|
||||
|
||||
[Fact(DisplayName = "Missing fixture throws clear error")]
|
||||
public void MissingFixture_ThrowsClearError()
|
||||
{
|
||||
// Act & Assert
|
||||
Action act = () => _fixture.LoadRegistryFixture("nonexistent-fixture.json");
|
||||
|
||||
act.Should().Throw<FileNotFoundException>()
|
||||
.WithMessage("*nonexistent-fixture.json*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Invalid JSON fixture can be detected")]
|
||||
public void InvalidJsonFixture_CanBeDetected()
|
||||
{
|
||||
// Arrange
|
||||
var invalidPayload = "{ invalid json syntax }";
|
||||
|
||||
// Act
|
||||
var isValid = WebhookTestHelper.ValidateRequiredFields(invalidPayload, "type");
|
||||
|
||||
// Assert
|
||||
isValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
277
src/__Tests/e2e/Integrations/README.md
Normal file
277
src/__Tests/e2e/Integrations/README.md
Normal file
@@ -0,0 +1,277 @@
|
||||
# Integration E2E Tests
|
||||
|
||||
This directory contains end-to-end tests for StellaOps integration workflows including registry webhooks, SCM (GitHub/GitLab/Gitea) webhooks, and CI template generation.
|
||||
|
||||
## Overview
|
||||
|
||||
The integration E2E test suite validates:
|
||||
|
||||
1. **Registry Webhook Processing** - Parsing and validation of webhooks from container registries (Harbor, Docker Hub, ACR, ECR, GCR, GHCR)
|
||||
2. **SCM Webhook Ingestion** - Event mapping for GitHub, GitLab, and Gitea webhook payloads
|
||||
3. **CI Template Generation** - Deterministic generation of CI/CD templates for all supported platforms
|
||||
4. **Offline/Air-Gap Operation** - Validation that all operations work without network access
|
||||
5. **Determinism** - Ensuring stable ordering and reproducible hashes across all outputs
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
src/__Tests/e2e/Integrations/
|
||||
├── Fixtures/
|
||||
│ └── IntegrationTestFixture.cs # Base fixture class for all tests
|
||||
├── Helpers/
|
||||
│ ├── WebhookTestHelper.cs # Webhook testing utilities
|
||||
│ └── MockProviderHelper.cs # Mock provider factory methods
|
||||
├── RegistryWebhookTests.cs # Registry webhook parsing tests
|
||||
├── ScmWebhookTests.cs # SCM webhook ingestion tests
|
||||
├── CiTemplateTests.cs # CI template generation tests
|
||||
├── OfflineModeTests.cs # Air-gap/offline operation tests
|
||||
├── DeterminismTests.cs # Determinism validation tests
|
||||
├── StellaOps.Integration.E2E.Integrations.csproj
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Test Data Location
|
||||
|
||||
Test fixtures are located in `src/__Tests/__Datasets/Integrations/`:
|
||||
|
||||
```
|
||||
src/__Tests/__Datasets/Integrations/
|
||||
├── Registry/
|
||||
│ ├── harbor-push-v2.json # Harbor PUSH_ARTIFACT webhook
|
||||
│ ├── dockerhub-push.json # Docker Hub push webhook
|
||||
│ ├── acr-push.json # Azure Container Registry push
|
||||
│ ├── ecr-push.json # AWS ECR CloudWatch event
|
||||
│ ├── gcr-push.json # Google Container Registry Pub/Sub message
|
||||
│ └── ghcr-package-published.json # GitHub Container Registry package event
|
||||
├── Scm/
|
||||
│ ├── github-push.json # GitHub push event
|
||||
│ ├── github-pull-request.json # GitHub PR opened event
|
||||
│ ├── github-workflow-run.json # GitHub workflow completed event
|
||||
│ ├── gitlab-push.json # GitLab push hook
|
||||
│ └── gitea-push.json # Gitea push event
|
||||
└── CiTemplates/
|
||||
├── expected-github-gate.yml # Golden output for template validation
|
||||
└── validation-manifest.json # Template validation rules
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
### Run All Integration E2E Tests
|
||||
|
||||
```bash
|
||||
dotnet test src/__Tests/e2e/Integrations/StellaOps.Integration.E2E.Integrations.csproj
|
||||
```
|
||||
|
||||
### Run Tests by Category
|
||||
|
||||
```bash
|
||||
# SCM webhook tests only
|
||||
dotnet test --filter "Category=Scm"
|
||||
|
||||
# Registry webhook tests only
|
||||
dotnet test --filter "Category=Registry"
|
||||
|
||||
# CI template tests only
|
||||
dotnet test --filter "Category=CiTemplates"
|
||||
|
||||
# Offline/air-gap tests only
|
||||
dotnet test --filter "Category=AirGap"
|
||||
|
||||
# Determinism tests only
|
||||
dotnet test --filter "Category=Determinism"
|
||||
```
|
||||
|
||||
### Run Specific Test Classes
|
||||
|
||||
```bash
|
||||
dotnet test --filter "FullyQualifiedName~ScmWebhookTests"
|
||||
dotnet test --filter "FullyQualifiedName~RegistryWebhookTests"
|
||||
dotnet test --filter "FullyQualifiedName~CiTemplateTests"
|
||||
dotnet test --filter "FullyQualifiedName~OfflineModeTests"
|
||||
dotnet test --filter "FullyQualifiedName~DeterminismTests"
|
||||
```
|
||||
|
||||
## Test Categories
|
||||
|
||||
All tests are tagged with category traits for CI/CD filtering:
|
||||
|
||||
| Category | Description | CI Gate |
|
||||
|----------|-------------|---------|
|
||||
| `E2E` | End-to-end integration tests | Scheduled |
|
||||
| `Integrations` | Integration-related tests | Scheduled |
|
||||
| `Scm` | SCM webhook tests | Scheduled |
|
||||
| `Registry` | Registry webhook tests | Scheduled |
|
||||
| `CiTemplates` | CI template tests | Scheduled |
|
||||
| `Offline` / `AirGap` | Air-gap operation tests | On-demand |
|
||||
| `Determinism` | Reproducibility tests | On-demand |
|
||||
|
||||
## Test Patterns
|
||||
|
||||
### IntegrationTestFixture
|
||||
|
||||
All test classes inherit from or use `IntegrationTestFixture` which provides:
|
||||
|
||||
- **Fixture Loading**: `LoadRegistryFixture()`, `LoadScmFixture()`, `LoadCiTemplateFixture()`
|
||||
- **Offline Mode**: `SetOfflineMode()`, `SetConnectionMonitor()`, `SetDnsMonitor()`
|
||||
- **Determinism Helpers**: `ComputeHash()`, `ComputeCanonicalHash()`, `AreDeterministicallyEqual()`
|
||||
- **Test Utilities**: `CreateTempDirectory()`, `GetFrozenTimestamp()`, `CreateMockLogger<T>()`
|
||||
|
||||
### Example Test Structure
|
||||
|
||||
```csharp
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "Registry")]
|
||||
public class MyRegistryTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public MyRegistryTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Harbor webhook payload is parsed correctly")]
|
||||
public void Harbor_Payload_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("type", out var typeElement).Should().BeTrue();
|
||||
typeElement.GetString().Should().Be("PUSH_ARTIFACT");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Webhook Signature Testing
|
||||
|
||||
Use `WebhookTestHelper` for signature validation:
|
||||
|
||||
```csharp
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-webhook-secret";
|
||||
|
||||
// Generate signatures for different providers
|
||||
var harborSig = WebhookTestHelper.GenerateHarborSignature(payload, secret);
|
||||
var githubSig = WebhookTestHelper.GenerateGitHubSignature(payload, secret);
|
||||
var giteaSig = WebhookTestHelper.GenerateGiteaSignature(payload, secret);
|
||||
var gitlabToken = WebhookTestHelper.GenerateGitLabToken(secret);
|
||||
|
||||
// Signatures should be deterministic
|
||||
harborSig.Should().MatchRegex("^sha256=[a-f0-9]{64}$");
|
||||
```
|
||||
|
||||
### Offline Mode Testing
|
||||
|
||||
```csharp
|
||||
[Fact(DisplayName = "Operations work in offline mode")]
|
||||
public void Operations_WorkOffline()
|
||||
{
|
||||
// Arrange
|
||||
_fixture.SetOfflineMode(true);
|
||||
var connectionAttempts = new List<string>();
|
||||
_fixture.SetConnectionMonitor(endpoint => connectionAttempts.Add(endpoint));
|
||||
|
||||
try
|
||||
{
|
||||
// Act - perform operations
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var templates = CiTemplates.GetTemplates("github", "gate", "basic", true, null);
|
||||
|
||||
// Assert - no network calls were made
|
||||
connectionAttempts.Should().BeEmpty();
|
||||
}
|
||||
finally
|
||||
{
|
||||
_fixture.SetOfflineMode(false);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Determinism Testing
|
||||
|
||||
```csharp
|
||||
[Fact(DisplayName = "Output hash is stable")]
|
||||
public void OutputHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act - map multiple times
|
||||
var results = Enumerable.Range(0, 10)
|
||||
.Select(_ => mapper.Map("push", "delivery-123", payloadJson))
|
||||
.ToList();
|
||||
|
||||
// Assert - all hashes should be identical
|
||||
var hashes = results.Select(r => IntegrationTestFixture.ComputeCanonicalHash(r)).ToList();
|
||||
hashes.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
```
|
||||
|
||||
## Adding New Fixtures
|
||||
|
||||
### Registry Webhook Fixture
|
||||
|
||||
1. Create JSON file in `src/__Tests/__Datasets/Integrations/Registry/`
|
||||
2. Use provider-specific format (see existing fixtures as examples)
|
||||
3. Include deterministic timestamps (use Unix epoch or ISO-8601)
|
||||
4. Add test coverage in `RegistryWebhookTests.cs`
|
||||
|
||||
### SCM Webhook Fixture
|
||||
|
||||
1. Create JSON file in `src/__Tests/__Datasets/Integrations/Scm/`
|
||||
2. Use provider-specific format (GitHub, GitLab, or Gitea)
|
||||
3. Reference consistent test data (repository: stellaops-org/stellaops)
|
||||
4. Add test coverage in `ScmWebhookTests.cs`
|
||||
|
||||
### Golden Output Fixture
|
||||
|
||||
1. Create expected output file in `src/__Tests/__Datasets/Integrations/CiTemplates/`
|
||||
2. Update `validation-manifest.json` if adding new validation rules
|
||||
3. Add determinism test to verify stable generation
|
||||
|
||||
## Key Principles
|
||||
|
||||
1. **No Live Credentials**: All tests use mock providers and recorded fixtures
|
||||
2. **Offline-First**: Tests must work without network access
|
||||
3. **Deterministic**: Same inputs must produce identical outputs across runs
|
||||
4. **Reproducible**: Tests should pass regardless of execution order or parallelism
|
||||
5. **Stable Timestamps**: Use frozen timestamps (2024-12-29T12:00:00Z) in fixtures
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Tests Fail to Find Fixtures
|
||||
|
||||
- Ensure fixtures are copied to output directory (check `.csproj` `<Content>` items)
|
||||
- Run from repository root or ensure working directory is correct
|
||||
|
||||
### Signature Validation Failures
|
||||
|
||||
- Verify secret matches between generation and validation
|
||||
- Check for whitespace differences in payloads (use raw strings)
|
||||
|
||||
### Determinism Test Failures
|
||||
|
||||
- Check for timestamp usage (use `GetFrozenTimestamp()`)
|
||||
- Verify no random/GUID generation in tested paths
|
||||
- Ensure JSON serialization uses consistent options
|
||||
|
||||
### Offline Mode Tests Fail
|
||||
|
||||
- Verify no implicit network calls in code paths
|
||||
- Check that mock providers are properly configured
|
||||
- Ensure DNS lookups are monitored with `SetDnsMonitor()`
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- `docs/modules/sbomservice/sources/architecture.md` - SBOM source architecture
|
||||
- `docs/architecture/integrations.md` - Integration patterns
|
||||
- `src/__Tests/AGENTS.md` - Test infrastructure guidelines
|
||||
- `src/Signals/StellaOps.Signals/Scm/` - SCM webhook models and mappers
|
||||
447
src/__Tests/e2e/Integrations/RegistryWebhookTests.cs
Normal file
447
src/__Tests/e2e/Integrations/RegistryWebhookTests.cs
Normal file
@@ -0,0 +1,447 @@
|
||||
// =============================================================================
|
||||
// RegistryWebhookTests.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: E2E tests for registry webhook parsing and validation
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
using StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations;
|
||||
|
||||
/// <summary>
|
||||
/// E2E tests for registry webhook parsing across all supported registry providers.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Tests cover:
|
||||
/// - Webhook payload parsing for each registry provider
|
||||
/// - Signature validation
|
||||
/// - Image reference extraction
|
||||
/// - Error handling
|
||||
/// </remarks>
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "Registry")]
|
||||
public class RegistryWebhookTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public RegistryWebhookTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Harbor Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "Harbor: PUSH_ARTIFACT payload is parsed correctly")]
|
||||
public void Harbor_PushArtifact_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("type", out var typeElement).Should().BeTrue();
|
||||
typeElement.GetString().Should().Be("PUSH_ARTIFACT");
|
||||
|
||||
root.TryGetProperty("event_data", out var eventData).Should().BeTrue();
|
||||
eventData.TryGetProperty("repository", out var repo).Should().BeTrue();
|
||||
repo.TryGetProperty("repo_full_name", out var repoName).Should().BeTrue();
|
||||
repoName.GetString().Should().Be("library/nginx");
|
||||
|
||||
eventData.TryGetProperty("resources", out var resources).Should().BeTrue();
|
||||
resources.GetArrayLength().Should().BeGreaterThan(0);
|
||||
resources[0].TryGetProperty("tag", out var tag).Should().BeTrue();
|
||||
tag.GetString().Should().Be("v1.2.3");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Harbor: Image reference is extracted correctly")]
|
||||
public void Harbor_ImageReference_ExtractedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var imageRef = WebhookTestHelper.ExtractField(payload, "event_data.resources.0.resource_url");
|
||||
|
||||
// Assert
|
||||
imageRef.Should().Contain("library/nginx");
|
||||
imageRef.Should().Contain("v1.2.3");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Harbor: Valid HMAC signature is generated correctly")]
|
||||
public void Harbor_ValidSignature_GeneratedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-webhook-secret";
|
||||
|
||||
// Act
|
||||
var signature = WebhookTestHelper.GenerateHarborSignature(payload, secret);
|
||||
|
||||
// Assert
|
||||
signature.Should().StartWith("sha256=");
|
||||
signature.Should().HaveLength(71); // "sha256=" (7) + 64 hex chars
|
||||
signature.Should().MatchRegex("^sha256=[a-f0-9]{64}$");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Harbor: Different secrets produce different signatures")]
|
||||
public void Harbor_DifferentSecrets_ProduceDifferentSignatures()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var sig1 = WebhookTestHelper.GenerateHarborSignature(payload, "secret1");
|
||||
var sig2 = WebhookTestHelper.GenerateHarborSignature(payload, "secret2");
|
||||
|
||||
// Assert
|
||||
sig1.Should().NotBe(sig2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Docker Hub Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "DockerHub: Push payload is parsed correctly")]
|
||||
public void DockerHub_Push_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("dockerhub-push.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("push_data", out var pushData).Should().BeTrue();
|
||||
pushData.TryGetProperty("tag", out var tag).Should().BeTrue();
|
||||
tag.GetString().Should().Be("v2.0.0");
|
||||
|
||||
root.TryGetProperty("repository", out var repo).Should().BeTrue();
|
||||
repo.TryGetProperty("repo_name", out var repoName).Should().BeTrue();
|
||||
repoName.GetString().Should().Be("stellaops/scanner");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "DockerHub: Required fields are present")]
|
||||
public void DockerHub_RequiredFields_ArePresent()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("dockerhub-push.json");
|
||||
|
||||
// Act & Assert
|
||||
WebhookTestHelper.ValidateRequiredFields(payload, "push_data", "repository").Should().BeTrue();
|
||||
WebhookTestHelper.ValidateRequiredFields(payload, "push_data.tag").Should().BeTrue();
|
||||
WebhookTestHelper.ValidateRequiredFields(payload, "repository.repo_name").Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ACR Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "ACR: Push payload is parsed correctly")]
|
||||
public void Acr_Push_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("acr-push.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("action", out var action).Should().BeTrue();
|
||||
action.GetString().Should().Be("push");
|
||||
|
||||
root.TryGetProperty("target", out var target).Should().BeTrue();
|
||||
target.TryGetProperty("repository", out var repo).Should().BeTrue();
|
||||
repo.GetString().Should().Be("stellaops/api-gateway");
|
||||
|
||||
target.TryGetProperty("tag", out var tag).Should().BeTrue();
|
||||
tag.GetString().Should().Be("1.0.0");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ACR: Digest is extracted correctly")]
|
||||
public void Acr_Digest_ExtractedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("acr-push.json");
|
||||
|
||||
// Act
|
||||
var digest = WebhookTestHelper.ExtractField(payload, "target.digest");
|
||||
|
||||
// Assert
|
||||
digest.Should().StartWith("sha256:");
|
||||
digest.Should().HaveLength(71); // "sha256:" (7) + 64 hex chars
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ECR Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "ECR: PUSH action payload is parsed correctly")]
|
||||
public void Ecr_Push_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("ecr-push.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("detail-type", out var detailType).Should().BeTrue();
|
||||
detailType.GetString().Should().Be("ECR Image Action");
|
||||
|
||||
root.TryGetProperty("detail", out var detail).Should().BeTrue();
|
||||
detail.TryGetProperty("action-type", out var actionType).Should().BeTrue();
|
||||
actionType.GetString().Should().Be("PUSH");
|
||||
|
||||
detail.TryGetProperty("repository-name", out var repoName).Should().BeTrue();
|
||||
repoName.GetString().Should().Be("stellaops/scanner");
|
||||
|
||||
detail.TryGetProperty("image-tag", out var tag).Should().BeTrue();
|
||||
tag.GetString().Should().Be("v3.1.0");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ECR: Required CloudWatch event fields are present")]
|
||||
public void Ecr_RequiredFields_ArePresent()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("ecr-push.json");
|
||||
|
||||
// Act & Assert
|
||||
WebhookTestHelper.ValidateRequiredFields(payload,
|
||||
"version",
|
||||
"detail-type",
|
||||
"source",
|
||||
"detail").Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GCR Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "GCR: Pub/Sub message is structured correctly")]
|
||||
public void Gcr_PubSubMessage_IsStructuredCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("gcr-push.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("message", out var message).Should().BeTrue();
|
||||
message.TryGetProperty("data", out var data).Should().BeTrue();
|
||||
data.GetString().Should().NotBeNullOrEmpty();
|
||||
|
||||
// Verify the base64-encoded data is valid
|
||||
var base64Data = data.GetString();
|
||||
var decoded = System.Text.Encoding.UTF8.GetString(Convert.FromBase64String(base64Data!));
|
||||
var innerDoc = JsonDocument.Parse(decoded);
|
||||
innerDoc.RootElement.TryGetProperty("action", out var action).Should().BeTrue();
|
||||
action.GetString().Should().Be("INSERT");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GHCR Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "GHCR: Package published payload is parsed correctly")]
|
||||
public void Ghcr_PackagePublished_IsParsedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("ghcr-package-published.json");
|
||||
|
||||
// Act
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("action", out var action).Should().BeTrue();
|
||||
action.GetString().Should().Be("published");
|
||||
|
||||
root.TryGetProperty("package", out var package).Should().BeTrue();
|
||||
package.TryGetProperty("name", out var name).Should().BeTrue();
|
||||
name.GetString().Should().Be("stellaops-cli");
|
||||
|
||||
package.TryGetProperty("package_version", out var version).Should().BeTrue();
|
||||
version.TryGetProperty("version", out var versionStr).Should().BeTrue();
|
||||
versionStr.GetString().Should().Be("v4.0.0");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GHCR: Container metadata is present")]
|
||||
public void Ghcr_ContainerMetadata_IsPresent()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("ghcr-package-published.json");
|
||||
|
||||
// Act & Assert
|
||||
WebhookTestHelper.ValidateRequiredFields(payload,
|
||||
"action",
|
||||
"package",
|
||||
"package.name",
|
||||
"package.package_version").Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Provider Tests
|
||||
|
||||
[Theory(DisplayName = "All registry payloads are valid JSON")]
|
||||
[InlineData("harbor-push-v2.json")]
|
||||
[InlineData("dockerhub-push.json")]
|
||||
[InlineData("acr-push.json")]
|
||||
[InlineData("ecr-push.json")]
|
||||
[InlineData("gcr-push.json")]
|
||||
[InlineData("ghcr-package-published.json")]
|
||||
public void AllPayloads_AreValidJson(string filename)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture(filename);
|
||||
|
||||
// Act
|
||||
var parseAction = () => JsonDocument.Parse(payload);
|
||||
|
||||
// Assert
|
||||
parseAction.Should().NotThrow();
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "All providers produce deterministic signatures")]
|
||||
[InlineData("harbor")]
|
||||
[InlineData("dockerhub")]
|
||||
[InlineData("github")]
|
||||
[InlineData("gitea")]
|
||||
public void AllProviders_ProduceDeterministicSignatures(string provider)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
var secret = "test-secret-determinism";
|
||||
|
||||
// Act
|
||||
var signatures = Enumerable.Range(0, 10)
|
||||
.Select(_ => provider switch
|
||||
{
|
||||
"harbor" => WebhookTestHelper.GenerateHarborSignature(payload, secret),
|
||||
"dockerhub" => WebhookTestHelper.GenerateDockerHubSignature(payload, secret),
|
||||
"github" => WebhookTestHelper.GenerateGitHubSignature(payload, secret),
|
||||
"gitea" => WebhookTestHelper.GenerateGiteaSignature(payload, secret),
|
||||
_ => throw new ArgumentException($"Unknown provider: {provider}")
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
signatures.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "Minimal payloads are generated correctly")]
|
||||
[InlineData("harbor")]
|
||||
[InlineData("dockerhub")]
|
||||
[InlineData("acr")]
|
||||
[InlineData("ecr")]
|
||||
[InlineData("ghcr")]
|
||||
public void MinimalPayloads_GeneratedCorrectly(string provider)
|
||||
{
|
||||
// Act
|
||||
var payload = WebhookTestHelper.CreateMinimalPayload(provider);
|
||||
|
||||
// Assert
|
||||
payload.Should().NotBeNullOrEmpty();
|
||||
var parseAction = () => JsonDocument.Parse(payload);
|
||||
parseAction.Should().NotThrow();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Error Handling Tests
|
||||
|
||||
[Fact(DisplayName = "Invalid JSON is detected")]
|
||||
public void InvalidJson_IsDetected()
|
||||
{
|
||||
// Arrange
|
||||
var invalidPayload = "{ invalid json }";
|
||||
|
||||
// Act
|
||||
var isValid = WebhookTestHelper.ValidateRequiredFields(invalidPayload, "type");
|
||||
|
||||
// Assert
|
||||
isValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Missing required fields are detected")]
|
||||
public void MissingRequiredFields_AreDetected()
|
||||
{
|
||||
// Arrange
|
||||
var payload = "{}";
|
||||
|
||||
// Act
|
||||
var hasFields = WebhookTestHelper.ValidateRequiredFields(payload, "type", "event_data");
|
||||
|
||||
// Assert
|
||||
hasFields.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Corrupted digest is detectable")]
|
||||
public void CorruptedDigest_IsDetectable()
|
||||
{
|
||||
// Arrange
|
||||
var originalPayload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var corruptedPayload = WebhookTestHelper.CorruptPayloadDigest(originalPayload);
|
||||
|
||||
// Assert
|
||||
corruptedPayload.Should().Contain("sha256:0000000000000000000000000000000000000000000000000000000000000000");
|
||||
corruptedPayload.Should().NotBe(originalPayload);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact(DisplayName = "Payload parsing is deterministic")]
|
||||
public void PayloadParsing_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var hashes = Enumerable.Range(0, 10)
|
||||
.Select(_ =>
|
||||
{
|
||||
var doc = JsonDocument.Parse(payload);
|
||||
var canonical = JsonSerializer.Serialize(doc.RootElement, new JsonSerializerOptions { WriteIndented = false });
|
||||
return IntegrationTestFixture.ComputeHash(canonical);
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Field extraction is deterministic")]
|
||||
public void FieldExtraction_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadRegistryFixture("harbor-push-v2.json");
|
||||
|
||||
// Act
|
||||
var values = Enumerable.Range(0, 10)
|
||||
.Select(_ => WebhookTestHelper.ExtractField(payload, "type"))
|
||||
.ToList();
|
||||
|
||||
// Assert
|
||||
values.Distinct().Should().HaveCount(1);
|
||||
values.All(v => v == "PUSH_ARTIFACT").Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
436
src/__Tests/e2e/Integrations/ScmWebhookTests.cs
Normal file
436
src/__Tests/e2e/Integrations/ScmWebhookTests.cs
Normal file
@@ -0,0 +1,436 @@
|
||||
// =============================================================================
|
||||
// ScmWebhookTests.cs
|
||||
// Sprint: SPRINT_20251229_019 - Integration E2E Validation
|
||||
// Description: E2E tests for SCM webhook processing (GitHub, GitLab, Gitea)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Moq;
|
||||
using StellaOps.Integration.E2E.Integrations.Fixtures;
|
||||
using StellaOps.Integration.E2E.Integrations.Helpers;
|
||||
using StellaOps.Signals.Scm.Models;
|
||||
using StellaOps.Signals.Scm.Webhooks;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.E2E.Integrations;
|
||||
|
||||
/// <summary>
|
||||
/// E2E tests for SCM webhook processing across all supported SCM providers.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Tests cover:
|
||||
/// - Webhook payload parsing for each SCM provider (GitHub, GitLab, Gitea)
|
||||
/// - Event type mapping (push, PR, workflow, etc.)
|
||||
/// - Signature validation
|
||||
/// - Normalized event generation
|
||||
/// </remarks>
|
||||
[Trait("Category", "E2E")]
|
||||
[Trait("Category", "Integrations")]
|
||||
[Trait("Category", "Scm")]
|
||||
public class ScmWebhookTests : IClassFixture<IntegrationTestFixture>
|
||||
{
|
||||
private readonly IntegrationTestFixture _fixture;
|
||||
|
||||
public ScmWebhookTests(IntegrationTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region GitHub Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "GitHub: Push event is mapped correctly")]
|
||||
public void GitHub_PushEvent_IsMappedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("push", "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Provider.Should().Be(ScmProvider.GitHub);
|
||||
result.EventType.Should().Be(ScmEventType.Push);
|
||||
result.EventId.Should().Be("delivery-123");
|
||||
result.Repository.FullName.Should().Be("stellaops-org/stellaops");
|
||||
result.Ref.Should().Be("refs/heads/main");
|
||||
result.CommitSha.Should().Be("abc123def456789012345678901234567890abcd");
|
||||
result.Actor.Should().NotBeNull();
|
||||
result.Actor!.Username.Should().Be("developer");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Pull request event is mapped correctly")]
|
||||
public void GitHub_PullRequestEvent_IsMappedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-pull-request.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("pull_request", "delivery-456", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Provider.Should().Be(ScmProvider.GitHub);
|
||||
result.EventType.Should().Be(ScmEventType.PullRequestOpened);
|
||||
result.PullRequest.Should().NotBeNull();
|
||||
result.PullRequest!.Number.Should().Be(42);
|
||||
result.PullRequest.Title.Should().Be("feat: add Python wheel analyzer");
|
||||
result.PullRequest.SourceBranch.Should().Be("feature/python-wheel");
|
||||
result.PullRequest.TargetBranch.Should().Be("main");
|
||||
result.PullRequest.State.Should().Be("open");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Workflow run event is mapped correctly")]
|
||||
public void GitHub_WorkflowRunEvent_IsMappedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-workflow-run.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("workflow_run", "delivery-789", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Provider.Should().Be(ScmProvider.GitHub);
|
||||
result.EventType.Should().Be(ScmEventType.PipelineSucceeded);
|
||||
result.Pipeline.Should().NotBeNull();
|
||||
result.Pipeline!.Name.Should().Be("StellaOps CI");
|
||||
result.Pipeline.Status.Should().Be(ScmPipelineStatus.Completed);
|
||||
result.Pipeline.Conclusion.Should().Be("success");
|
||||
result.Pipeline.RunNumber.Should().Be(123);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitHub: Signature validation works correctly")]
|
||||
public void GitHub_SignatureValidation_Works()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var secret = "test-webhook-secret";
|
||||
var validator = new GitHubWebhookValidator();
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payload);
|
||||
|
||||
var validSignature = WebhookTestHelper.GenerateGitHubSignature(payload, secret);
|
||||
var invalidSignature = WebhookTestHelper.GenerateGitHubSignature(payload, "wrong-secret");
|
||||
|
||||
// Act & Assert
|
||||
validator.IsValid(payloadBytes, validSignature, secret).Should().BeTrue();
|
||||
validator.IsValid(payloadBytes, invalidSignature, secret).Should().BeFalse();
|
||||
validator.IsValid(payloadBytes, null, secret).Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GitLab Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "GitLab: Push event is mapped correctly")]
|
||||
public void GitLab_PushEvent_IsMappedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("gitlab-push.json");
|
||||
var mapper = new GitLabEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("Push Hook", "delivery-gl-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Provider.Should().Be(ScmProvider.GitLab);
|
||||
result.EventType.Should().Be(ScmEventType.Push);
|
||||
result.Repository.FullName.Should().Be("stellaops-org/stellaops");
|
||||
result.Ref.Should().Be("refs/heads/main");
|
||||
result.CommitSha.Should().Be("abc123def456789012345678901234567890abcd");
|
||||
result.Actor.Should().NotBeNull();
|
||||
result.Actor!.Username.Should().Be("developer");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GitLab: Token validation works correctly")]
|
||||
public void GitLab_TokenValidation_Works()
|
||||
{
|
||||
// Arrange
|
||||
var secret = "test-gitlab-token";
|
||||
var validator = new GitLabWebhookValidator();
|
||||
var payload = Encoding.UTF8.GetBytes("any-payload");
|
||||
|
||||
// GitLab uses X-Gitlab-Token header comparison
|
||||
var validToken = secret;
|
||||
var invalidToken = "wrong-token";
|
||||
|
||||
// Act & Assert
|
||||
validator.IsValid(payload, validToken, secret).Should().BeTrue();
|
||||
validator.IsValid(payload, invalidToken, secret).Should().BeFalse();
|
||||
validator.IsValid(payload, null, secret).Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Gitea Webhook Tests
|
||||
|
||||
[Fact(DisplayName = "Gitea: Push event is mapped correctly")]
|
||||
public void Gitea_PushEvent_IsMappedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("gitea-push.json");
|
||||
var mapper = new GiteaEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("push", "delivery-gt-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Provider.Should().Be(ScmProvider.Gitea);
|
||||
result.EventType.Should().Be(ScmEventType.Push);
|
||||
result.Repository.FullName.Should().Be("stellaops-org/stellaops");
|
||||
result.Ref.Should().Be("refs/heads/main");
|
||||
result.CommitSha.Should().Be("abc123def456789012345678901234567890abcd");
|
||||
result.Actor.Should().NotBeNull();
|
||||
result.Actor!.Username.Should().Be("developer");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Gitea: Signature validation works correctly")]
|
||||
public void Gitea_SignatureValidation_Works()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("gitea-push.json");
|
||||
var secret = "test-gitea-secret";
|
||||
var validator = new GiteaWebhookValidator();
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payload);
|
||||
|
||||
var validSignature = WebhookTestHelper.GenerateGiteaSignature(payload, secret);
|
||||
var invalidSignature = WebhookTestHelper.GenerateGiteaSignature(payload, "wrong-secret");
|
||||
|
||||
// Act & Assert
|
||||
validator.IsValid(payloadBytes, validSignature, secret).Should().BeTrue();
|
||||
validator.IsValid(payloadBytes, invalidSignature, secret).Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Provider Normalization Tests
|
||||
|
||||
[Theory(DisplayName = "All providers normalize push events consistently")]
|
||||
[InlineData("github", "github-push.json", "push")]
|
||||
[InlineData("gitlab", "gitlab-push.json", "Push Hook")]
|
||||
[InlineData("gitea", "gitea-push.json", "push")]
|
||||
public void AllProviders_NormalizePushEvents_Consistently(string provider, string fixture, string eventType)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture(fixture);
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
IScmEventMapper mapper = provider switch
|
||||
{
|
||||
"github" => new GitHubEventMapper(),
|
||||
"gitlab" => new GitLabEventMapper(),
|
||||
"gitea" => new GiteaEventMapper(),
|
||||
_ => throw new ArgumentException($"Unknown provider: {provider}")
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = mapper.Map(eventType, $"delivery-{provider}", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.EventType.Should().Be(ScmEventType.Push);
|
||||
result.Repository.Should().NotBeNull();
|
||||
result.Repository.FullName.Should().NotBeNullOrEmpty();
|
||||
result.Ref.Should().Contain("main");
|
||||
result.CommitSha.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "Push events contain consistent repository information")]
|
||||
[InlineData("github", "github-push.json", "push")]
|
||||
[InlineData("gitlab", "gitlab-push.json", "Push Hook")]
|
||||
[InlineData("gitea", "gitea-push.json", "push")]
|
||||
public void PushEvents_ContainConsistentRepositoryInfo(string provider, string fixture, string eventType)
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture(fixture);
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
IScmEventMapper mapper = provider switch
|
||||
{
|
||||
"github" => new GitHubEventMapper(),
|
||||
"gitlab" => new GitLabEventMapper(),
|
||||
"gitea" => new GiteaEventMapper(),
|
||||
_ => throw new ArgumentException($"Unknown provider: {provider}")
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = mapper.Map(eventType, $"delivery-{provider}", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Repository.Should().NotBeNull();
|
||||
|
||||
// All repositories should have:
|
||||
result.Repository.FullName.Should().NotBeNullOrEmpty();
|
||||
result.Repository.FullName.Should().Contain("/"); // owner/repo format
|
||||
result.Repository.DefaultBranch.Should().Be("main");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Event Filtering Tests
|
||||
|
||||
[Fact(DisplayName = "Unsupported event types return null")]
|
||||
public void UnsupportedEventTypes_ReturnNull()
|
||||
{
|
||||
// Arrange
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payload = JsonDocument.Parse("{}").RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("unsupported_event", "delivery-123", payload);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "Known event types are mapped correctly")]
|
||||
[InlineData("push", ScmEventType.Push)]
|
||||
[InlineData("pull_request", ScmEventType.PullRequestOpened)]
|
||||
[InlineData("workflow_run", ScmEventType.PipelineSucceeded)]
|
||||
[InlineData("create", ScmEventType.TagCreated)]
|
||||
public void KnownEventTypes_AreMappedCorrectly(string eventType, ScmEventType expectedType)
|
||||
{
|
||||
// Arrange
|
||||
var mapper = new GitHubEventMapper();
|
||||
|
||||
// Create minimal payloads for each event type
|
||||
var payload = eventType switch
|
||||
{
|
||||
"push" => _fixture.LoadScmFixture("github-push.json"),
|
||||
"pull_request" => _fixture.LoadScmFixture("github-pull-request.json"),
|
||||
"workflow_run" => _fixture.LoadScmFixture("github-workflow-run.json"),
|
||||
"create" => CreateTagCreatePayload(),
|
||||
_ => "{}"
|
||||
};
|
||||
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map(eventType, "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.EventType.Should().Be(expectedType);
|
||||
}
|
||||
|
||||
private static string CreateTagCreatePayload()
|
||||
{
|
||||
return """
|
||||
{
|
||||
"ref": "v1.0.0",
|
||||
"ref_type": "tag",
|
||||
"repository": {
|
||||
"full_name": "stellaops-org/stellaops",
|
||||
"name": "stellaops",
|
||||
"default_branch": "main"
|
||||
},
|
||||
"sender": {
|
||||
"login": "developer",
|
||||
"type": "User"
|
||||
}
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Actor Extraction Tests
|
||||
|
||||
[Fact(DisplayName = "Actor information is extracted from push events")]
|
||||
public void ActorInfo_ExtractedFromPushEvents()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("push", "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Actor.Should().NotBeNull();
|
||||
result.Actor!.Username.Should().Be("developer");
|
||||
result.Actor.Type.Should().Be(ScmActorType.User);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Bot actors are identified correctly")]
|
||||
public void BotActors_IdentifiedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-workflow-run.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("workflow_run", "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Actor.Should().NotBeNull();
|
||||
result.Actor!.Type.Should().Be(ScmActorType.Bot);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact(DisplayName = "Event mapping is deterministic")]
|
||||
public void EventMapping_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act - Map multiple times
|
||||
var result1 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
var result2 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
var result3 = mapper.Map("push", "delivery-123", payloadJson);
|
||||
|
||||
// Assert
|
||||
result1.Should().NotBeNull();
|
||||
result2.Should().NotBeNull();
|
||||
result3.Should().NotBeNull();
|
||||
|
||||
// All results should be equivalent
|
||||
IntegrationTestFixture.AreDeterministicallyEqual(result1, result2).Should().BeTrue();
|
||||
IntegrationTestFixture.AreDeterministicallyEqual(result2, result3).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Hash of mapped event is stable")]
|
||||
public void MappedEventHash_IsStable()
|
||||
{
|
||||
// Arrange
|
||||
var payload = _fixture.LoadScmFixture("github-push.json");
|
||||
var mapper = new GitHubEventMapper();
|
||||
var payloadJson = JsonDocument.Parse(payload).RootElement;
|
||||
|
||||
// Act
|
||||
var result = mapper.Map("push", "delivery-123", payloadJson);
|
||||
|
||||
var hash1 = IntegrationTestFixture.ComputeCanonicalHash(result);
|
||||
var hash2 = IntegrationTestFixture.ComputeCanonicalHash(result);
|
||||
var hash3 = IntegrationTestFixture.ComputeCanonicalHash(result);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
hash2.Should().Be(hash3);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!--
|
||||
StellaOps.Integration.E2E.Integrations.csproj
|
||||
Sprint: SPRINT_20251229_019
|
||||
Task: Integration E2E Validation
|
||||
Description: End-to-end tests for registry, SCM, and CI integrations
|
||||
-->
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Moq" />
|
||||
<PackageReference Include="Testcontainers.PostgreSql" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- SbomService for registry webhook processing -->
|
||||
<ProjectReference Include="..\..\..\SbomService\StellaOps.SbomService\StellaOps.SbomService.csproj" />
|
||||
|
||||
<!-- Signals for SCM webhook processing -->
|
||||
<ProjectReference Include="..\..\..\Signals\StellaOps.Signals\StellaOps.Signals.csproj" />
|
||||
|
||||
<!-- CLI project reference removed - tests use local template helper -->
|
||||
<!-- <ProjectReference Include="..\..\..\Cli\StellaOps.Cli\StellaOps.Cli.csproj" /> -->
|
||||
|
||||
<!-- Integrations core library -->
|
||||
<ProjectReference Include="..\..\..\Integrations\__Libraries\StellaOps.Integrations.Core\StellaOps.Integrations.Core.csproj" />
|
||||
|
||||
<!-- Testing infrastructure -->
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Infrastructure.Postgres.Testing\StellaOps.Infrastructure.Postgres.Testing.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Testing.Determinism\StellaOps.Testing.Determinism.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Testing.AirGap\StellaOps.Testing.AirGap.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Include test fixtures from __Datasets -->
|
||||
<Content Include="..\..\__Datasets\Integrations\**\*">
|
||||
<Link>Fixtures\%(RecursiveDir)%(Filename)%(Extension)</Link>
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -23,14 +23,14 @@ public sealed class ReplayableVerdictE2ETests : IAsyncLifetime
|
||||
private const string BundlePath = "../../../fixtures/e2e/bundle-0001";
|
||||
private GoldenBundle? _bundle;
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
_bundle = await GoldenBundle.LoadAsync(BundlePath);
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
[Fact(Skip = "E2E-002: Requires full pipeline integration")]
|
||||
@@ -243,3 +243,6 @@ internal sealed record BundleOutputs
|
||||
public required InputFile Verdict { get; init; }
|
||||
public required string VerdictHash { get; init; }
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -12,8 +12,8 @@
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.0">
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
@@ -34,3 +34,4 @@
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ public sealed class InteropTestHarness : IAsyncLifetime
|
||||
_toolManager = new ToolManager(_workDir);
|
||||
}
|
||||
|
||||
public async Task InitializeAsync()
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
Directory.CreateDirectory(_workDir);
|
||||
|
||||
@@ -180,11 +180,11 @@ public sealed class InteropTestHarness : IAsyncLifetime
|
||||
OnlyInGrypeDetails: onlyInGrype);
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
if (Directory.Exists(_workDir))
|
||||
Directory.Delete(_workDir, recursive: true);
|
||||
return Task.CompletedTask;
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private static string ComputeDigest(string content) =>
|
||||
@@ -197,3 +197,6 @@ public sealed class InteropTestHarness : IAsyncLifetime
|
||||
return Array.Empty<GrypeFinding>();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -27,3 +27,4 @@
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" >
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
@@ -29,3 +29,4 @@
|
||||
</Content>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
|
||||
Reference in New Issue
Block a user