Some checks failed
LNM Migration CI / build-runner (push) Has been cancelled
Ledger OpenAPI CI / deprecation-check (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Airgap Sealed CI Smoke / sealed-smoke (push) Has been cancelled
Ledger Packs CI / build-pack (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Ledger OpenAPI CI / validate-oas (push) Has been cancelled
Ledger OpenAPI CI / check-wellknown (push) Has been cancelled
Ledger Packs CI / verify-pack (push) Has been cancelled
LNM Migration CI / validate-metrics (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
@@ -14,6 +14,7 @@ using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Options;
using StellaOps.Scanner.WebService.Services;
using DomainScanProgressEvent = StellaOps.Scanner.WebService.Domain.ScanProgressEvent;
using StellaOps.Scanner.Core.Contracts;

@@ -358,13 +358,52 @@ public sealed class ScannerWebServiceOptions

    public int EventTtlDays { get; set; } = 45;

    // === Tenant-level rate limits ===
    public double PerTenantEventsPerSecond { get; set; } = 200;

    public int PerTenantBurst { get; set; } = 1000;

    // === Node-level rate limits ===
    public double PerNodeEventsPerSecond { get; set; } = 50;

    public int PerNodeBurst { get; set; } = 200;

    public double PerTenantEventsPerSecond { get; set; } = 200;
    // === Namespace-level rate limits (hierarchical budget) ===
    /// <summary>
    /// Maximum events per second per namespace.
    /// Part of hierarchical rate limiting: tenant → namespace → workload.
    /// Default: 100 events/second per namespace.
    /// </summary>
    public double PerNamespaceEventsPerSecond { get; set; } = 100;

    public int PerTenantBurst { get; set; } = 1000;
    /// <summary>
    /// Burst capacity per namespace.
    /// Default: 500 events burst.
    /// </summary>
    public int PerNamespaceBurst { get; set; } = 500;

    // === Workload-level rate limits (hierarchical budget) ===
    /// <summary>
    /// Maximum events per second per workload (pod/container).
    /// Part of hierarchical rate limiting: tenant → namespace → workload.
    /// Prevents noisy workloads from exhausting namespace or tenant budgets.
    /// Default: 25 events/second per workload.
    /// </summary>
    public double PerWorkloadEventsPerSecond { get; set; } = 25;

    /// <summary>
    /// Burst capacity per workload.
    /// Default: 100 events burst.
    /// </summary>
    public int PerWorkloadBurst { get; set; } = 100;

    /// <summary>
    /// Enable hierarchical rate limiting across tenant → namespace → workload.
    /// When enabled, rate limits are enforced at all three levels.
    /// When disabled, only tenant and node limits apply (legacy behavior).
    /// Default: false (opt-in for backward compatibility).
    /// </summary>
    public bool HierarchicalRateLimitingEnabled { get; set; } = false;

    public int PolicyCacheTtlSeconds { get; set; } = 300;

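Each EventsPerSecond / Burst pair above follows standard token-bucket semantics: the rate is the sustained refill and the burst is the bucket capacity. The sketch below illustrates that interaction with the per-workload defaults (25 events/s, burst 100); SketchTokenBucket is a hypothetical stand-in for the service's internal TokenBucket, which is not part of this diff.

using System;

// Hypothetical stand-in for the service's TokenBucket (not shown in this diff).
// Shows how a refill rate (events/second) and a burst capacity interact.
sealed class SketchTokenBucket
{
    private readonly double _ratePerSecond;
    private readonly double _capacity;
    private double _tokens;
    private DateTimeOffset _last;

    public SketchTokenBucket(double ratePerSecond, double capacity, DateTimeOffset now)
    {
        _ratePerSecond = ratePerSecond;
        _capacity = capacity;
        _tokens = capacity;   // start full so a cold scope can use its burst immediately
        _last = now;
    }

    public bool TryTake(int count, DateTimeOffset now)
    {
        // Refill proportionally to elapsed time, capped at the burst capacity.
        _tokens = Math.Min(_capacity, _tokens + (now - _last).TotalSeconds * _ratePerSecond);
        _last = now;

        if (_tokens < count)
        {
            return false;     // batch exceeds the remaining budget for this scope
        }

        _tokens -= count;
        return true;
    }
}

static class SketchTokenBucketDemo
{
    static void Main()
    {
        var now = DateTimeOffset.UtcNow;
        var workloadBucket = new SketchTokenBucket(ratePerSecond: 25, capacity: 100, now);

        Console.WriteLine(workloadBucket.TryTake(100, now));                // True: within the 100-event burst
        Console.WriteLine(workloadBucket.TryTake(1, now));                  // False: budget exhausted
        Console.WriteLine(workloadBucket.TryTake(25, now.AddSeconds(1)));   // True: ~25 tokens refilled after 1s
    }
}
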
@@ -11,6 +11,7 @@ using StellaOps.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Core.Replay;
using StellaOps.Scanner.Reachability;
using ReachabilityWriter = StellaOps.Scanner.Reachability.ReachabilityReplayWriter;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.WebService.Domain;
@@ -25,7 +26,7 @@ namespace StellaOps.Scanner.WebService.Replay;
internal sealed class RecordModeService : IRecordModeService
{
    private readonly RecordModeAssembler _assembler;
    private readonly ReachabilityReplayWriter _reachability;
    private readonly ReachabilityWriter _reachability;
    private readonly ICryptoHash _cryptoHash;
    private readonly IArtifactObjectStore? _objectStore;
    private readonly ScannerStorageOptions? _storageOptions;
@@ -45,7 +46,7 @@ internal sealed class RecordModeService : IRecordModeService
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _assembler = new RecordModeAssembler(cryptoHash, timeProvider);
        _reachability = new ReachabilityReplayWriter();
        _reachability = new ReachabilityWriter();
    }

    // Legacy/testing constructor for unit tests that do not require storage.
@@ -53,7 +54,7 @@ internal sealed class RecordModeService : IRecordModeService
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
        _assembler = new RecordModeAssembler(cryptoHash, timeProvider);
        _reachability = new ReachabilityReplayWriter();
        _reachability = new ReachabilityWriter();
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

@@ -5,10 +5,17 @@ using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Scanner.WebService.Services;

/// <summary>
/// Hierarchical rate limiter for runtime events.
/// Supports rate limiting at tenant, node, namespace, and workload levels.
/// Budget allocation: tenant → namespace → workload (when hierarchical mode enabled).
/// </summary>
internal sealed class RuntimeEventRateLimiter
{
    private readonly ConcurrentDictionary<string, TokenBucket> _tenantBuckets = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, TokenBucket> _nodeBuckets = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, TokenBucket> _namespaceBuckets = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, TokenBucket> _workloadBuckets = new(StringComparer.Ordinal);
    private readonly TimeProvider _timeProvider;
    private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;

@@ -29,33 +36,36 @@ internal sealed class RuntimeEventRateLimiter
        var options = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions();
        var now = _timeProvider.GetUtcNow();

        // Count events by scope
        var tenantCounts = new Dictionary<string, int>(StringComparer.Ordinal);
        var nodeCounts = new Dictionary<string, int>(StringComparer.Ordinal);
        var namespaceCounts = new Dictionary<string, int>(StringComparer.Ordinal);
        var workloadCounts = new Dictionary<string, int>(StringComparer.Ordinal);

        foreach (var envelope in envelopes)
        {
            var tenant = envelope.Event.Tenant;
            var node = envelope.Event.Node;
            if (tenantCounts.TryGetValue(tenant, out var tenantCount))
            {
                tenantCounts[tenant] = tenantCount + 1;
            }
            else
            {
                tenantCounts[tenant] = 1;
            }
            var ns = envelope.Event.Workload?.Namespace ?? "_default";
            var workloadId = GetWorkloadKey(envelope.Event);

            var nodeKey = $"{tenant}|{node}";
            if (nodeCounts.TryGetValue(nodeKey, out var nodeCount))
            // Tenant counts
            IncrementCount(tenantCounts, tenant);

            // Node counts (tenant-scoped)
            IncrementCount(nodeCounts, $"{tenant}|{node}");

            // Namespace counts (tenant-scoped) - only used in hierarchical mode
            if (options.HierarchicalRateLimitingEnabled)
            {
                nodeCounts[nodeKey] = nodeCount + 1;
            }
            else
            {
                nodeCounts[nodeKey] = 1;
                IncrementCount(namespaceCounts, $"{tenant}|{ns}");
                IncrementCount(workloadCounts, $"{tenant}|{ns}|{workloadId}");
            }
        }

        // === Evaluate rate limits in order: tenant → node → namespace → workload ===

        // 1. Tenant-level check
        var tenantDecision = TryAcquire(
            _tenantBuckets,
            tenantCounts,
@@ -69,6 +79,7 @@ internal sealed class RuntimeEventRateLimiter
            return tenantDecision;
        }

        // 2. Node-level check
        var nodeDecision = TryAcquire(
            _nodeBuckets,
            nodeCounts,
@@ -77,7 +88,84 @@ internal sealed class RuntimeEventRateLimiter
            now,
            scope: "node");

        return nodeDecision;
        if (!nodeDecision.Allowed)
        {
            return nodeDecision;
        }

        // 3. Hierarchical checks (namespace → workload) - only when enabled
        if (options.HierarchicalRateLimitingEnabled)
        {
            // 3a. Namespace-level check
            var namespaceDecision = TryAcquire(
                _namespaceBuckets,
                namespaceCounts,
                options.PerNamespaceEventsPerSecond,
                options.PerNamespaceBurst,
                now,
                scope: "namespace");

            if (!namespaceDecision.Allowed)
            {
                return namespaceDecision;
            }

            // 3b. Workload-level check
            var workloadDecision = TryAcquire(
                _workloadBuckets,
                workloadCounts,
                options.PerWorkloadEventsPerSecond,
                options.PerWorkloadBurst,
                now,
                scope: "workload");

            if (!workloadDecision.Allowed)
            {
                return workloadDecision;
            }
        }

        return RateLimitDecision.Success;
    }

    /// <summary>
    /// Gets a unique key for a workload from the runtime event.
    /// Uses pod name if available, otherwise container ID or a generated key.
    /// </summary>
    private static string GetWorkloadKey(RuntimeEvent evt)
    {
        var workload = evt.Workload;
        if (workload is null)
        {
            return "_unknown";
        }

        // Prefer pod name for Kubernetes workloads
        if (!string.IsNullOrEmpty(workload.Pod))
        {
            return workload.Pod;
        }

        // Fall back to container ID
        if (!string.IsNullOrEmpty(workload.ContainerId))
        {
            // Truncate container ID for reasonable key length
            var containerId = workload.ContainerId;
            if (containerId.Contains("://"))
            {
                containerId = containerId.Substring(containerId.IndexOf("://") + 3);
            }
            return containerId.Length > 12 ? containerId[..12] : containerId;
        }

        // Last resort: use container name
        return workload.Container ?? "_unknown";
    }

    private static void IncrementCount(Dictionary<string, int> counts, string key)
    {
        counts.TryGetValue(key, out var count);
        counts[key] = count + 1;
    }

    private static RateLimitDecision TryAcquire(

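For reference, the bucket keys built above compose as tenant, tenant|node, tenant|namespace, and tenant|namespace|workload, with the workload segment coming from GetWorkloadKey. A small standalone sketch of that key derivation follows; DeriveWorkloadKey is a hypothetical helper that restates the fallback order (pod name, then a scheme-stripped container id truncated to 12 characters, then container name) with plain strings instead of the RuntimeEvent contract types.

using System;

static class WorkloadKeySketch
{
    // Restates GetWorkloadKey's fallback order for illustration only.
    static string DeriveWorkloadKey(string? pod, string? containerId, string? container)
    {
        if (!string.IsNullOrEmpty(pod))
        {
            return pod;
        }

        if (!string.IsNullOrEmpty(containerId))
        {
            var id = containerId;
            if (id.Contains("://"))
            {
                id = id.Substring(id.IndexOf("://") + 3);   // strip "containerd://" style schemes
            }
            return id.Length > 12 ? id[..12] : id;
        }

        return container ?? "_unknown";
    }

    static void Main()
    {
        var workloadId = DeriveWorkloadKey(
            pod: null,
            containerId: "containerd://0123456789abcdef0123",
            container: "api");

        Console.WriteLine(workloadId);                              // 0123456789ab
        Console.WriteLine($"tenant-alpha|default|{workloadId}");    // workload-level bucket key shape
    }
}
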
@@ -37,6 +37,7 @@
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
    <ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />
    <ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />

@@ -0,0 +1,606 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using CycloneDX.Json;
using CycloneDX.Models;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class RuntimeReconciliationTests
{
    private const string TestImageDigest = "sha256:abc123def456";
    private const string TestTenant = "tenant-alpha";
    private const string TestNode = "node-a";

    [Fact]
    public async Task ReconcileEndpoint_WithNoRuntimeEvents_ReturnsNotFound()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new RuntimeReconcileRequestDto
        {
            ImageDigest = TestImageDigest
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", request);

        Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal("NO_RUNTIME_EVENTS", payload!.ErrorCode);
        Assert.Contains("No runtime events found", payload.ErrorMessage);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithRuntimeEventsButNoSbom_ReturnsNoSbomError()
    {
        var mockObjectStore = new InMemoryArtifactObjectStore();

        using var factory = new ScannerApplicationFactory(
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(mockObjectStore);
            });
        using var client = factory.CreateClient();

        // Ingest runtime event with loaded libraries
        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelopeWithLibraries("evt-001", TestImageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/lib/libssl.so.3", Sha256 = "sha256:lib1hash", Inode = 1001 },
                    new RuntimeLoadedLibrary { Path = "/lib/libcrypto.so.3", Sha256 = "sha256:lib2hash", Inode = 1002 }
                })
            }
        };

        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        // Request reconciliation - no SBOM linked
        var reconcileRequest = new RuntimeReconcileRequestDto
        {
            ImageDigest = TestImageDigest
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", reconcileRequest);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal("NO_SBOM", payload!.ErrorCode);
        Assert.Equal(2, payload.TotalRuntimeLibraries);
        Assert.Equal(0, payload.TotalSbomComponents);
        Assert.Equal(0, payload.MatchCount);
        Assert.Equal(2, payload.MissCount);
        Assert.Equal(2, payload.Misses.Count);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithHashMatches_ReturnsMatches()
    {
        var mockObjectStore = new InMemoryArtifactObjectStore();

        using var factory = new ScannerApplicationFactory(
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(mockObjectStore);
            });
        using var client = factory.CreateClient();

        // Setup: Create SBOM artifact with components
        const string sbomArtifactId = "imagebom/sha256-sbomdigest";
        const string sbomHash = "sha256:sbomdigest";

        using (var scope = factory.Services.CreateScope())
        {
            var artifacts = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
            var links = scope.ServiceProvider.GetRequiredService<LinkRepository>();

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = sbomArtifactId,
                Type = ArtifactDocumentType.ImageBom,
                Format = ArtifactDocumentFormat.CycloneDxJson,
                MediaType = "application/json",
                BytesSha256 = sbomHash,
                RefCount = 1
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = TestImageDigest,
                ArtifactId = sbomArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);
        }

        // Create SBOM content with matching hash
        var sbom = CreateSbomWithComponents(new[]
        {
            ("comp-1", "openssl", "3.0.0", "pkg:deb/debian/openssl@3.0.0", new[] { "lib1hash" }, new[] { "/lib/libssl.so.3" }),
            ("comp-2", "libcrypto", "3.0.0", "pkg:deb/debian/libcrypto@3.0.0", new[] { "lib2hash" }, new[] { "/lib/libcrypto.so.3" })
        });

        var sbomJson = await Serializer.SerializeAsync(sbom);
        var sbomBytes = Encoding.UTF8.GetBytes(sbomJson);
        mockObjectStore.Store($"scanner-artifacts/imagebom/cyclonedx-json/{sbomHash}", sbomBytes);

        // Ingest runtime event with matching libraries
        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelopeWithLibraries("evt-hash-001", TestImageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/lib/libssl.so.3", Sha256 = "lib1hash", Inode = 1001 },
                    new RuntimeLoadedLibrary { Path = "/lib/libcrypto.so.3", Sha256 = "lib2hash", Inode = 1002 }
                })
            }
        };

        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        // Request reconciliation
        var reconcileRequest = new RuntimeReconcileRequestDto
        {
            ImageDigest = TestImageDigest
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", reconcileRequest);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Null(payload!.ErrorCode);
        Assert.Equal(2, payload.TotalRuntimeLibraries);
        Assert.Equal(2, payload.TotalSbomComponents);
        Assert.Equal(2, payload.MatchCount);
        Assert.Equal(0, payload.MissCount);
        Assert.Equal(2, payload.Matches.Count);
        Assert.All(payload.Matches, m => Assert.Equal("sha256", m.MatchType));
    }

    [Fact]
    public async Task ReconcileEndpoint_WithPathMatches_ReturnsMatches()
    {
        var mockObjectStore = new InMemoryArtifactObjectStore();

        using var factory = new ScannerApplicationFactory(
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(mockObjectStore);
            });
        using var client = factory.CreateClient();

        const string imageDigest = "sha256:pathtest123";
        const string sbomArtifactId = "imagebom/sha256-sbomdigest-path";
        const string sbomHash = "sha256:sbomdigest-path";

        using (var scope = factory.Services.CreateScope())
        {
            var artifacts = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
            var links = scope.ServiceProvider.GetRequiredService<LinkRepository>();

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = sbomArtifactId,
                Type = ArtifactDocumentType.ImageBom,
                Format = ArtifactDocumentFormat.CycloneDxJson,
                MediaType = "application/json",
                BytesSha256 = sbomHash,
                RefCount = 1
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = imageDigest,
                ArtifactId = sbomArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);
        }

        // Create SBOM with paths but different hashes (path matching)
        var sbom = CreateSbomWithComponents(new[]
        {
            ("comp-1", "zlib", "1.2.11", "pkg:deb/debian/zlib@1.2.11", Array.Empty<string>(), new[] { "/usr/lib/libz.so.1" })
        });

        var sbomJson = await Serializer.SerializeAsync(sbom);
        var sbomBytes = Encoding.UTF8.GetBytes(sbomJson);
        mockObjectStore.Store($"scanner-artifacts/imagebom/cyclonedx-json/{sbomHash}", sbomBytes);

        // Ingest runtime event - no hash, path match only
        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelopeWithLibraries("evt-path-001", imageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/usr/lib/libz.so.1", Sha256 = null, Inode = 2001 }
                })
            }
        };

        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        var reconcileRequest = new RuntimeReconcileRequestDto
        {
            ImageDigest = imageDigest
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", reconcileRequest);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Null(payload!.ErrorCode);
        Assert.Equal(1, payload.MatchCount);
        Assert.Equal(0, payload.MissCount);
        Assert.Single(payload.Matches);
        Assert.Equal("path", payload.Matches[0].MatchType);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithSpecificEventId_UsesSpecifiedEvent()
    {
        var mockObjectStore = new InMemoryArtifactObjectStore();

        using var factory = new ScannerApplicationFactory(
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(mockObjectStore);
            });
        using var client = factory.CreateClient();

        const string imageDigest = "sha256:eventidtest";
        const string sbomArtifactId = "imagebom/sha256-sbomdigest-eventid";
        const string sbomHash = "sha256:sbomdigest-eventid";

        using (var scope = factory.Services.CreateScope())
        {
            var artifacts = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
            var links = scope.ServiceProvider.GetRequiredService<LinkRepository>();

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = sbomArtifactId,
                Type = ArtifactDocumentType.ImageBom,
                Format = ArtifactDocumentFormat.CycloneDxJson,
                MediaType = "application/json",
                BytesSha256 = sbomHash,
                RefCount = 1
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = imageDigest,
                ArtifactId = sbomArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);
        }

        var sbom = CreateSbomWithComponents(new[]
        {
            ("comp-1", "test-lib", "1.0.0", "pkg:test/lib@1.0.0", new[] { "specifichash" }, Array.Empty<string>())
        });

        var sbomJson = await Serializer.SerializeAsync(sbom);
        var sbomBytes = Encoding.UTF8.GetBytes(sbomJson);
        mockObjectStore.Store($"scanner-artifacts/imagebom/cyclonedx-json/{sbomHash}", sbomBytes);

        // Ingest multiple events with different libraries
        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelopeWithLibraries("evt-specific-001", imageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/lib/specific.so", Sha256 = "specifichash", Inode = 3001 }
                }),
                CreateEnvelopeWithLibraries("evt-specific-002", imageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/lib/other.so", Sha256 = "otherhash", Inode = 3002 }
                })
            }
        };

        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        // Request reconciliation for specific event (evt-specific-001 should match)
        var reconcileRequest = new RuntimeReconcileRequestDto
        {
            ImageDigest = imageDigest,
            RuntimeEventId = "evt-specific-001"
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", reconcileRequest);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal("evt-specific-001", payload!.RuntimeEventId);
        Assert.Equal(1, payload.MatchCount);
        Assert.Equal(0, payload.MissCount);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithNonExistentEventId_ReturnsNotFound()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new RuntimeReconcileRequestDto
        {
            ImageDigest = TestImageDigest,
            RuntimeEventId = "non-existent-event-id"
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", request);

        Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal("RUNTIME_EVENT_NOT_FOUND", payload!.ErrorCode);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithMissingImageDigest_ReturnsBadRequest()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new RuntimeReconcileRequestDto
        {
            ImageDigest = ""
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", request);

        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task ReconcileEndpoint_WithMixedMatchesAndMisses_ReturnsCorrectCounts()
    {
        var mockObjectStore = new InMemoryArtifactObjectStore();

        using var factory = new ScannerApplicationFactory(
            configureServices: services =>
            {
                services.RemoveAll<IArtifactObjectStore>();
                services.AddSingleton<IArtifactObjectStore>(mockObjectStore);
            });
        using var client = factory.CreateClient();

        const string imageDigest = "sha256:mixedtest";
        const string sbomArtifactId = "imagebom/sha256-sbomdigest-mixed";
        const string sbomHash = "sha256:sbomdigest-mixed";

        using (var scope = factory.Services.CreateScope())
        {
            var artifacts = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
            var links = scope.ServiceProvider.GetRequiredService<LinkRepository>();

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = sbomArtifactId,
                Type = ArtifactDocumentType.ImageBom,
                Format = ArtifactDocumentFormat.CycloneDxJson,
                MediaType = "application/json",
                BytesSha256 = sbomHash,
                RefCount = 1
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = imageDigest,
                ArtifactId = sbomArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);
        }

        // SBOM has 2 components
        var sbom = CreateSbomWithComponents(new[]
        {
            ("comp-known-1", "known-lib", "1.0.0", "pkg:test/known@1.0.0", new[] { "knownhash1" }, new[] { "/lib/known.so" }),
            ("comp-known-2", "another-lib", "2.0.0", "pkg:test/another@2.0.0", new[] { "knownhash2" }, Array.Empty<string>())
        });

        var sbomJson = await Serializer.SerializeAsync(sbom);
        var sbomBytes = Encoding.UTF8.GetBytes(sbomJson);
        mockObjectStore.Store($"scanner-artifacts/imagebom/cyclonedx-json/{sbomHash}", sbomBytes);

        // Runtime has 3 libraries: 1 hash match, 1 path match, 1 miss
        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelopeWithLibraries("evt-mixed-001", imageDigest, new[]
                {
                    new RuntimeLoadedLibrary { Path = "/lib/known.so", Sha256 = "knownhash1", Inode = 4001 }, // hash match
                    new RuntimeLoadedLibrary { Path = "/lib/unknown.so", Sha256 = "unknownhash", Inode = 4002 }, // miss
                    new RuntimeLoadedLibrary { Path = "/lib/another.so", Sha256 = "knownhash2", Inode = 4003 } // hash match
                })
            }
        };

        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        var reconcileRequest = new RuntimeReconcileRequestDto
        {
            ImageDigest = imageDigest
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/reconcile", reconcileRequest);

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeReconcileResponseDto>();
        Assert.NotNull(payload);
        Assert.Null(payload!.ErrorCode);
        Assert.Equal(3, payload.TotalRuntimeLibraries);
        Assert.Equal(2, payload.TotalSbomComponents);
        Assert.Equal(2, payload.MatchCount);
        Assert.Equal(1, payload.MissCount);
        Assert.Single(payload.Misses);
        Assert.Equal("/lib/unknown.so", payload.Misses[0].Path);
    }

    private static RuntimeEventEnvelope CreateEnvelopeWithLibraries(
        string eventId,
        string imageDigest,
        RuntimeLoadedLibrary[] libraries)
    {
        var runtimeEvent = new RuntimeEvent
        {
            EventId = eventId,
            When = DateTimeOffset.UtcNow,
            Kind = RuntimeEventKind.ContainerStart,
            Tenant = TestTenant,
            Node = TestNode,
            Runtime = new RuntimeEngine
            {
                Engine = "containerd",
                Version = "1.7.0"
            },
            Workload = new RuntimeWorkload
            {
                Platform = "kubernetes",
                Namespace = "default",
                Pod = "test-pod",
                Container = "test-container",
                ContainerId = $"containerd://{eventId}",
                ImageRef = $"ghcr.io/example/test@{imageDigest}"
            },
            Delta = new RuntimeDelta
            {
                BaselineImageDigest = imageDigest
            },
            Process = new RuntimeProcess
            {
                Pid = 1234,
                Entrypoint = new[] { "/bin/start" },
                EntryTrace = Array.Empty<RuntimeEntryTrace>()
            },
            LoadedLibraries = libraries
        };

        return RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent);
    }

    private static Bom CreateSbomWithComponents(
        (string bomRef, string name, string version, string purl, string[] hashes, string[] paths)[] components)
    {
        var bom = new Bom
        {
            Version = 1,
            SerialNumber = $"urn:uuid:{Guid.NewGuid()}",
            Components = new List<Component>()
        };

        foreach (var (bomRef, name, version, purl, hashes, paths) in components)
        {
            var component = new Component
            {
                BomRef = bomRef,
                Name = name,
                Version = version,
                Purl = purl,
                Type = Component.Classification.Library,
                Hashes = hashes.Select(h => new Hash
                {
                    Alg = Hash.HashAlgorithm.SHA_256,
                    Content = h
                }).ToList()
            };

            if (paths.Length > 0)
            {
                component.Evidence = new Evidence
                {
                    Occurrences = paths.Select(p => new EvidenceOccurrence
                    {
                        Location = p
                    }).ToList()
                };
            }

            bom.Components.Add(component);
        }

        return bom;
    }

    private sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
    {
        private readonly Dictionary<string, byte[]> _store = new(StringComparer.OrdinalIgnoreCase);

        public void Store(string key, byte[] content)
        {
            _store[key] = content;
        }

        public Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
        {
            using var ms = new MemoryStream();
            content.CopyTo(ms);
            _store[$"{descriptor.Bucket}/{descriptor.Key}"] = ms.ToArray();
            return Task.CompletedTask;
        }

        public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            var key = $"{descriptor.Bucket}/{descriptor.Key}";
            if (_store.TryGetValue(key, out var content))
            {
                return Task.FromResult<Stream?>(new MemoryStream(content));
            }

            return Task.FromResult<Stream?>(null);
        }

        public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
        {
            var key = $"{descriptor.Bucket}/{descriptor.Key}";
            _store.Remove(key);
            return Task.CompletedTask;
        }
    }
}
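Taken together, these tests pin down the reconcile API surface: POST /api/v1/runtime/reconcile accepts an ImageDigest plus an optional RuntimeEventId and returns match/miss counts or an ErrorCode such as NO_SBOM. A hedged client-side sketch of that exchange is below; ReconcileResult is a local stand-in for RuntimeReconcileResponseDto (property names taken from the assertions above) and the base address is a placeholder.

using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

// Local stand-in for RuntimeReconcileResponseDto; property names follow the test assertions.
sealed record ReconcileResult(
    string? ErrorCode,
    int TotalRuntimeLibraries,
    int TotalSbomComponents,
    int MatchCount,
    int MissCount);

static class ReconcileClientSketch
{
    static async Task Main()
    {
        // Placeholder base address; point this at a real Scanner WebService deployment.
        using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };

        // Ask the scanner to reconcile runtime-loaded libraries against the SBOM linked to this image.
        var response = await client.PostAsJsonAsync(
            "/api/v1/runtime/reconcile",
            new { ImageDigest = "sha256:abc123def456" });

        var result = await response.Content.ReadFromJsonAsync<ReconcileResult>();
        Console.WriteLine($"matches={result?.MatchCount} misses={result?.MissCount} error={result?.ErrorCode}");
    }
}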