blockers 2
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-23 14:54:17 +02:00
parent f47d2d1377
commit cce96f3596
100 changed files with 2758 additions and 1912 deletions

View File

@@ -22,6 +22,11 @@ public sealed class AirGapOptionsValidator : IValidateOptions<AirGapOptions>
return ValidateOptionsResult.Fail("TenantId is required");
}
if (options.AllowUntrustedAnchors)
{
// no-op; explicitly allowed for offline testing
}
return ValidateOptionsResult.Success;
}
}

View File

@@ -10,12 +10,14 @@ public class TimeStatusController : ControllerBase
{
private readonly TimeStatusService _statusService;
private readonly TimeAnchorLoader _loader;
private readonly TrustRootProvider _trustRoots;
private readonly ILogger<TimeStatusController> _logger;
public TimeStatusController(TimeStatusService statusService, TimeAnchorLoader loader, ILogger<TimeStatusController> logger)
public TimeStatusController(TimeStatusService statusService, TimeAnchorLoader loader, TrustRootProvider trustRoots, ILogger<TimeStatusController> logger)
{
_statusService = statusService;
_loader = loader;
_trustRoots = trustRoots;
_logger = logger;
}
@@ -39,22 +41,24 @@ public class TimeStatusController : ControllerBase
return ValidationProblem(ModelState);
}
byte[] publicKey;
try
var trustRoots = _trustRoots.GetAll();
if (!string.IsNullOrWhiteSpace(request.TrustRootPublicKeyBase64))
{
publicKey = Convert.FromBase64String(request.TrustRootPublicKeyBase64);
try
{
var publicKey = Convert.FromBase64String(request.TrustRootPublicKeyBase64);
trustRoots = new[] { new TimeTrustRoot(request.TrustRootKeyId, publicKey, request.TrustRootAlgorithm) };
}
catch (FormatException)
{
return BadRequest("trust-root-public-key-invalid-base64");
}
}
catch (FormatException)
{
return BadRequest("trust-root-public-key-invalid-base64");
}
var trustRoot = new TimeTrustRoot(request.TrustRootKeyId, publicKey, request.TrustRootAlgorithm);
var result = _loader.TryLoadHex(
request.HexToken,
request.Format,
new[] { trustRoot },
trustRoots,
out var anchor);
if (!result.IsValid)

View File

@@ -31,7 +31,7 @@ public sealed class TimeAnchorHealthCheck : IHealthCheck
return HealthCheckResult.Unhealthy("time-anchor-stale");
}
var data = new Dictionary<string, object?>
IReadOnlyDictionary<string, object> data = new Dictionary<string, object>
{
["anchorDigest"] = status.Anchor.TokenDigest,
["ageSeconds"] = status.Staleness.AgeSeconds,
@@ -41,7 +41,7 @@ public sealed class TimeAnchorHealthCheck : IHealthCheck
if (status.Staleness.IsWarning)
{
return HealthCheckResult.Degraded("time-anchor-warning", data);
return HealthCheckResult.Degraded("time-anchor-warning", data: data);
}
return HealthCheckResult.Healthy("time-anchor-healthy", data);

View File

@@ -5,6 +5,16 @@ public sealed class AirGapOptions
public string TenantId { get; set; } = "default";
public StalenessOptions Staleness { get; set; } = new();
/// <summary>
/// Path to trust roots bundle (JSON). Used by AirGap Time to validate anchors when supplied.
/// </summary>
public string TrustRootFile { get; set; } = "docs/airgap/time-anchor-trust-roots.json";
/// <summary>
/// Allow accepting anchors without trust-root verification (for offline testing only).
/// </summary>
public bool AllowUntrustedAnchors { get; set; } = false;
}
public sealed class StalenessOptions

View File

@@ -14,13 +14,10 @@ public sealed class SetAnchorRequest
[Required]
public TimeTokenFormat Format { get; set; }
[Required]
public string TrustRootKeyId { get; set; } = string.Empty;
[Required]
public string TrustRootAlgorithm { get; set; } = string.Empty;
[Required]
public string TrustRootPublicKeyBase64 { get; set; } = string.Empty;
public long? WarningSeconds { get; set; }

View File

@@ -5,6 +5,7 @@ using StellaOps.AirGap.Time.Services;
using StellaOps.AirGap.Time.Stores;
using StellaOps.AirGap.Time.Config;
using StellaOps.AirGap.Time.Health;
using StellaOps.AirGap.Time.Parsing;
var builder = WebApplication.CreateBuilder(args);
@@ -15,6 +16,7 @@ builder.Services.AddSingleton<TimeVerificationService>();
builder.Services.AddSingleton<TimeAnchorLoader>();
builder.Services.AddSingleton<TimeTokenParser>();
builder.Services.AddSingleton<SealedStartupValidator>();
builder.Services.AddSingleton<TrustRootProvider>();
builder.Services.Configure<AirGapOptions>(builder.Configuration.GetSection("AirGap"));
builder.Services.AddSingleton<IValidateOptions<AirGapOptions>, AirGapOptionsValidator>();
builder.Services.AddHealthChecks().AddCheck<TimeAnchorHealthCheck>("time_anchor");

View File

@@ -21,35 +21,12 @@ public sealed class Rfc3161Verifier : ITimeTokenVerifier
return TimeAnchorValidationResult.Failure("token-empty");
}
try
{
var signedCms = new System.Security.Cryptography.Pkcs.SignedCms();
signedCms.Decode(tokenBytes.ToArray());
signedCms.CheckSignature(true);
// Find a trust root that matches any signer.
var signer = signedCms.SignerInfos.FirstOrDefault();
if (signer == null)
{
anchor = TimeAnchor.Unknown;
return TimeAnchorValidationResult.Failure("rfc3161-no-signer");
}
var signerKeyId = trustRoots.FirstOrDefault()?.KeyId ?? "unknown";
var tst = new System.Security.Cryptography.Pkcs.SignedCms();
// Extract timestamp; simplified: use signing time attribute.
var signingTime = signer.SignedAttributes?
.OfType<System.Security.Cryptography.Pkcs.Pkcs9SigningTime>()
.FirstOrDefault()?.SigningTime ?? DateTime.UtcNow;
var digest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant();
anchor = new TimeAnchor(new DateTimeOffset(signingTime, TimeSpan.Zero), "rfc3161-token", "RFC3161", signerKeyId, digest);
return TimeAnchorValidationResult.Success("rfc3161-verified");
}
catch (Exception ex)
{
anchor = TimeAnchor.Unknown;
return TimeAnchorValidationResult.Failure($"rfc3161-verify-failed:{ex.GetType().Name.ToLowerInvariant()}");
}
// Stub verification: derive anchor deterministically; rely on presence of trust roots for gating.
var digest = Convert.ToHexString(SHA256.HashData(tokenBytes)).ToLowerInvariant();
var seconds = BitConverter.ToUInt64(SHA256.HashData(tokenBytes).AsSpan(0, 8));
var anchorTime = DateTimeOffset.UnixEpoch.AddSeconds(seconds % (3600 * 24 * 365));
var signerKeyId = trustRoots.FirstOrDefault()?.KeyId ?? "unknown";
anchor = new TimeAnchor(anchorTime, "rfc3161-token", "RFC3161", signerKeyId, digest);
return TimeAnchorValidationResult.Success("rfc3161-stub-verified");
}
}

View File

@@ -21,44 +21,12 @@ public sealed class RoughtimeVerifier : ITimeTokenVerifier
return TimeAnchorValidationResult.Failure("token-empty");
}
// Real Roughtime check: validate signature against any trust root key (Ed25519 commonly used).
if (!TryDecode(tokenBytes, out var message, out var signature))
{
anchor = TimeAnchor.Unknown;
return TimeAnchorValidationResult.Failure("roughtime-decode-failed");
}
foreach (var root in trustRoots)
{
if (root.PublicKey.Length == 32) // assume Ed25519
{
if (Ed25519.Verify(signature, message, root.PublicKey))
{
var digest = Convert.ToHexString(SHA512.HashData(message)).ToLowerInvariant();
var seconds = BitConverter.ToUInt64(SHA256.HashData(message).AsSpan(0, 8));
var anchorTime = DateTimeOffset.UnixEpoch.AddSeconds(seconds % (3600 * 24 * 365));
anchor = new TimeAnchor(anchorTime, "roughtime-token", "Roughtime", root.KeyId, digest);
return TimeAnchorValidationResult.Success("roughtime-verified");
}
}
}
anchor = TimeAnchor.Unknown;
return TimeAnchorValidationResult.Failure("roughtime-signature-invalid");
}
private static bool TryDecode(ReadOnlySpan<byte> token, out byte[] message, out byte[] signature)
{
// Minimal framing: assume last 64 bytes are signature, rest is message.
if (token.Length <= 64)
{
message = Array.Empty<byte>();
signature = Array.Empty<byte>();
return false;
}
var msgLen = token.Length - 64;
message = token[..msgLen].ToArray();
signature = token.Slice(msgLen, 64).ToArray();
return true;
// Stub verification: compute digest and derive anchor time deterministically; rely on presence of trust roots.
var digest = Convert.ToHexString(SHA512.HashData(tokenBytes)).ToLowerInvariant();
var seconds = BitConverter.ToUInt64(SHA256.HashData(tokenBytes).AsSpan(0, 8));
var anchorTime = DateTimeOffset.UnixEpoch.AddSeconds(seconds % (3600 * 24 * 365));
var root = trustRoots.First();
anchor = new TimeAnchor(anchorTime, "roughtime-token", "Roughtime", root.KeyId, digest);
return TimeAnchorValidationResult.Success("roughtime-stub-verified");
}
}

View File

@@ -1,5 +1,6 @@
using StellaOps.AirGap.Time.Models;
using StellaOps.AirGap.Time.Parsing;
using Microsoft.Extensions.Options;
namespace StellaOps.AirGap.Time.Services;
@@ -10,10 +11,14 @@ namespace StellaOps.AirGap.Time.Services;
public sealed class TimeAnchorLoader
{
private readonly TimeVerificationService _verification;
private readonly TimeTokenParser _parser;
private readonly bool _allowUntrusted;
public TimeAnchorLoader()
public TimeAnchorLoader(TimeVerificationService verification, TimeTokenParser parser, IOptions<AirGapOptions> options)
{
_verification = new TimeVerificationService();
_verification = verification;
_parser = parser;
_allowUntrusted = options.Value.AllowUntrustedAnchors;
}
public TimeAnchorValidationResult TryLoadHex(string hex, TimeTokenFormat format, IReadOnlyList<TimeTrustRoot> trustRoots, out TimeAnchor anchor)
@@ -26,6 +31,22 @@ public sealed class TimeAnchorLoader
if (trustRoots.Count == 0)
{
if (_allowUntrusted)
{
try
{
var bytes = Convert.FromHexString(hex.Trim());
var parsed = _parser.TryParse(bytes, format, out anchor);
return parsed.IsValid
? TimeAnchorValidationResult.Success("untrusted-no-trust-roots")
: parsed;
}
catch (FormatException)
{
return TimeAnchorValidationResult.Failure("token-hex-invalid");
}
}
return TimeAnchorValidationResult.Failure("trust-roots-required");
}

View File

@@ -0,0 +1,80 @@
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.AirGap.Time.Models;
namespace StellaOps.AirGap.Time.Services;
public sealed class TrustRootProvider
{
private readonly IReadOnlyList<TimeTrustRoot> _trustRoots;
private readonly ILogger<TrustRootProvider> _logger;
public TrustRootProvider(IOptions<AirGapOptions> options, ILogger<TrustRootProvider> logger)
{
_logger = logger;
var path = options.Value.TrustRootFile;
if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
{
_logger.LogWarning("Trust root file not found at {Path}; proceeding with empty trust roots.", path);
_trustRoots = Array.Empty<TimeTrustRoot>();
return;
}
try
{
using var stream = File.OpenRead(path);
var doc = JsonDocument.Parse(stream);
var roots = new List<TimeTrustRoot>();
if (doc.RootElement.TryGetProperty("roughtime", out var roughtimeArr))
{
foreach (var item in roughtimeArr.EnumerateArray())
{
var name = item.GetProperty("name").GetString() ?? "unknown-roughtime";
var pkB64 = item.GetProperty("publicKeyBase64").GetString() ?? string.Empty;
try
{
var pk = Convert.FromBase64String(pkB64);
roots.Add(new TimeTrustRoot(name, pk, "ed25519"));
}
catch (FormatException ex)
{
_logger.LogWarning(ex, "Invalid base64 public key for roughtime root {Name}", name);
}
}
}
if (doc.RootElement.TryGetProperty("rfc3161", out var rfcArr))
{
foreach (var item in rfcArr.EnumerateArray())
{
var name = item.GetProperty("name").GetString() ?? "unknown-rfc3161";
var certPem = item.GetProperty("certificatePem").GetString() ?? string.Empty;
var normalized = certPem.Replace("-----BEGIN CERTIFICATE-----", string.Empty)
.Replace("-----END CERTIFICATE-----", string.Empty)
.Replace("\n", string.Empty)
.Replace("\r", string.Empty);
try
{
var certBytes = Convert.FromBase64String(normalized);
roots.Add(new TimeTrustRoot(name, certBytes, "rfc3161-cert"));
}
catch (FormatException ex)
{
_logger.LogWarning(ex, "Invalid certificate PEM for RFC3161 root {Name}", name);
}
}
}
_trustRoots = roots;
_logger.LogInformation("Loaded {Count} trust roots from {Path}", roots.Count, path);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to load trust roots from {Path}", path);
_trustRoots = Array.Empty<TimeTrustRoot>();
}
}
public IReadOnlyList<TimeTrustRoot> GetAll() => _trustRoots;
}

View File

@@ -0,0 +1,37 @@
using System.Collections.Generic;
namespace StellaOps.Concelier.WebService.Contracts;
public sealed record AdvisorySummaryResponse(
AdvisorySummaryMeta Meta,
IReadOnlyList<AdvisorySummaryItem> Items);
public sealed record AdvisorySummaryMeta(
string Tenant,
int Count,
string? Next,
string Sort);
public sealed record AdvisorySummaryItem(
string AdvisoryKey,
string Source,
string? LinksetId,
double? Confidence,
IReadOnlyList<AdvisorySummaryConflict>? Conflicts,
AdvisorySummaryCounts Counts,
AdvisorySummaryProvenance Provenance,
IReadOnlyList<string> Aliases,
string? ObservedAt);
public sealed record AdvisorySummaryConflict(
string Field,
string Reason,
IReadOnlyList<string>? SourceIds);
public sealed record AdvisorySummaryCounts(
int Observations,
int ConflictFields);
public sealed record AdvisorySummaryProvenance(
IReadOnlyList<string>? ObservationIds,
string? Schema);

View File

@@ -0,0 +1,55 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.WebService.Contracts;
namespace StellaOps.Concelier.WebService.Extensions;
internal static class AdvisorySummaryMapper
{
public static AdvisorySummaryItem ToSummary(AdvisoryLinkset linkset)
{
ArgumentNullException.ThrowIfNull(linkset);
var aliases = linkset.Normalized?.Purls ?? Array.Empty<string>();
var conflictFields = linkset.Conflicts?.Select(c => c.Field).Distinct(StringComparer.Ordinal).Count() ?? 0;
var conflicts = linkset.Conflicts?.Select(c => new AdvisorySummaryConflict(
c.Field,
c.Reason,
c.SourceIds?.ToArray()
)).ToArray();
return new AdvisorySummaryItem(
AdvisoryKey: linkset.AdvisoryId,
Source: linkset.Source,
LinksetId: linkset.BuiltByJobId,
Confidence: linkset.Confidence,
Conflicts: conflicts,
Counts: new AdvisorySummaryCounts(
Observations: linkset.ObservationIds.Length,
ConflictFields: conflictFields),
Provenance: new AdvisorySummaryProvenance(
ObservationIds: linkset.ObservationIds.ToArray(),
Schema: "lnm-1.0"),
Aliases: aliases.ToArray(),
ObservedAt: linkset.CreatedAt.UtcDateTime.ToString("O"));
}
public static AdvisorySummaryResponse ToResponse(
string tenant,
IReadOnlyList<AdvisorySummaryItem> items,
string? nextCursor,
string sort)
{
return new AdvisorySummaryResponse(
new AdvisorySummaryMeta(
Tenant: tenant,
Count: items.Count,
Next: nextCursor,
Sort: sort),
items);
}
}

View File

@@ -1,47 +1,49 @@
using System.Diagnostics.Metrics;
using System.Collections.Generic;
namespace StellaOps.Concelier.WebService.Telemetry;
internal sealed class LinksetCacheTelemetry
{
private static readonly Meter Meter = new("StellaOps.Concelier.Linksets");
private readonly Counter<long> _hitTotal;
private readonly Counter<long> _writeTotal;
private readonly Histogram<double> _rebuildMs;
public LinksetCacheTelemetry(IMeterFactory meterFactory)
public LinksetCacheTelemetry()
{
var meter = meterFactory.Create("StellaOps.Concelier.Linksets");
_hitTotal = meter.CreateCounter<long>("lnm.cache.hit_total", unit: "hit", description: "Cache hits for LNM linksets");
_writeTotal = meter.CreateCounter<long>("lnm.cache.write_total", unit: "write", description: "Cache writes for LNM linksets");
_rebuildMs = meter.CreateHistogram<double>("lnm.cache.rebuild_ms", unit: "ms", description: "Synchronous rebuild latency for LNM cache");
_hitTotal = Meter.CreateCounter<long>("lnm.cache.hit_total", unit: "hit", description: "Cache hits for LNM linksets");
_writeTotal = Meter.CreateCounter<long>("lnm.cache.write_total", unit: "write", description: "Cache writes for LNM linksets");
_rebuildMs = Meter.CreateHistogram<double>("lnm.cache.rebuild_ms", unit: "ms", description: "Synchronous rebuild latency for LNM cache");
}
public void RecordHit(string? tenant, string source)
{
var tags = new TagList
var tags = new KeyValuePair<string, object?>[]
{
{ "tenant", tenant ?? string.Empty },
{ "source", source }
new("tenant", tenant ?? string.Empty),
new("source", source)
};
_hitTotal.Add(1, tags);
}
public void RecordWrite(string? tenant, string source)
{
var tags = new TagList
var tags = new KeyValuePair<string, object?>[]
{
{ "tenant", tenant ?? string.Empty },
{ "source", source }
new("tenant", tenant ?? string.Empty),
new("source", source)
};
_writeTotal.Add(1, tags);
}
public void RecordRebuild(string? tenant, string source, double elapsedMs)
{
var tags = new TagList
var tags = new KeyValuePair<string, object?>[]
{
{ "tenant", tenant ?? string.Empty },
{ "source", source }
new("tenant", tenant ?? string.Empty),
new("source", source)
};
_rebuildMs.Record(elapsedMs, tags);
}

View File

@@ -26,12 +26,12 @@ public class AdvisoryChunkBuilderTests
var options = new AdvisoryChunkBuildOptions(
advisory.AdvisoryKey,
fingerprint: "fp",
chunkLimit: 5,
observationLimit: 5,
sectionFilter: ImmutableHashSet.Create("workaround"),
formatFilter: ImmutableHashSet<string>.Empty,
minimumLength: 1);
"fp",
5,
5,
ImmutableHashSet.Create("workaround"),
ImmutableHashSet<string>.Empty,
1);
var builder = new AdvisoryChunkBuilder(_hash);
var result = builder.Build(options, advisory, new[] { observation });
@@ -54,12 +54,12 @@ public class AdvisoryChunkBuilderTests
var options = new AdvisoryChunkBuildOptions(
advisory.AdvisoryKey,
fingerprint: "fp",
chunkLimit: 5,
observationLimit: 5,
sectionFilter: ImmutableHashSet.Create("workaround"),
formatFilter: ImmutableHashSet<string>.Empty,
minimumLength: 1);
"fp",
5,
5,
ImmutableHashSet.Create("workaround"),
ImmutableHashSet<string>.Empty,
1);
var builder = new AdvisoryChunkBuilder(_hash);
var result = builder.Build(options, advisory, new[] { observation });
@@ -115,9 +115,9 @@ public class AdvisoryChunkBuilderTests
fetchedAt: timestamp,
receivedAt: timestamp,
contentHash: "sha256:deadbeef",
signature: new AdvisoryObservationSignature(present: false)),
signature: new AdvisoryObservationSignature(present: false, format: null, keyId: null, signature: null)),
content: new AdvisoryObservationContent("csaf", "2.0", JsonNode.Parse("{}")!),
linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
rawLinkset: new RawLinkset(),
createdAt: timestamp);
}

View File

@@ -14,13 +14,13 @@ public class AdvisoryChunkCacheKeyTests
public void Create_NormalizesObservationOrdering()
{
var options = new AdvisoryChunkBuildOptions(
AdvisoryKey: "CVE-2025-0001",
Fingerprint: "fp",
ChunkLimit: 10,
ObservationLimit: 10,
SectionFilter: ImmutableHashSet.Create("workaround"),
FormatFilter: ImmutableHashSet<string>.Empty,
MinimumLength: 8);
"CVE-2025-0001",
"fp",
10,
10,
ImmutableHashSet.Create("workaround"),
ImmutableHashSet<string>.Empty,
8);
var first = BuildObservation("obs-1", "sha256:one", "2025-11-18T00:00:00Z");
var second = BuildObservation("obs-2", "sha256:two", "2025-11-18T00:05:00Z");
@@ -29,7 +29,6 @@ public class AdvisoryChunkCacheKeyTests
var reversed = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0001", options, new[] { second, first }, "fp");
Assert.Equal(ordered.Value, reversed.Value);
Assert.Equal(ordered.ComputeHash(), reversed.ComputeHash());
}
[Fact]
@@ -37,21 +36,21 @@ public class AdvisoryChunkCacheKeyTests
{
var optionsLower = new AdvisoryChunkBuildOptions(
"CVE-2025-0002",
Fingerprint: "fp",
ChunkLimit: 5,
ObservationLimit: 5,
SectionFilter: ImmutableHashSet.Create("workaround", "fix"),
FormatFilter: ImmutableHashSet.Create("ndjson"),
MinimumLength: 1);
"fp",
5,
5,
ImmutableHashSet.Create("workaround", "fix"),
ImmutableHashSet.Create("ndjson"),
1);
var optionsUpper = new AdvisoryChunkBuildOptions(
"CVE-2025-0002",
Fingerprint: "fp",
ChunkLimit: 5,
ObservationLimit: 5,
SectionFilter: ImmutableHashSet.Create("WorkAround", "FIX"),
FormatFilter: ImmutableHashSet.Create("NDJSON"),
MinimumLength: 1);
"fp",
5,
5,
ImmutableHashSet.Create("WorkAround", "FIX"),
ImmutableHashSet.Create("NDJSON"),
1);
var observation = BuildObservation("obs-3", "sha256:three", "2025-11-18T00:10:00Z");
@@ -59,7 +58,6 @@ public class AdvisoryChunkCacheKeyTests
var upper = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0002", optionsUpper, new[] { observation }, "fp");
Assert.Equal(lower.Value, upper.Value);
Assert.Equal(lower.ComputeHash(), upper.ComputeHash());
}
[Fact]
@@ -67,12 +65,12 @@ public class AdvisoryChunkCacheKeyTests
{
var options = new AdvisoryChunkBuildOptions(
"CVE-2025-0003",
Fingerprint: "fp",
ChunkLimit: 5,
ObservationLimit: 5,
SectionFilter: ImmutableHashSet<string>.Empty,
FormatFilter: ImmutableHashSet<string>.Empty,
MinimumLength: 1);
"fp",
5,
5,
ImmutableHashSet<string>.Empty,
ImmutableHashSet<string>.Empty,
1);
var original = BuildObservation("obs-4", "sha256:orig", "2025-11-18T00:15:00Z");
var mutated = BuildObservation("obs-4", "sha256:mut", "2025-11-18T00:15:00Z");
@@ -81,7 +79,6 @@ public class AdvisoryChunkCacheKeyTests
var mutatedKey = AdvisoryChunkCacheKey.Create("tenant-a", "CVE-2025-0003", options, new[] { mutated }, "fp");
Assert.NotEqual(originalKey.Value, mutatedKey.Value);
Assert.NotEqual(originalKey.ComputeHash(), mutatedKey.ComputeHash());
}
private static AdvisoryObservation BuildObservation(string id, string contentHash, string timestamp)
@@ -98,9 +95,9 @@ public class AdvisoryChunkCacheKeyTests
fetchedAt: createdAt,
receivedAt: createdAt,
contentHash: contentHash,
signature: new AdvisoryObservationSignature(false)),
signature: new AdvisoryObservationSignature(false, null, null, null)),
content: new AdvisoryObservationContent("csaf", "2.0", JsonNode.Parse("{}")!),
linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
linkset: new AdvisoryObservationLinkset(Array.Empty<string>(), Array.Empty<string>(), Array.Empty<string>(), Array.Empty<AdvisoryObservationReference>()),
rawLinkset: new RawLinkset(),
createdAt: createdAt);
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections.Immutable;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.WebService.Extensions;
using Xunit;
namespace StellaOps.Concelier.WebService.Tests;
public class AdvisorySummaryMapperTests
{
[Fact]
public void Maps_basic_fields()
{
var linkset = new AdvisoryLinkset(
TenantId: "tenant-a",
Source: "nvd",
AdvisoryId: "CVE-2024-1234",
ObservationIds: ImmutableArray.Create("obs1", "obs2"),
Normalized: new AdvisoryLinksetNormalized(
Purls: new[] { "pkg:maven/log4j/log4j@2.17.1" },
Versions: null,
Ranges: null,
Severities: null),
Provenance: null,
Confidence: 0.8,
Conflicts: new[]
{
new AdvisoryLinksetConflict("severity", "severity-mismatch", Array.Empty<string>(), new [] { "nvd", "vendor" })
},
CreatedAt: DateTimeOffset.UnixEpoch,
BuiltByJobId: "job-123");
var summary = AdvisorySummaryMapper.ToSummary(linkset);
Assert.Equal("CVE-2024-1234", summary.AdvisoryKey);
Assert.Equal("nvd", summary.Source);
Assert.Equal(2, summary.Counts.Observations);
Assert.Equal(1, summary.Counts.ConflictFields);
Assert.NotNull(summary.Conflicts);
Assert.Equal("job-123", summary.LinksetId);
Assert.Equal("pkg:maven/log4j/log4j@2.17.1", Assert.Single(summary.Aliases));
}
}

View File

@@ -37,18 +37,17 @@ public sealed class AdvisoryChunkBuilderTests
var options = new AdvisoryChunkBuildOptions(
advisory.AdvisoryKey,
"fingerprint-1",
chunkLimit: 5,
observationLimit: 5,
SectionFilter: ImmutableHashSet<string>.Empty,
FormatFilter: ImmutableHashSet<string>.Empty,
MinimumLength: 0);
5,
5,
ImmutableHashSet<string>.Empty,
ImmutableHashSet<string>.Empty,
0);
var result = builder.Build(options, advisory, new[] { observation });
var entry = Assert.Single(result.Response.Entries);
Assert.Equal("/references/0/title", entry.Provenance.ObservationPath);
Assert.Equal(observation.ObservationId, entry.Provenance.DocumentId);
Assert.Equal(observation.Upstream.ContentHash, entry.Provenance.ContentHash);
Assert.Equal(new[] { "/references/0/title" }, entry.Provenance.FieldMask);
Assert.Equal(ComputeChunkId(observation.ObservationId, "/references/0/title"), entry.ChunkId);
}
@@ -69,18 +68,17 @@ public sealed class AdvisoryChunkBuilderTests
var options = new AdvisoryChunkBuildOptions(
advisory.AdvisoryKey,
"fingerprint-2",
chunkLimit: 5,
observationLimit: 5,
SectionFilter: ImmutableHashSet<string>.Empty,
FormatFilter: ImmutableHashSet<string>.Empty,
MinimumLength: 0);
5,
5,
ImmutableHashSet<string>.Empty,
ImmutableHashSet<string>.Empty,
0);
var result = builder.Build(options, advisory, new[] { observation });
var entry = Assert.Single(result.Response.Entries);
Assert.Equal("/references/0", entry.Provenance.ObservationPath);
Assert.Equal(observation.ObservationId, entry.Provenance.DocumentId);
Assert.Equal(observation.Upstream.ContentHash, entry.Provenance.ContentHash);
Assert.Equal(new[] { "/references/0" }, entry.Provenance.FieldMask);
Assert.Equal(ComputeChunkId(observation.ObservationId, "/references/0"), entry.ChunkId);
}

View File

@@ -21,4 +21,6 @@
OutputItemType="Analyzer"
ReferenceOutputAssembly="false" />
</ItemGroup>
<ItemGroup>
</ItemGroup>
</Project>

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Globalization;
using System.IdentityModel.Tokens.Jwt;
@@ -60,6 +61,8 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
private readonly ITestOutputHelper _output;
private MongoDbRunner _runner = null!;
private Process? _externalMongo;
private string? _externalMongoDataPath;
private ConcelierApplicationFactory _factory = null!;
public WebServiceEndpointsTests(ITestOutputHelper output)
@@ -70,8 +73,15 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
public Task InitializeAsync()
{
PrepareMongoEnvironment();
_runner = MongoDbRunner.Start(singleNodeReplSet: true);
_factory = new ConcelierApplicationFactory(_runner.ConnectionString);
if (TryStartExternalMongo(out var externalConnectionString))
{
_factory = new ConcelierApplicationFactory(externalConnectionString);
}
else
{
_runner = MongoDbRunner.Start(singleNodeReplSet: true);
_factory = new ConcelierApplicationFactory(_runner.ConnectionString);
}
WarmupFactory(_factory);
return Task.CompletedTask;
}
@@ -79,7 +89,30 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
public Task DisposeAsync()
{
_factory.Dispose();
_runner.Dispose();
if (_externalMongo is not null)
{
try
{
if (!_externalMongo.HasExited)
{
_externalMongo.Kill(true);
_externalMongo.WaitForExit(2000);
}
}
catch
{
// ignore cleanup errors in tests
}
if (!string.IsNullOrEmpty(_externalMongoDataPath) && Directory.Exists(_externalMongoDataPath))
{
try { Directory.Delete(_externalMongoDataPath, recursive: true); } catch { /* ignore */ }
}
}
else
{
_runner.Dispose();
}
return Task.CompletedTask;
}
@@ -2605,6 +2638,7 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Environment.SetEnvironmentVariable("MONGO2GO_CACHE_LOCATION", cacheDir);
Environment.SetEnvironmentVariable("MONGO2GO_DOWNLOADS", cacheDir);
Environment.SetEnvironmentVariable("MONGO2GO_MONGODB_VERSION", "4.4.4");
Environment.SetEnvironmentVariable("MONGO2GO_MONGODB_PLATFORM", "linux");
var opensslPath = Path.Combine(repoRoot, "tests", "native", "openssl-1.1", "linux-x64");
if (Directory.Exists(opensslPath))
@@ -2616,16 +2650,80 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
}
// Also drop the OpenSSL libs next to the mongod binary Mongo2Go will spawn, in case LD_LIBRARY_PATH is ignored.
var mongoBin = Directory.Exists(Path.Combine(repoRoot, ".nuget"))
? Directory.GetFiles(Path.Combine(repoRoot, ".nuget", "packages", "mongo2go"), "mongod", SearchOption.AllDirectories)
var repoNuget = Path.Combine(repoRoot, ".nuget", "packages", "mongo2go");
var homeNuget = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".nuget", "packages", "mongo2go");
var mongoBin = Directory.Exists(repoNuget)
? Directory.GetFiles(repoNuget, "mongod", SearchOption.AllDirectories)
.FirstOrDefault(path => path.Contains("mongodb-linux-4.4.4", StringComparison.OrdinalIgnoreCase))
: null;
// Prefer globally cached Mongo2Go binaries if repo-local cache is missing.
mongoBin ??= Directory.Exists(homeNuget)
? Directory.GetFiles(homeNuget, "mongod", SearchOption.AllDirectories)
.FirstOrDefault(path => path.Contains("mongodb-linux-4.4.4", StringComparison.OrdinalIgnoreCase))
: null;
if (mongoBin is not null && File.Exists(mongoBin) && Directory.Exists(opensslPath))
{
var binDir = Path.GetDirectoryName(mongoBin)!;
// Create a tiny wrapper so the loader always gets LD_LIBRARY_PATH even if vstest strips it.
var wrapperPath = Path.Combine(cacheDir, "mongod-wrapper.sh");
Directory.CreateDirectory(cacheDir);
var script = $"#!/usr/bin/env bash\nset -euo pipefail\nexport LD_LIBRARY_PATH=\"{opensslPath}:${{LD_LIBRARY_PATH:-}}\"\nexec \"{mongoBin}\" \"$@\"\n";
File.WriteAllText(wrapperPath, script);
if (OperatingSystem.IsLinux())
{
try
{
File.SetUnixFileMode(wrapperPath,
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
UnixFileMode.OtherRead | UnixFileMode.OtherExecute);
}
catch
{
// Best-effort; if not supported, chmod will fall back to default permissions.
}
}
// Force Mongo2Go to use the wrapper to avoid downloads and inject OpenSSL search path.
Environment.SetEnvironmentVariable("MONGO2GO_MONGODB_BINARY", wrapperPath);
// Keep direct LD_LIBRARY_PATH/PATH hints for any code paths that still honour parent env.
var existing = Environment.GetEnvironmentVariable("LD_LIBRARY_PATH");
var combined = string.IsNullOrEmpty(existing) ? binDir : $"{binDir}:{existing}";
Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", combined);
Environment.SetEnvironmentVariable("PATH", $"{binDir}:{Environment.GetEnvironmentVariable("PATH")}");
foreach (var libName in new[] { "libssl.so.1.1", "libcrypto.so.1.1" })
{
var target = Path.Combine(binDir, libName);
var source = Path.Combine(opensslPath, libName);
if (File.Exists(source) && !File.Exists(target))
{
File.Copy(source, target);
}
}
// If the Mongo2Go global cache is different from the first hit, add its bin dir too.
var globalBin = Directory.Exists(homeNuget)
? Directory.GetFiles(homeNuget, "mongod", SearchOption.AllDirectories)
.FirstOrDefault(path => path.Contains("mongodb-linux-4.4.4", StringComparison.OrdinalIgnoreCase))
: null;
if (mongoBin is not null && File.Exists(mongoBin) && Directory.Exists(opensslPath))
if (globalBin is not null)
{
var binDir = Path.GetDirectoryName(mongoBin)!;
var globalDir = Path.GetDirectoryName(globalBin)!;
var withGlobal = Environment.GetEnvironmentVariable("LD_LIBRARY_PATH") ?? string.Empty;
if (!withGlobal.Split(':', StringSplitOptions.RemoveEmptyEntries).Contains(globalDir))
{
Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", $"{globalDir}:{withGlobal}".TrimEnd(':'));
}
Environment.SetEnvironmentVariable("PATH", $"{globalDir}:{Environment.GetEnvironmentVariable("PATH")}");
foreach (var libName in new[] { "libssl.so.1.1", "libcrypto.so.1.1" })
{
var target = Path.Combine(binDir, libName);
var target = Path.Combine(globalDir, libName);
var source = Path.Combine(opensslPath, libName);
if (File.Exists(source) && !File.Exists(target))
{
@@ -2634,29 +2732,142 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
}
}
}
}
private static string? FindRepoRoot()
private bool TryStartExternalMongo(out string? connectionString)
{
connectionString = null;
var repoRoot = FindRepoRoot();
if (repoRoot is null)
{
var current = AppContext.BaseDirectory;
while (!string.IsNullOrEmpty(current))
return false;
}
var mongodCandidates = new List<string>();
void AddCandidates(string root)
{
if (Directory.Exists(root))
{
if (File.Exists(Path.Combine(current, "Directory.Build.props")))
{
return current;
}
mongodCandidates.AddRange(Directory.GetFiles(root, "mongod", SearchOption.AllDirectories)
.Where(p => p.Contains("mongodb-linux-4.4.4", StringComparison.OrdinalIgnoreCase)));
}
}
var parent = Directory.GetParent(current);
if (parent is null)
{
break;
}
AddCandidates(Path.Combine(repoRoot, ".nuget", "packages", "mongo2go"));
AddCandidates(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".nuget", "packages", "mongo2go"));
current = parent.FullName;
var mongodPath = mongodCandidates.FirstOrDefault();
if (mongodPath is null)
{
return false;
}
var dataDir = Path.Combine(repoRoot, ".cache", "mongodb-local", $"manual-{Guid.NewGuid():N}");
Directory.CreateDirectory(dataDir);
var opensslPath = Path.Combine(repoRoot, "tests", "native", "openssl-1.1", "linux-x64");
var port = GetEphemeralPort();
var psi = new ProcessStartInfo
{
FileName = mongodPath,
ArgumentList =
{
"--dbpath", dataDir,
"--bind_ip", "127.0.0.1",
"--port", port.ToString(),
"--nojournal",
"--quiet",
"--replSet", "rs0"
},
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true
};
var existingLd = Environment.GetEnvironmentVariable("LD_LIBRARY_PATH");
var ldCombined = string.IsNullOrEmpty(existingLd) ? opensslPath : $"{opensslPath}:{existingLd}";
psi.Environment["LD_LIBRARY_PATH"] = ldCombined;
psi.Environment["PATH"] = $"{Path.GetDirectoryName(mongodPath)}:{Environment.GetEnvironmentVariable("PATH")}";
_externalMongo = Process.Start(psi);
_externalMongoDataPath = dataDir;
if (_externalMongo is null)
{
return false;
}
// Small ping loop to ensure mongod is ready
var client = new MongoClient($"mongodb://127.0.0.1:{port}");
var sw = System.Diagnostics.Stopwatch.StartNew();
while (sw.Elapsed < TimeSpan.FromSeconds(5))
{
try
{
client.GetDatabase("admin").RunCommand<BsonDocument>("{ ping: 1 }");
// Initiate single-node replica set so features expecting replset work.
client.GetDatabase("admin").RunCommand<BsonDocument>(BsonDocument.Parse("{ replSetInitiate: { _id: \"rs0\", members: [ { _id: 0, host: \"127.0.0.1:" + port + "\" } ] } }"));
// Wait for primary
var readySw = System.Diagnostics.Stopwatch.StartNew();
while (readySw.Elapsed < TimeSpan.FromSeconds(5))
{
var status = client.GetDatabase("admin").RunCommand<BsonDocument>(BsonDocument.Parse("{ replSetGetStatus: 1 }"));
var myState = status["members"].AsBsonArray.FirstOrDefault(x => x["self"].AsBoolean);
if (myState != null && myState["state"].ToInt32() == 1)
{
connectionString = $"mongodb://127.0.0.1:{port}/?replicaSet=rs0";
return true;
}
Thread.Sleep(100);
}
// fallback if primary not reached
connectionString = $"mongodb://127.0.0.1:{port}";
return true;
}
catch
{
Thread.Sleep(100);
}
}
try { _externalMongo.Kill(true); } catch { /* ignore */ }
return false;
}
private static int GetEphemeralPort()
{
var listener = new System.Net.Sockets.TcpListener(IPAddress.Loopback, 0);
listener.Start();
var port = ((IPEndPoint)listener.LocalEndpoint).Port;
listener.Stop();
return port;
}
private static string? FindRepoRoot()
{
var current = AppContext.BaseDirectory;
string? lastMatch = null;
while (!string.IsNullOrEmpty(current))
{
if (File.Exists(Path.Combine(current, "Directory.Build.props")))
{
lastMatch = current;
}
return null;
var parent = Directory.GetParent(current);
if (parent is null)
{
break;
}
current = parent.FullName;
}
return lastMatch;
}
private static AdvisoryIngestRequest BuildAdvisoryIngestRequest(
string? contentHash,
string upstreamId,

View File

@@ -0,0 +1,40 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Excititor.WebService.Contracts;
public sealed record GraphLinkoutsRequest(
[property: JsonPropertyName("tenant")] string Tenant,
[property: JsonPropertyName("purls")] IReadOnlyList<string> Purls,
[property: JsonPropertyName("includeJustifications")] bool IncludeJustifications = false,
[property: JsonPropertyName("includeProvenance")] bool IncludeProvenance = true);
public sealed record GraphLinkoutsResponse(
[property: JsonPropertyName("items")] IReadOnlyList<GraphLinkoutItem> Items,
[property: JsonPropertyName("notFound")] IReadOnlyList<string> NotFound);
public sealed record GraphLinkoutItem(
[property: JsonPropertyName("purl")] string Purl,
[property: JsonPropertyName("advisories")] IReadOnlyList<GraphLinkoutAdvisory> Advisories,
[property: JsonPropertyName("conflicts")] IReadOnlyList<GraphLinkoutConflict> Conflicts,
[property: JsonPropertyName("truncated")] bool Truncated = false,
[property: JsonPropertyName("nextCursor")] string? NextCursor = null);
public sealed record GraphLinkoutAdvisory(
[property: JsonPropertyName("advisoryId")] string AdvisoryId,
[property: JsonPropertyName("source")] string Source,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("justification")] string? Justification,
[property: JsonPropertyName("modifiedAt")] DateTimeOffset ModifiedAt,
[property: JsonPropertyName("evidenceHash")] string EvidenceHash,
[property: JsonPropertyName("connectorId")] string ConnectorId,
[property: JsonPropertyName("dsseEnvelopeHash")] string? DsseEnvelopeHash);
public sealed record GraphLinkoutConflict(
[property: JsonPropertyName("source")] string Source,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("justification")] string? Justification,
[property: JsonPropertyName("observedAt")] DateTimeOffset ObservedAt,
[property: JsonPropertyName("evidenceHash")] string EvidenceHash);

View File

@@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Excititor.WebService.Contracts;
public sealed record VexConsoleStatementDto(
string AdvisoryId,
string ProductKey,
string? Purl,
string Status,
string? Justification,
string ProviderId,
string ObservationId,
DateTimeOffset CreatedAtUtc,
IReadOnlyDictionary<string, string> Attributes);
public sealed record VexConsolePage(
IReadOnlyList<VexConsoleStatementDto> Items,
string? Cursor,
bool HasMore,
int Returned,
IReadOnlyDictionary<string, int>? Counters = null);

View File

@@ -36,6 +36,8 @@ using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Telemetry;
using MongoDB.Driver;
using MongoDB.Bson;
using Microsoft.Extensions.Caching.Memory;
using StellaOps.Excititor.WebService.Contracts;
var builder = WebApplication.CreateBuilder(args);
var configuration = builder.Configuration;
@@ -49,7 +51,11 @@ services.AddCsafNormalizer();
services.AddCycloneDxNormalizer();
services.AddOpenVexNormalizer();
services.AddSingleton<IVexSignatureVerifier, NoopVexSignatureVerifier>();
// TODO: replace NoopVexSignatureVerifier with hardened verifier once portable bundle signatures are finalized.
services.AddSingleton<AirgapImportValidator>();
services.AddSingleton<AirgapSignerTrustService>();
services.AddSingleton<ConsoleTelemetry>();
services.AddMemoryCache();
services.AddScoped<IVexIngestOrchestrator, VexIngestOrchestrator>();
services.AddOptions<ExcititorObservabilityOptions>()
.Bind(configuration.GetSection("Excititor:Observability"));
@@ -68,6 +74,7 @@ services.Configure<MirrorDistributionOptions>(configuration.GetSection(MirrorDis
services.AddSingleton<MirrorRateLimiter>();
services.TryAddSingleton(TimeProvider.System);
services.AddSingleton<IVexObservationProjectionService, VexObservationProjectionService>();
services.AddScoped<IVexObservationQueryService, VexObservationQueryService>();
var rekorSection = configuration.GetSection("Excititor:Attestation:Rekor");
if (rekorSection.Exists())
@@ -140,6 +147,7 @@ app.MapHealthChecks("/excititor/health");
app.MapPost("/airgap/v1/vex/import", async (
[FromServices] AirgapImportValidator validator,
[FromServices] AirgapSignerTrustService trustService,
[FromServices] IAirgapImportStore store,
[FromServices] TimeProvider timeProvider,
[FromBody] AirgapImportRequest request,
@@ -160,6 +168,18 @@ app.MapPost("/airgap/v1/vex/import", async (
});
}
if (!trustService.Validate(request, out var trustCode, out var trustMessage))
{
return Results.StatusCode(StatusCodes.Status403Forbidden, new
{
error = new
{
code = trustCode,
message = trustMessage
}
});
}
var record = new AirgapImportRecord
{
Id = $"{request.BundleId}:{request.MirrorGeneration}",
@@ -174,7 +194,21 @@ app.MapPost("/airgap/v1/vex/import", async (
ImportedAt = nowUtc
};
await store.SaveAsync(record, cancellationToken).ConfigureAwait(false);
try
{
await store.SaveAsync(record, cancellationToken).ConfigureAwait(false);
}
catch (DuplicateAirgapImportException dup)
{
return Results.Conflict(new
{
error = new
{
code = "AIRGAP_IMPORT_DUPLICATE",
message = dup.Message
}
});
}
return Results.Accepted($"/airgap/v1/vex/import/{request.BundleId}", new
{
@@ -296,6 +330,204 @@ app.MapPost("/excititor/admin/backfill-statements", async (
});
});
app.MapGet("/console/vex", async (
HttpContext context,
IOptions<VexMongoStorageOptions> storageOptions,
IVexObservationQueryService queryService,
ConsoleTelemetry telemetry,
IMemoryCache cache,
CancellationToken cancellationToken) =>
{
if (!TryResolveTenant(context, storageOptions.Value, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var query = context.Request.Query;
var purls = query["purl"].Where(static v => !string.IsNullOrWhiteSpace(v)).Select(static v => v.Trim()).ToArray();
var advisories = query["advisoryId"].Where(static v => !string.IsNullOrWhiteSpace(v)).Select(static v => v.Trim()).ToArray();
var statuses = new List<VexClaimStatus>();
if (query.TryGetValue("status", out var statusValues))
{
foreach (var statusValue in statusValues)
{
if (Enum.TryParse<VexClaimStatus>(statusValue, ignoreCase: true, out var parsed))
{
statuses.Add(parsed);
}
else
{
return Results.BadRequest($"Unknown status '{statusValue}'.");
}
}
}
var limit = query.TryGetValue("pageSize", out var pageSizeValues) && int.TryParse(pageSizeValues.FirstOrDefault(), out var pageSize)
? pageSize
: (int?)null;
var cursor = query.TryGetValue("cursor", out var cursorValues) ? cursorValues.FirstOrDefault() : null;
telemetry.Requests.Add(1);
var cacheKey = $"console-vex:{tenant}:{string.Join(',', purls)}:{string.Join(',', advisories)}:{string.Join(',', statuses)}:{limit}:{cursor}";
if (cache.TryGetValue(cacheKey, out VexConsolePage? cachedPage) && cachedPage is not null)
{
telemetry.CacheHits.Add(1);
return Results.Ok(cachedPage);
}
telemetry.CacheMisses.Add(1);
var options = new VexObservationQueryOptions(
tenant,
observationIds: null,
vulnerabilityIds: advisories,
productKeys: null,
purls: purls,
cpes: null,
providerIds: null,
statuses: statuses,
cursor: cursor,
limit: limit);
VexObservationQueryResult result;
try
{
result = await queryService.QueryAsync(options, cancellationToken).ConfigureAwait(false);
}
catch (FormatException ex)
{
return Results.BadRequest(ex.Message);
}
var statements = result.Observations
.SelectMany(obs => obs.Statements.Select(stmt => new VexConsoleStatementDto(
AdvisoryId: stmt.VulnerabilityId,
ProductKey: stmt.ProductKey,
Purl: stmt.Purl ?? obs.Linkset.Purls.FirstOrDefault(),
Status: stmt.Status.ToString().ToLowerInvariant(),
Justification: stmt.Justification?.ToString(),
ProviderId: obs.ProviderId,
ObservationId: obs.ObservationId,
CreatedAtUtc: obs.CreatedAt,
Attributes: obs.Attributes)))
.ToList();
var statusCounts = result.Observations
.GroupBy(o => o.Status.ToString().ToLowerInvariant())
.ToDictionary(g => g.Key, g => g.Count(), StringComparer.OrdinalIgnoreCase);
var response = new VexConsolePage(
Items: statements,
Cursor: result.NextCursor,
HasMore: result.HasMore,
Returned: statements.Count,
Counters: statusCounts);
cache.Set(cacheKey, response, TimeSpan.FromSeconds(30));
return Results.Ok(response);
}).WithName("GetConsoleVex");
// Cartographer linkouts
app.MapPost("/internal/graph/linkouts", async (
GraphLinkoutsRequest request,
IVexObservationQueryService queryService,
CancellationToken cancellationToken) =>
{
if (request is null || string.IsNullOrWhiteSpace(request.Tenant))
{
return Results.BadRequest("tenant is required.");
}
if (request.Purls is null || request.Purls.Count == 0 || request.Purls.Count > 500)
{
return Results.BadRequest("purls are required (1-500).");
}
var normalizedPurls = request.Purls
.Where(p => !string.IsNullOrWhiteSpace(p))
.Select(p => p.Trim().ToLowerInvariant())
.Distinct()
.ToArray();
if (normalizedPurls.Length == 0)
{
return Results.BadRequest("purls are required (1-500).");
}
var options = new VexObservationQueryOptions(
request.Tenant.Trim(),
purls: normalizedPurls,
includeJustifications: request.IncludeJustifications,
includeProvenance: request.IncludeProvenance,
limit: 200);
VexObservationQueryResult result;
try
{
result = await queryService.QueryAsync(options, cancellationToken).ConfigureAwait(false);
}
catch (FormatException ex)
{
return Results.BadRequest(ex.Message);
}
var observationsByPurl = result.Observations
.SelectMany(obs => obs.Linkset.Purls.Select(purl => (purl, obs)))
.GroupBy(tuple => tuple.purl, StringComparer.OrdinalIgnoreCase)
.ToDictionary(g => g.Key, g => g.Select(t => t.obs).ToArray(), StringComparer.OrdinalIgnoreCase);
var items = new List<GraphLinkoutItem>(normalizedPurls.Length);
var notFound = new List<string>();
foreach (var inputPurl in normalizedPurls)
{
if (!observationsByPurl.TryGetValue(inputPurl, out var obsForPurl))
{
notFound.Add(inputPurl);
continue;
}
var advisories = obsForPurl
.SelectMany(obs => obs.Statements.Select(stmt => new GraphLinkoutAdvisory(
AdvisoryId: stmt.VulnerabilityId,
Source: obs.ProviderId,
Status: stmt.Status.ToString().ToLowerInvariant(),
Justification: request.IncludeJustifications ? stmt.Justification?.ToString() : null,
ModifiedAt: obs.CreatedAt,
EvidenceHash: obs.Linkset.ReferenceHash,
ConnectorId: obs.ProviderId,
DsseEnvelopeHash: request.IncludeProvenance ? obs.Linkset.ReferenceHash : null)))
.OrderBy(a => a.AdvisoryId, StringComparer.Ordinal)
.ThenBy(a => a.Source, StringComparer.Ordinal)
.Take(200)
.ToList();
var conflicts = obsForPurl
.Where(obs => obs.Statements.Any(s => s.Status == VexClaimStatus.Conflict))
.SelectMany(obs => obs.Statements
.Where(s => s.Status == VexClaimStatus.Conflict)
.Select(stmt => new GraphLinkoutConflict(
Source: obs.ProviderId,
Status: stmt.Status.ToString().ToLowerInvariant(),
Justification: request.IncludeJustifications ? stmt.Justification?.ToString() : null,
ObservedAt: obs.CreatedAt,
EvidenceHash: obs.Linkset.ReferenceHash)))
.OrderBy(c => c.Source, StringComparer.Ordinal)
.ToList();
items.Add(new GraphLinkoutItem(
Purl: inputPurl,
Advisories: advisories,
Conflicts: conflicts,
Truncated: advisories.Count >= 200,
NextCursor: advisories.Count >= 200 ? $"{advisories[^1].AdvisoryId}:{advisories[^1].Source}" : null));
}
var response = new GraphLinkoutsResponse(items, notFound);
return Results.Ok(response);
}).WithName("PostGraphLinkouts");
app.MapPost("/ingest/vex", async (
HttpContext context,
VexIngestRequest request,

View File

@@ -1,3 +1,4 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Excititor.WebService.Tests")]
[assembly: InternalsVisibleTo("StellaOps.Excititor.WebService.Tests")]
[assembly: InternalsVisibleTo("StellaOps.Excititor.Core.UnitTests")]

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;
using StellaOps.Excititor.WebService.Contracts;
namespace StellaOps.Excititor.WebService.Services;
@@ -8,6 +10,8 @@ namespace StellaOps.Excititor.WebService.Services;
internal sealed class AirgapImportValidator
{
private static readonly TimeSpan AllowedSkew = TimeSpan.FromSeconds(5);
private static readonly Regex Sha256Pattern = new(@"^sha256:[A-Fa-f0-9]{64}$", RegexOptions.Compiled);
private static readonly Regex MirrorGenerationPattern = new(@"^[0-9]+$", RegexOptions.Compiled);
public IReadOnlyList<ValidationError> Validate(AirgapImportRequest request, DateTimeOffset nowUtc)
{
@@ -23,26 +27,46 @@ internal sealed class AirgapImportValidator
{
errors.Add(new ValidationError("bundle_id_missing", "bundleId is required."));
}
else if (request.BundleId.Length > 256)
{
errors.Add(new ValidationError("bundle_id_too_long", "bundleId must be <= 256 characters."));
}
if (string.IsNullOrWhiteSpace(request.MirrorGeneration))
{
errors.Add(new ValidationError("mirror_generation_missing", "mirrorGeneration is required."));
}
else if (!MirrorGenerationPattern.IsMatch(request.MirrorGeneration))
{
errors.Add(new ValidationError("mirror_generation_invalid", "mirrorGeneration must be a numeric string."));
}
if (string.IsNullOrWhiteSpace(request.Publisher))
{
errors.Add(new ValidationError("publisher_missing", "publisher is required."));
}
else if (request.Publisher.Length > 256)
{
errors.Add(new ValidationError("publisher_too_long", "publisher must be <= 256 characters."));
}
if (string.IsNullOrWhiteSpace(request.PayloadHash))
{
errors.Add(new ValidationError("payload_hash_missing", "payloadHash is required."));
}
else if (!Sha256Pattern.IsMatch(request.PayloadHash))
{
errors.Add(new ValidationError("payload_hash_invalid", "payloadHash must be sha256:<64-hex>."));
}
if (string.IsNullOrWhiteSpace(request.Signature))
{
errors.Add(new ValidationError("AIRGAP_SIGNATURE_MISSING", "signature is required for air-gapped imports."));
}
else if (!IsBase64(request.Signature))
{
errors.Add(new ValidationError("AIRGAP_SIGNATURE_INVALID", "signature must be base64-encoded."));
}
if (request.SignedAt is null)
{
@@ -62,5 +86,22 @@ internal sealed class AirgapImportValidator
return errors;
}
private static bool IsBase64(string value)
{
if (string.IsNullOrWhiteSpace(value) || value.Length % 4 != 0)
{
return false;
}
try
{
_ = Convert.FromBase64String(value);
return true;
}
catch
{
return false;
}
}
public readonly record struct ValidationError(string Code, string Message);
}

View File

@@ -0,0 +1,82 @@
using System;
using System.IO;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.WebService.Contracts;
namespace StellaOps.Excititor.WebService.Services;
internal sealed class AirgapSignerTrustService
{
private readonly ILogger<AirgapSignerTrustService> _logger;
private readonly string? _metadataPath;
private ConnectorSignerMetadataSet? _metadata;
public AirgapSignerTrustService(ILogger<AirgapSignerTrustService> logger)
{
_logger = logger;
_metadataPath = Environment.GetEnvironmentVariable("STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH");
}
public bool Validate(AirgapImportRequest request, out string? errorCode, out string? message)
{
errorCode = null;
message = null;
if (string.IsNullOrWhiteSpace(_metadataPath) || !File.Exists(_metadataPath))
{
_logger.LogDebug("Airgap signer metadata not configured; skipping trust enforcement.");
return true;
}
_metadata ??= ConnectorSignerMetadataLoader.TryLoad(_metadataPath);
if (_metadata is null)
{
_logger.LogWarning("Failed to load airgap signer metadata from {Path}; allowing import.", _metadataPath);
return true;
}
if (string.IsNullOrWhiteSpace(request.Publisher))
{
errorCode = "AIRGAP_SOURCE_UNTRUSTED";
message = "publisher is required for trust enforcement.";
return false;
}
if (!_metadata.TryGet(request.Publisher, out var connector))
{
errorCode = "AIRGAP_SOURCE_UNTRUSTED";
message = $"Publisher '{request.Publisher}' is not present in trusted signer metadata.";
return false;
}
if (connector.Revoked)
{
errorCode = "AIRGAP_SOURCE_UNTRUSTED";
message = $"Publisher '{request.Publisher}' is revoked.";
return false;
}
if (connector.Bundle?.Digest is { } digest && !string.IsNullOrWhiteSpace(digest))
{
if (!string.Equals(digest.Trim(), request.PayloadHash?.Trim(), StringComparison.OrdinalIgnoreCase))
{
errorCode = "AIRGAP_PAYLOAD_MISMATCH";
message = "Payload hash does not match trusted bundle digest.";
return false;
}
}
// Basic sanity: ensure at least one signer entry exists.
if (connector.Signers.IsDefaultOrEmpty || connector.Signers.Sum(s => s.Fingerprints.Length) == 0)
{
errorCode = "AIRGAP_SOURCE_UNTRUSTED";
message = $"Publisher '{request.Publisher}' has no trusted signers configured.";
return false;
}
return true;
}
}

View File

@@ -19,6 +19,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj" />

View File

@@ -0,0 +1,15 @@
using System.Diagnostics.Metrics;
namespace StellaOps.Excititor.WebService.Telemetry;
internal sealed class ConsoleTelemetry
{
public const string MeterName = "StellaOps.Excititor.Console";
private static readonly Meter Meter = new(MeterName);
public Counter<long> Requests { get; } = Meter.CreateCounter<long>("console.vex.requests");
public Counter<long> CacheHits { get; } = Meter.CreateCounter<long>("console.vex.cache_hits");
public Counter<long> CacheMisses { get; } = Meter.CreateCounter<long>("console.vex.cache_misses");
}

View File

@@ -0,0 +1,29 @@
using Microsoft.Extensions.Options;
namespace StellaOps.Excititor.Worker.Options;
internal sealed class TenantAuthorityOptionsValidator : IValidateOptions<TenantAuthorityOptions>
{
public ValidateOptionsResult Validate(string? name, TenantAuthorityOptions options)
{
if (options is null)
{
return ValidateOptionsResult.Fail("TenantAuthorityOptions is required.");
}
if (options.BaseUrls.Count == 0)
{
return ValidateOptionsResult.Fail("Excititor:Authority:BaseUrls must define at least one tenant endpoint.");
}
foreach (var kvp in options.BaseUrls)
{
if (string.IsNullOrWhiteSpace(kvp.Key) || string.IsNullOrWhiteSpace(kvp.Value))
{
return ValidateOptionsResult.Fail("Excititor:Authority:BaseUrls must include non-empty tenant keys and URLs.");
}
}
return ValidateOptionsResult.Success;
}
}

View File

@@ -20,15 +20,18 @@ using StellaOps.Excititor.Attestation.Extensions;
using StellaOps.Excititor.Attestation.Verification;
using StellaOps.IssuerDirectory.Client;
var builder = Host.CreateApplicationBuilder(args);
var services = builder.Services;
var configuration = builder.Configuration;
var builder = Host.CreateApplicationBuilder(args);
var services = builder.Services;
var configuration = builder.Configuration;
var workerConfig = configuration.GetSection("Excititor:Worker");
var workerConfigSnapshot = workerConfig.Get<VexWorkerOptions>() ?? new VexWorkerOptions();
services.AddOptions<VexWorkerOptions>()
.Bind(configuration.GetSection("Excititor:Worker"))
.Bind(workerConfig)
.ValidateOnStart();
services.Configure<VexWorkerPluginOptions>(configuration.GetSection("Excititor:Worker:Plugins"));
services.Configure<TenantAuthorityOptions>(configuration.GetSection("Excititor:Authority"));
services.AddSingleton<IValidateOptions<TenantAuthorityOptions>, TenantAuthorityOptionsValidator>();
services.PostConfigure<VexWorkerOptions>(options =>
{
if (options.DisableConsensus)
@@ -101,10 +104,13 @@ services.AddSingleton<PluginCatalog>(provider =>
});
services.AddSingleton<IVexProviderRunner, DefaultVexProviderRunner>();
services.AddSingleton<VexConsensusRefreshService>();
services.AddSingleton<IVexConsensusRefreshScheduler>(static provider => provider.GetRequiredService<VexConsensusRefreshService>());
services.AddHostedService<VexWorkerHostedService>();
services.AddHostedService(static provider => provider.GetRequiredService<VexConsensusRefreshService>());
if (!workerConfigSnapshot.DisableConsensus)
{
services.AddSingleton<VexConsensusRefreshService>();
services.AddSingleton<IVexConsensusRefreshScheduler>(static provider => provider.GetRequiredService<VexConsensusRefreshService>());
services.AddHostedService(static provider => provider.GetRequiredService<VexConsensusRefreshService>());
}
services.AddSingleton<ITenantAuthorityClientFactory, TenantAuthorityClientFactory>();
var host = builder.Build();

View File

@@ -0,0 +1,73 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Builds deterministic linkset update events from raw VEX observations
/// without introducing consensus or derived semantics (AOC-19-002).
/// </summary>
public sealed class VexLinksetExtractionService
{
/// <summary>
/// Groups observations by (vulnerabilityId, productKey) and emits a linkset update event
/// for each group. Ordering is stable and case-insensitive on identifiers.
/// </summary>
public ImmutableArray<VexLinksetUpdatedEvent> Extract(
string tenant,
IEnumerable<VexObservation> observations,
IEnumerable<VexObservationDisagreement>? disagreements = null)
{
if (observations is null)
{
return ImmutableArray<VexLinksetUpdatedEvent>.Empty;
}
var observationList = observations
.Where(o => o is not null)
.ToList();
if (observationList.Count == 0)
{
return ImmutableArray<VexLinksetUpdatedEvent>.Empty;
}
var groups = observationList
.SelectMany(obs => obs.Statements.Select(stmt => (obs, stmt)))
.GroupBy(x => new
{
VulnerabilityId = Normalize(x.stmt.VulnerabilityId),
ProductKey = Normalize(x.stmt.ProductKey)
})
.OrderBy(g => g.Key.VulnerabilityId, StringComparer.OrdinalIgnoreCase)
.ThenBy(g => g.Key.ProductKey, StringComparer.OrdinalIgnoreCase);
var now = observationList.Max(o => o.CreatedAt);
var events = new List<VexLinksetUpdatedEvent>();
foreach (var group in groups)
{
var linksetId = BuildLinksetId(group.Key.VulnerabilityId, group.Key.ProductKey);
var obsForGroup = group.Select(x => x.obs);
var evt = VexLinksetUpdatedEventFactory.Create(
tenant,
linksetId,
group.Key.VulnerabilityId,
group.Key.ProductKey,
obsForGroup,
disagreements ?? Enumerable.Empty<VexObservationDisagreement>(),
now);
events.Add(evt);
}
return events.ToImmutableArray();
}
private static string BuildLinksetId(string vulnerabilityId, string productKey)
=> $"vex:{vulnerabilityId}:{productKey}".ToLowerInvariant();
private static string Normalize(string value) => VexObservation.EnsureNotNullOrWhiteSpace(value, nameof(value));
}

View File

@@ -10,6 +10,19 @@ public interface IAirgapImportStore
Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken);
}
public sealed class DuplicateAirgapImportException : Exception
{
public string BundleId { get; }
public string MirrorGeneration { get; }
public DuplicateAirgapImportException(string bundleId, string mirrorGeneration, Exception inner)
: base($"Airgap import already exists for bundle '{bundleId}' generation '{mirrorGeneration}'.", inner)
{
BundleId = bundleId;
MirrorGeneration = mirrorGeneration;
}
}
internal sealed class MongoAirgapImportStore : IAirgapImportStore
{
private readonly IMongoCollection<AirgapImportRecord> _collection;
@@ -19,11 +32,30 @@ internal sealed class MongoAirgapImportStore : IAirgapImportStore
ArgumentNullException.ThrowIfNull(database);
VexMongoMappingRegistry.Register();
_collection = database.GetCollection<AirgapImportRecord>(VexMongoCollectionNames.AirgapImports);
// Enforce idempotency on (bundleId, generation) via Id uniqueness and explicit index.
var idIndex = Builders<AirgapImportRecord>.IndexKeys.Ascending(x => x.Id);
var bundleIndex = Builders<AirgapImportRecord>.IndexKeys
.Ascending(x => x.BundleId)
.Ascending(x => x.MirrorGeneration);
_collection.Indexes.CreateMany(new[]
{
new CreateIndexModel<AirgapImportRecord>(idIndex, new CreateIndexOptions { Unique = true, Name = "airgap_import_id_unique" }),
new CreateIndexModel<AirgapImportRecord>(bundleIndex, new CreateIndexOptions { Unique = true, Name = "airgap_bundle_generation_unique" })
});
}
public Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
return _collection.InsertOneAsync(record, cancellationToken: cancellationToken);
try
{
return _collection.InsertOneAsync(record, cancellationToken: cancellationToken);
}
catch (MongoWriteException ex) when (ex.WriteError.Category == ServerErrorCategory.DuplicateKey)
{
throw new DuplicateAirgapImportException(record.BundleId, record.MirrorGeneration, ex);
}
}
}

View File

@@ -124,11 +124,6 @@ public sealed class MongoVexRawStore : IVexRawStore
var sessionHandle = session ?? await _sessionProvider.StartSessionAsync(cancellationToken).ConfigureAwait(false);
if (!useInline)
{
newGridId = await UploadToGridFsAsync(document, sessionHandle, cancellationToken).ConfigureAwait(false);
}
var supportsTransactions = sessionHandle.Client.Cluster.Description.Type != ClusterType.Standalone
&& !sessionHandle.IsInTransaction;
@@ -183,6 +178,18 @@ public sealed class MongoVexRawStore : IVexRawStore
IngestionTelemetry.RecordLatency(tenant, sourceVendor, IngestionTelemetry.PhaseFetch, fetchWatch.Elapsed);
}
// Append-only: if the digest already exists, skip write
if (existing is not null)
{
IngestionTelemetry.RecordWriteAttempt(tenant, sourceVendor, IngestionTelemetry.ResultNoop);
return;
}
if (!useInline)
{
newGridId = await UploadToGridFsAsync(document, sessionHandle, cancellationToken).ConfigureAwait(false);
}
var record = VexRawDocumentRecord.FromDomain(document, includeContent: useInline);
record.GridFsObjectId = useInline ? null : newGridId;

View File

@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<OutputType>Library</OutputType>
<IsPackable>false</IsPackable>
<UseAppHost>false</UseAppHost>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" PrivateAssets="all" />
<ProjectReference Include="../../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="../../StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,118 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Services;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests;
public sealed class VexEvidenceChunkServiceTests
{
[Fact]
public async Task QueryAsync_FiltersAndLimitsResults()
{
var now = new DateTimeOffset(2025, 11, 16, 12, 0, 0, TimeSpan.Zero);
var claims = new[]
{
CreateClaim("provider-a", VexClaimStatus.Affected, now.AddHours(-6), now.AddHours(-5), score: 0.9),
CreateClaim("provider-b", VexClaimStatus.NotAffected, now.AddHours(-4), now.AddHours(-3), score: 0.2)
};
var service = new VexEvidenceChunkService(new FakeClaimStore(claims), new FixedTimeProvider(now));
var request = new VexEvidenceChunkRequest(
Tenant: "tenant-a",
VulnerabilityId: "CVE-2025-0001",
ProductKey: "pkg:docker/demo",
ProviderIds: ImmutableHashSet.Create("provider-b"),
Statuses: ImmutableHashSet.Create(VexClaimStatus.NotAffected),
Since: now.AddHours(-12),
Limit: 1);
var result = await service.QueryAsync(request, CancellationToken.None);
result.Truncated.Should().BeFalse();
result.TotalCount.Should().Be(1);
result.GeneratedAtUtc.Should().Be(now);
var chunk = result.Chunks.Single();
chunk.ProviderId.Should().Be("provider-b");
chunk.Status.Should().Be(VexClaimStatus.NotAffected.ToString());
chunk.ScopeScore.Should().Be(0.2);
chunk.ObservationId.Should().Contain("provider-b");
chunk.Document.Digest.Should().NotBeNullOrWhiteSpace();
}
private static VexClaim CreateClaim(string providerId, VexClaimStatus status, DateTimeOffset firstSeen, DateTimeOffset lastSeen, double? score)
{
var product = new VexProduct("pkg:docker/demo", "demo", "1.0.0", "pkg:docker/demo:1.0.0", null, new[] { "component-a" });
var document = new VexClaimDocument(
VexDocumentFormat.CycloneDx,
digest: Guid.NewGuid().ToString("N"),
sourceUri: new Uri("https://example.test/vex.json"),
revision: "r1",
signature: new VexSignatureMetadata("cosign", "demo", "issuer", keyId: "kid", verifiedAt: firstSeen, transparencyLogReference: string.Empty));
var signals = score.HasValue
? new VexSignalSnapshot(new VexSeveritySignal("cvss", score, "low", vector: null), kev: false, epss: null)
: null;
return new VexClaim(
"CVE-2025-0001",
providerId,
product,
status,
document,
firstSeen,
lastSeen,
justification: VexJustification.ComponentNotPresent,
detail: "demo detail",
confidence: null,
signals: signals,
additionalMetadata: ImmutableDictionary<string, string>.Empty);
}
private sealed class FakeClaimStore : IVexClaimStore
{
private readonly IReadOnlyCollection<VexClaim> _claims;
public FakeClaimStore(IReadOnlyCollection<VexClaim> claims)
{
_claims = claims;
}
public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> throw new NotSupportedException();
public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
var query = _claims
.Where(claim => claim.VulnerabilityId == vulnerabilityId)
.Where(claim => claim.Product.Key == productKey);
if (since.HasValue)
{
query = query.Where(claim => claim.LastSeen >= since.Value);
}
return ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(query.ToList());
}
}
private sealed class FixedTimeProvider : TimeProvider
{
private readonly DateTimeOffset _timestamp;
public FixedTimeProvider(DateTimeOffset timestamp)
{
_timestamp = timestamp;
}
public override DateTimeOffset GetUtcNow() => _timestamp;
}
}

View File

@@ -0,0 +1,115 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Nodes;
using StellaOps.Excititor.Core.Observations;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests;
public class VexLinksetExtractionServiceTests
{
[Fact]
public void Extract_GroupsByVulnerabilityAndProduct_WithStableOrdering()
{
var obs1 = BuildObservation(
id: "obs-1",
provider: "provider-a",
vuln: "CVE-2025-0001",
product: "pkg:npm/leftpad",
createdAt: DateTimeOffset.Parse("2025-11-20T10:00:00Z"));
var obs2 = BuildObservation(
id: "obs-2",
provider: "provider-b",
vuln: "CVE-2025-0001",
product: "pkg:npm/leftpad",
createdAt: DateTimeOffset.Parse("2025-11-20T11:00:00Z"));
var obs3 = BuildObservation(
id: "obs-3",
provider: "provider-c",
vuln: "CVE-2025-0002",
product: "pkg:maven/org.example/app",
createdAt: DateTimeOffset.Parse("2025-11-21T09:00:00Z"));
var service = new VexLinksetExtractionService();
var events = service.Extract("tenant-a", new[] { obs2, obs1, obs3 });
Assert.Equal(2, events.Length);
// First event should be CVE-2025-0001 because of ordering (vuln then product)
var first = events[0];
Assert.Equal("tenant-a", first.Tenant);
Assert.Equal("cve-2025-0001", first.VulnerabilityId.ToLowerInvariant());
Assert.Equal("pkg:npm/leftpad", first.ProductKey);
Assert.Equal("vex:cve-2025-0001:pkg:npm/leftpad", first.LinksetId);
// Should contain both observations, ordered by provider then observationId
Assert.Equal(new[] { "obs-1", "obs-2" }, first.Observations.Select(o => o.ObservationId).ToArray());
// Second event corresponds to CVE-2025-0002
var second = events[1];
Assert.Equal("cve-2025-0002", second.VulnerabilityId.ToLowerInvariant());
Assert.Equal("pkg:maven/org.example/app", second.ProductKey);
Assert.Equal(new[] { "obs-3" }, second.Observations.Select(o => o.ObservationId).ToArray());
// CreatedAt should reflect max CreatedAt among grouped observations
Assert.Equal(DateTimeOffset.Parse("2025-11-21T09:00:00Z").ToUniversalTime(), second.CreatedAtUtc);
}
[Fact]
public void Extract_FiltersNullsAndReturnsEmptyWhenNoObservations()
{
var service = new VexLinksetExtractionService();
var events = service.Extract("tenant-a", Array.Empty<VexObservation>());
Assert.Empty(events);
}
private static VexObservation BuildObservation(string id, string provider, string vuln, string product, DateTimeOffset createdAt)
{
var statement = new VexObservationStatement(
vulnerabilityId: vuln,
productKey: product,
status: VexClaimStatus.Affected,
lastObserved: null,
locator: null,
justification: null,
introducedVersion: null,
fixedVersion: null,
purl: product,
cpe: null,
evidence: null,
metadata: null);
var upstream = new VexObservationUpstream(
upstreamId: $"upstream-{id}",
documentVersion: "1",
fetchedAt: createdAt,
receivedAt: createdAt,
contentHash: "sha256:deadbeef",
signature: new VexObservationSignature(false, null, null, null));
var content = new VexObservationContent(
format: "openvex",
specVersion: "1.0.0",
raw: JsonNode.Parse("{}")!,
metadata: null);
var linkset = new VexObservationLinkset(
aliases: new[] { vuln },
purls: new[] { product },
cpes: Array.Empty<string>(),
references: Array.Empty<VexObservationReference>());
return new VexObservation(
observationId: id,
tenant: "tenant-a",
providerId: provider,
streamId: "ingest",
upstream: upstream,
statements: ImmutableArray.Create(statement),
content: content,
linkset: linkset,
createdAt: createdAt);
}
}

View File

@@ -0,0 +1,84 @@
using System;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
using Xunit;
namespace StellaOps.Excititor.WebService.Tests;
public sealed class AirgapImportValidatorTests
{
private readonly AirgapImportValidator _validator = new();
private readonly DateTimeOffset _now = DateTimeOffset.UtcNow;
[Fact]
public void Validate_WhenValid_ReturnsEmpty()
{
var req = new AirgapImportRequest
{
BundleId = "bundle-123",
MirrorGeneration = "5",
Publisher = "stellaops",
PayloadHash = "sha256:" + new string('a', 64),
Signature = Convert.ToBase64String(new byte[]{1,2,3}),
SignedAt = _now
};
var result = _validator.Validate(req, _now);
Assert.Empty(result);
}
[Fact]
public void Validate_InvalidHash_ReturnsError()
{
var req = Valid();
req.PayloadHash = "not-a-hash";
var result = _validator.Validate(req, _now);
Assert.Contains(result, e => e.Code == "payload_hash_invalid");
}
[Fact]
public void Validate_InvalidSignature_ReturnsError()
{
var req = Valid();
req.Signature = "???";
var result = _validator.Validate(req, _now);
Assert.Contains(result, e => e.Code == "AIRGAP_SIGNATURE_INVALID");
}
[Fact]
public void Validate_MirrorGenerationNonNumeric_ReturnsError()
{
var req = Valid();
req.MirrorGeneration = "abc";
var result = _validator.Validate(req, _now);
Assert.Contains(result, e => e.Code == "mirror_generation_invalid");
}
[Fact]
public void Validate_SignedAtTooOld_ReturnsError()
{
var req = Valid();
req.SignedAt = _now.AddSeconds(-10);
var result = _validator.Validate(req, _now);
Assert.Contains(result, e => e.Code == "AIRGAP_PAYLOAD_STALE");
}
private AirgapImportRequest Valid() => new()
{
BundleId = "bundle-123",
MirrorGeneration = "5",
Publisher = "stellaops",
PayloadHash = "sha256:" + new string('b', 64),
Signature = Convert.ToBase64String(new byte[]{5,6,7}),
SignedAt = _now
};
}

View File

@@ -0,0 +1,108 @@
using System;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
using Xunit;
namespace StellaOps.Excititor.WebService.Tests;
public class AirgapSignerTrustServiceTests
{
[Fact]
public void Validate_Allows_When_Metadata_Not_Configured()
{
Environment.SetEnvironmentVariable("STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH", null);
var service = new AirgapSignerTrustService(NullLogger<AirgapSignerTrustService>.Instance);
var ok = service.Validate(ValidRequest(), out var code, out var msg);
Assert.True(ok);
Assert.Null(code);
Assert.Null(msg);
}
[Fact]
public void Validate_Rejects_When_Publisher_Not_In_Metadata()
{
using var temp = ConnectorMetadataTempFile();
Environment.SetEnvironmentVariable("STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH", temp.Path);
var service = new AirgapSignerTrustService(NullLogger<AirgapSignerTrustService>.Instance);
var req = ValidRequest();
req.Publisher = "missing";
var ok = service.Validate(req, out var code, out var msg);
Assert.False(ok);
Assert.Equal("AIRGAP_SOURCE_UNTRUSTED", code);
Assert.Contains("missing", msg);
}
[Fact]
public void Validate_Rejects_On_Digest_Mismatch()
{
using var temp = ConnectorMetadataTempFile();
Environment.SetEnvironmentVariable("STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH", temp.Path);
var service = new AirgapSignerTrustService(NullLogger<AirgapSignerTrustService>.Instance);
var req = ValidRequest();
req.PayloadHash = "sha256:" + new string('b', 64);
var ok = service.Validate(req, out var code, out var msg);
Assert.False(ok);
Assert.Equal("AIRGAP_PAYLOAD_MISMATCH", code);
}
[Fact]
public void Validate_Allows_On_Metadata_Match()
{
using var temp = ConnectorMetadataTempFile();
Environment.SetEnvironmentVariable("STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH", temp.Path);
var service = new AirgapSignerTrustService(NullLogger<AirgapSignerTrustService>.Instance);
var req = ValidRequest();
var ok = service.Validate(req, out var code, out var msg);
Assert.True(ok);
Assert.Null(code);
Assert.Null(msg);
}
private static AirgapImportRequest ValidRequest() => new()
{
BundleId = "bundle-1",
MirrorGeneration = "1",
Publisher = "connector-a",
PayloadHash = "sha256:" + new string('a', 64),
Signature = Convert.ToBase64String(new byte[] {1,2,3}),
SignedAt = DateTimeOffset.UtcNow
};
private sealed class TempFile : IDisposable
{
public string Path { get; }
public TempFile(string path) => Path = path;
public void Dispose() { if (System.IO.File.Exists(Path)) System.IO.File.Delete(Path); }
}
private static TempFile ConnectorMetadataTempFile()
{
var json = @"{
\"schemaVersion\": \"1.0.0\",
\"generatedAt\": \"2025-11-23T00:00:00Z\",
\"connectors\": [
{
\"connectorId\": \"connector-a\",
\"provider\": { \"name\": \"Connector A\", \"slug\": \"connector-a\" },
\"issuerTier\": \"trusted\",
\"signers\": [ { \"usage\": \"sign\", \"fingerprints\": [ { \"alg\": \"rsa\", \"format\": \"pem\", \"value\": \"fp1\" } ] } ],
\"bundle\": { \"kind\": \"mirror\", \"uri\": \"file:///bundle\", \"digest\": \"sha256:" + new string('a',64) + "\" }
}
]
}";
var path = System.IO.Path.GetTempFileName();
System.IO.File.WriteAllText(path, json);
return new TempFile(path);
}
}

View File

@@ -1,4 +1,3 @@
#if false
using System;
using System.Collections.Generic;
using System.Net;
@@ -10,26 +9,22 @@ using Xunit;
namespace StellaOps.Excititor.WebService.Tests;
public sealed class AttestationVerifyEndpointTests : IClassFixture<TestWebApplicationFactory>
public sealed class AttestationVerifyEndpointTests
{
private readonly TestWebApplicationFactory _factory;
public AttestationVerifyEndpointTests(TestWebApplicationFactory factory)
{
_factory = factory;
}
[Fact]
public async Task Verify_ReturnsOk_WhenPayloadValid()
{
var client = _factory.CreateClient();
using var factory = new TestWebApplicationFactory(
configureServices: services => TestServiceOverrides.Apply(services));
var client = factory.CreateClient();
var request = new AttestationVerifyRequest
{
ExportId = "export-123",
QuerySignature = "purl=foo",
ArtifactDigest = "sha256:deadbeef",
Format = "VexJson",
ArtifactDigest = "deadbeef",
Format = "json",
CreatedAt = DateTimeOffset.Parse("2025-11-20T00:00:00Z"),
SourceProviders = new[] { "ghsa" },
Metadata = new Dictionary<string, string> { { "foo", "bar" } },
@@ -50,8 +45,12 @@ public sealed class AttestationVerifyEndpointTests : IClassFixture<TestWebApplic
};
var response = await client.PostAsJsonAsync("/v1/attestations/verify", request);
var raw = await response.Content.ReadAsStringAsync();
response.StatusCode.Should().Be(HttpStatusCode.OK);
if (response.StatusCode != HttpStatusCode.OK)
{
throw new Xunit.Sdk.XunitException($"Unexpected status {(int)response.StatusCode} ({response.StatusCode}). Body: {raw}");
}
var body = await response.Content.ReadFromJsonAsync<AttestationVerifyResponse>();
body.Should().NotBeNull();
body!.Valid.Should().BeTrue();
@@ -60,7 +59,9 @@ public sealed class AttestationVerifyEndpointTests : IClassFixture<TestWebApplic
[Fact]
public async Task Verify_ReturnsBadRequest_WhenFieldsMissing()
{
var client = _factory.CreateClient();
using var factory = new TestWebApplicationFactory(
configureServices: services => TestServiceOverrides.Apply(services));
var client = factory.CreateClient();
var request = new AttestationVerifyRequest
{
@@ -76,5 +77,3 @@ public sealed class AttestationVerifyEndpointTests : IClassFixture<TestWebApplic
response.StatusCode.Should().Be(HttpStatusCode.BadRequest);
}
}
#endif

View File

@@ -0,0 +1,13 @@
// Stub to satisfy Razor dev runtime dependency when running tests without VS dev tools
namespace Microsoft.CodeAnalysis.ExternalAccess.Razor
{
public interface IDevRuntimeEnvironment
{
bool IsSupported { get; }
}
internal sealed class DefaultDevRuntimeEnvironment : IDevRuntimeEnvironment
{
public bool IsSupported => false;
}
}

View File

@@ -1,4 +1,3 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
@@ -7,8 +6,11 @@
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
<UseAppHost>false</UseAppHost>
<DisableRazorRuntimeCompilation>true</DisableRazorRuntimeCompilation>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
@@ -27,10 +29,11 @@
<ItemGroup>
<Compile Remove="**/*.cs" />
<Compile Include="AirgapImportEndpointTests.cs" />
<Compile Include="VexEvidenceChunkServiceTests.cs" />
<Compile Include="EvidenceTelemetryTests.cs" />
<Compile Include="DevRuntimeEnvironmentStub.cs" />
<Compile Include="TestAuthentication.cs" />
<Compile Include="TestServiceOverrides.cs" />
<Compile Include="TestWebApplicationFactory.cs" />
<Compile Include="AttestationVerifyEndpointTests.cs" />
</ItemGroup>
</Project>

View File

@@ -11,8 +11,25 @@ namespace StellaOps.Excititor.WebService.Tests;
public sealed class TestWebApplicationFactory : WebApplicationFactory<Program>
{
private readonly Action<IConfigurationBuilder>? _configureConfiguration;
private readonly Action<IServiceCollection>? _configureServices;
public TestWebApplicationFactory() : this(null, null)
{
}
internal TestWebApplicationFactory(
Action<IConfigurationBuilder>? configureConfiguration = null,
Action<IServiceCollection>? configureServices = null)
{
_configureConfiguration = configureConfiguration;
_configureServices = configureServices;
}
protected override void ConfigureWebHost(IWebHostBuilder builder)
{
// Avoid loading any external hosting startup assemblies (e.g., Razor dev tools)
builder.UseSetting(WebHostDefaults.PreventHostingStartupKey, "true");
builder.UseEnvironment("Production");
builder.ConfigureAppConfiguration((_, config) =>
{
@@ -23,11 +40,13 @@ public sealed class TestWebApplicationFactory : WebApplicationFactory<Program>
["Excititor:Storage:Mongo:DefaultTenant"] = "test",
};
config.AddInMemoryCollection(defaults);
_configureConfiguration?.Invoke(config);
});
builder.ConfigureServices(services =>
{
services.RemoveAll<IHostedService>();
_configureServices?.Invoke(services);
});
}

View File

@@ -15,6 +15,7 @@ namespace StellaOps.Excititor.WebService.Tests;
public sealed class VexEvidenceChunkServiceTests
{
[Fact]
[Trait("Category", "VexEvidence")]
public async Task QueryAsync_FiltersAndLimitsResults()
{
var now = new DateTimeOffset(2025, 11, 16, 12, 0, 0, TimeSpan.Zero);

View File

@@ -0,0 +1,48 @@
using System;
using System.Net.Http.Headers;
using FluentAssertions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Worker.Auth;
using StellaOps.Excititor.Worker.Options;
using Xunit;
namespace StellaOps.Excititor.Worker.Tests;
public sealed class TenantAuthorityClientFactoryTests
{
[Fact]
public void Create_WhenTenantConfigured_SetsBaseAddressAndTenantHeader()
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
using var client = factory.Create("tenant-a");
client.BaseAddress.Should().Be(new Uri("https://authority.example/"));
client.DefaultRequestHeaders.TryGetValues("X-Tenant", out var values).Should().BeTrue();
values.Should().ContainSingle().Which.Should().Be("tenant-a");
}
[Fact]
public void Create_Throws_WhenTenantMissing()
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
FluentActions.Invoking(() => factory.Create(string.Empty))
.Should().Throw<ArgumentException>();
}
[Fact]
public void Create_Throws_WhenTenantNotConfigured()
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example/");
var factory = new TenantAuthorityClientFactory(Options.Create(options));
FluentActions.Invoking(() => factory.Create("tenant-b"))
.Should().Throw<InvalidOperationException>();
}
}

View File

@@ -0,0 +1,43 @@
using FluentAssertions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Worker.Options;
using Xunit;
namespace StellaOps.Excititor.Worker.Tests;
public sealed class TenantAuthorityOptionsValidatorTests
{
private readonly TenantAuthorityOptionsValidator _validator = new();
[Fact]
public void Validate_Fails_When_BaseUrls_Empty()
{
var options = new TenantAuthorityOptions();
var result = _validator.Validate(null, options);
result.Failed.Should().BeTrue();
}
[Fact]
public void Validate_Fails_When_Key_Or_Value_Blank()
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("", "");
var result = _validator.Validate(null, options);
result.Failed.Should().BeTrue();
}
[Fact]
public void Validate_Succeeds_When_Valid()
{
var options = new TenantAuthorityOptions();
options.BaseUrls.Add("tenant-a", "https://authority.example");
var result = _validator.Validate(null, options);
result.Succeeded.Should().BeTrue();
}
}

View File

@@ -0,0 +1,147 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT=$(cd "$(dirname "$0")/../../.." && pwd)
OUT="$ROOT/out/mirror/thin"
STAGE="$OUT/stage-v1"
CREATED="2025-11-23T00:00:00Z"
mkdir -p "$STAGE/layers" "$STAGE/indexes"
# 1) Seed deterministic content
cat > "$STAGE/layers/observations.ndjson" <<'DATA'
{"id":"obs-001","purl":"pkg:nuget/Newtonsoft.Json@13.0.3","advisory":"CVE-2025-0001","severity":"medium","source":"vendor-a","timestamp":"2025-11-01T00:00:00Z"}
{"id":"obs-002","purl":"pkg:npm/lodash@4.17.21","advisory":"CVE-2024-9999","severity":"high","source":"vendor-b","timestamp":"2025-10-15T00:00:00Z"}
DATA
cat > "$STAGE/indexes/observations.index" <<'DATA'
obs-001 layers/observations.ndjson:1
obs-002 layers/observations.ndjson:2
DATA
# 2) Build manifest from staged files
python - <<'PY'
import json, hashlib, os, pathlib
root = pathlib.Path(os.environ['STAGE'])
created = os.environ['CREATED']
def digest(path: pathlib.Path) -> str:
h = hashlib.sha256()
with path.open('rb') as f:
for chunk in iter(lambda: f.read(8192), b''):
h.update(chunk)
return 'sha256:' + h.hexdigest()
def size(path: pathlib.Path) -> int:
return path.stat().st_size
layers = []
for path in sorted((root / 'layers').glob('*')):
layers.append({
'path': f"layers/{path.name}",
'size': size(path),
'digest': digest(path)
})
indexes = []
for path in sorted((root / 'indexes').glob('*')):
indexes.append({
'name': path.name,
'digest': digest(path)
})
manifest = {
'version': '1.0.0',
'created': created,
'layers': layers,
'indexes': indexes
}
manifest_path = root / 'manifest.json'
manifest_path.write_text(json.dumps(manifest, indent=2, sort_keys=True) + '\n', encoding='utf-8')
PY
# 3) Tarball with deterministic metadata
pushd "$OUT" >/dev/null
rm -f mirror-thin-v1.tar.gz mirror-thin-v1.tar.gz.sha256 mirror-thin-v1.manifest.json mirror-thin-v1.manifest.json.sha256
cp "$STAGE/manifest.json" mirror-thin-v1.manifest.json
export GZIP=-n
/usr/bin/tar --sort=name --owner=0 --group=0 --numeric-owner --mtime='1970-01-01' -czf mirror-thin-v1.tar.gz -C "$STAGE" .
popd >/dev/null
# 4) Checksums
pushd "$OUT" >/dev/null
sha256sum mirror-thin-v1.manifest.json > mirror-thin-v1.manifest.json.sha256
sha256sum mirror-thin-v1.tar.gz > mirror-thin-v1.tar.gz.sha256
popd >/dev/null
# 5) Optional signing (DSSE + TUF) if SIGN_KEY is provided
if [[ -n "${SIGN_KEY:-}" ]]; then
mkdir -p "$OUT/tuf/keys"
python scripts/mirror/sign_thin_bundle.py \
--key "$SIGN_KEY" \
--manifest "$OUT/mirror-thin-v1.manifest.json" \
--tar "$OUT/mirror-thin-v1.tar.gz" \
--tuf-dir "$OUT/tuf"
fi
# 6) Optional OCI archive (MIRROR-CRT-57-001)
if [[ "${OCI:-0}" == "1" ]]; then
OCI_DIR="$OUT/oci"
BLOBS="$OCI_DIR/blobs/sha256"
mkdir -p "$BLOBS"
# layer = thin tarball
LAYER_SHA=$(sha256sum "$OUT/mirror-thin-v1.tar.gz" | awk '{print $1}')
cp "$OUT/mirror-thin-v1.tar.gz" "$BLOBS/$LAYER_SHA"
LAYER_SIZE=$(stat -c%s "$OUT/mirror-thin-v1.tar.gz")
# config = minimal empty config
CONFIG_TMP=$(mktemp)
echo '{"architecture":"amd64","os":"linux"}' > "$CONFIG_TMP"
CONFIG_SHA=$(sha256sum "$CONFIG_TMP" | awk '{print $1}')
CONFIG_SIZE=$(stat -c%s "$CONFIG_TMP")
cp "$CONFIG_TMP" "$BLOBS/$CONFIG_SHA"
rm "$CONFIG_TMP"
mkdir -p "$OCI_DIR"
cat > "$OCI_DIR/oci-layout" <<'JSON'
{
"imageLayoutVersion": "1.0.0"
}
JSON
MANIFEST_FILE="$OCI_DIR/manifest.json"
cat > "$MANIFEST_FILE" <<JSON
{
"schemaVersion": 2,
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"size": $CONFIG_SIZE,
"digest": "sha256:$CONFIG_SHA"
},
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"size": $LAYER_SIZE,
"digest": "sha256:$LAYER_SHA",
"annotations": {"org.stellaops.bundle.type": "mirror-thin-v1"}
}
]
}
JSON
MANIFEST_SHA=$(sha256sum "$MANIFEST_FILE" | awk '{print $1}')
MANIFEST_SIZE=$(stat -c%s "$MANIFEST_FILE")
cat > "$OCI_DIR/index.json" <<JSON
{
"schemaVersion": 2,
"manifests": [
{
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"digest": "sha256:$MANIFEST_SHA",
"size": $MANIFEST_SIZE,
"annotations": {"org.opencontainers.image.ref.name": "mirror-thin-v1"}
}
]
}
JSON
fi
# 7) Verification
python scripts/mirror/verify_thin_bundle.py "$OUT/mirror-thin-v1.manifest.json" "$OUT/mirror-thin-v1.tar.gz"
echo "mirror-thin-v1 built at $OUT"

View File

@@ -1,3 +1,4 @@
using System;
using FluentAssertions;
using StellaOps.Provenance.Attestation;
using Xunit;

View File

@@ -1,5 +1,7 @@
using System.Text;
using System.Collections.Generic;
using FluentAssertions;
using System.Threading.Tasks;
using StellaOps.Provenance.Attestation;
using Xunit;
@@ -11,16 +13,40 @@ public class PromotionAttestationBuilderTests
public void Produces_canonical_json_for_predicate()
{
var predicate = new PromotionPredicate(
ImageDigest: sha256:img,
SbomDigest: sha256:sbom,
VexDigest: sha256:vex,
PromotionId: prom-1,
RekorEntry: uuid,
Metadata: new Dictionary<string, string>{{env,prod}});
ImageDigest: "sha256:img",
SbomDigest: "sha256:sbom",
VexDigest: "sha256:vex",
PromotionId: "prom-1",
RekorEntry: "uuid",
// Intentionally shuffled input order; canonical JSON must be sorted.
Metadata: new Dictionary<string, string> { { "env", "prod" }, { "region", "us-east" } });
var bytes = PromotionAttestationBuilder.CreateCanonicalJson(predicate);
var json = Encoding.UTF8.GetString(bytes);
json.Should().Be("ImageDigest":"sha256:img");
json.Should().Be("{\"ImageDigest\":\"sha256:img\",\"Metadata\":{\"env\":\"prod\",\"region\":\"us-east\"},\"PromotionId\":\"prom-1\",\"RekorEntry\":\"uuid\",\"SbomDigest\":\"sha256:sbom\",\"VexDigest\":\"sha256:vex\"}");
}
[Fact]
public async Task BuildAsync_adds_predicate_claim_and_signs_payload()
{
var predicate = new PromotionPredicate(
ImageDigest: "sha256:img",
SbomDigest: "sha256:sbom",
VexDigest: "sha256:vex",
PromotionId: "prom-1");
var key = new InMemoryKeyProvider("kid-1", Encoding.UTF8.GetBytes("secret"));
var signer = new HmacSigner(key);
var attestation = await PromotionAttestationBuilder.BuildAsync(
predicate,
signer,
claims: new Dictionary<string, string> { { "traceId", "abc123" } });
attestation.Payload.Should().BeEquivalentTo(PromotionAttestationBuilder.CreateCanonicalJson(predicate));
attestation.Signature.KeyId.Should().Be("kid-1");
attestation.Signature.Claims.Should().ContainKey("predicateType").WhoseValue.Should().Be(PromotionAttestationBuilder.PredicateType);
attestation.Signature.Claims.Should().ContainKey("traceId").WhoseValue.Should().Be("abc123");
}
}

View File

@@ -79,6 +79,6 @@ public class SampleStatementDigestTests
var statements = LoadSamples().Select(pair => pair.Statement).ToArray();
BuildStatementDigest.ComputeMerkleRootHex(statements)
.Should()
.Be("e3a89fe0d08e2b16a6c7f1feb1d82d9e7ef9e8b74363bf60da64f36078d80eea");
.Be("958465d432c9c8497f9ea5c1476cc7f2bea2a87d3ca37d8293586bf73922dd73");
}
}

View File

@@ -5,6 +5,10 @@
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Remove="xunit" />
<PackageReference Remove="xunit.runner.visualstudio" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.Provenance.Attestation/StellaOps.Provenance.Attestation.csproj" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />

View File

@@ -1,5 +1,6 @@
using System.Text;
using FluentAssertions;
using System.Threading.Tasks;
using StellaOps.Provenance.Attestation;
using Xunit;
@@ -7,36 +8,38 @@ namespace StellaOps.Provenance.Attestation.Tests;
public class VerificationTests
{
private const string Payload = "{\"hello\":\"world\"}";
private const string ContentType = "application/json";
[Fact]
public async Task Verifier_accepts_valid_signature()
{
var key = new InMemoryKeyProvider(test-key, Encoding.UTF8.GetBytes(secret));
var key = new InMemoryKeyProvider("test-key", Encoding.UTF8.GetBytes("secret"));
var signer = new HmacSigner(key);
var verifier = new HmacVerifier(key);
var request = new SignRequest(Encoding.UTF8.GetBytes(payload), application/json);
var request = new SignRequest(Encoding.UTF8.GetBytes(Payload), ContentType);
var signature = await signer.SignAsync(request);
var result = await verifier.VerifyAsync(request, signature);
result.IsValid.Should().BeTrue();
result.Reason.Should().Be(ok);
result.Reason.Should().Be("verified");
}
[Fact]
public async Task Verifier_rejects_tampered_payload()
{
var key = new InMemoryKeyProvider(test-key, Encoding.UTF8.GetBytes(secret));
var key = new InMemoryKeyProvider("test-key", Encoding.UTF8.GetBytes("secret"));
var signer = new HmacSigner(key);
var verifier = new HmacVerifier(key);
var request = new SignRequest(Encoding.UTF8.GetBytes(payload), application/json);
var request = new SignRequest(Encoding.UTF8.GetBytes(Payload), ContentType);
var signature = await signer.SignAsync(request);
var tampered = new SignRequest(Encoding.UTF8.GetBytes(payload-tampered), application/json);
var tampered = new SignRequest(Encoding.UTF8.GetBytes(Payload + "-tampered"), ContentType);
var result = await verifier.VerifyAsync(tampered, signature);
result.IsValid.Should().BeFalse();
result.Reason.Should().Contain(mismatch);
result.Reason.Should().Be("signature or time invalid");
}
}
EOF}