Add unit tests for PhpFrameworkSurface and PhpPharScanner
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled

- Implement comprehensive tests for PhpFrameworkSurface, covering scenarios such as empty surfaces, presence of routes, controllers, middlewares, CLI commands, cron jobs, and event listeners.
- Validate metadata creation for route counts, HTTP methods, protected and public routes, and route patterns.
- Introduce tests for PhpPharScanner, including handling of non-existent files, null or empty paths, invalid PHAR files, and minimal PHAR structures.
- Ensure correct computation of SHA256 for valid PHAR files and validate the properties of PhpPharArchive, PhpPharEntry, and PhpPharScanResult.
This commit is contained in:
StellaOps Bot
2025-12-07 13:44:13 +02:00
parent af30fc322f
commit 965cbf9574
49 changed files with 11935 additions and 152 deletions

View File

@@ -11,6 +11,7 @@ using StellaOps.Attestor.Core.Signing;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
using StellaOps.Cryptography.Plugin.BouncyCastle;
using StellaOps.Cryptography.Plugin.SmSoft;
namespace StellaOps.Attestor.Infrastructure.Signing;
@@ -44,6 +45,21 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
var edProvider = new BouncyCastleEd25519CryptoProvider();
RegisterProvider(edProvider);
// SM2 software provider (non-certified). Requires SM_SOFT_ALLOWED env to be enabled.
SmSoftCryptoProvider? smProvider = null;
if (RequiresSm2(signingOptions))
{
smProvider = new SmSoftCryptoProvider();
if (smProvider.Supports(CryptoCapability.Signing, SignatureAlgorithms.Sm2))
{
RegisterProvider(smProvider);
}
else
{
_logger.LogWarning("SM2 requested but SM_SOFT_ALLOWED is not enabled; SM provider not registered.");
}
}
KmsCryptoProvider? kmsProvider = null;
if (RequiresKms(signingOptions))
{
@@ -86,6 +102,7 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
providerMap,
defaultProvider,
edProvider,
smProvider,
kmsProvider,
_kmsClient,
timeProvider);
@@ -126,11 +143,16 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
=> signingOptions.Keys?.Any(static key =>
string.Equals(key?.Mode, "kms", StringComparison.OrdinalIgnoreCase)) == true;
/// <summary>
/// Returns true when at least one configured signing key requests the SM2
/// algorithm (case-insensitive match), meaning the SmSoft provider is needed.
/// </summary>
private static bool RequiresSm2(AttestorOptions.SigningOptions signingOptions)
{
    var keys = signingOptions.Keys;
    if (keys is null)
    {
        return false;
    }

    return keys.Any(static key =>
        string.Equals(key?.Algorithm, SignatureAlgorithms.Sm2, StringComparison.OrdinalIgnoreCase));
}
private SigningKeyEntry CreateEntry(
AttestorOptions.SigningKeyOptions key,
IReadOnlyDictionary<string, ICryptoProvider> providers,
DefaultCryptoProvider defaultProvider,
BouncyCastleEd25519CryptoProvider edProvider,
SmSoftCryptoProvider? smProvider,
KmsCryptoProvider? kmsProvider,
FileKmsClient? kmsClient,
TimeProvider timeProvider)
@@ -205,6 +227,22 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
edProvider.UpsertSigningKey(signingKey);
}
else if (string.Equals(providerName, "cn.sm.soft", StringComparison.OrdinalIgnoreCase))
{
if (smProvider is null)
{
throw new InvalidOperationException($"SM2 signing provider is not configured but signing key '{key.KeyId}' requests algorithm 'SM2'.");
}
var privateKeyBytes = LoadSm2KeyBytes(key);
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
privateKeyBytes,
now);
smProvider.UpsertSigningKey(signingKey);
}
else
{
var parameters = LoadEcParameters(key);
@@ -252,6 +290,11 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
return "bouncycastle.ed25519";
}
if (string.Equals(key.Algorithm, SignatureAlgorithms.Sm2, StringComparison.OrdinalIgnoreCase))
{
return "cn.sm.soft";
}
return "default";
}
@@ -311,6 +354,20 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
return ecdsa.ExportParameters(true);
}
/// <summary>
/// Decodes the configured key material for an SM2 signing key into the raw
/// bytes expected by the SmSoft provider (PEM text passed through as UTF-8,
/// or PKCS#8 DER supplied as base64/hex).
/// </summary>
private static byte[] LoadSm2KeyBytes(AttestorOptions.SigningKeyOptions key)
{
    var material = ReadMaterial(key);
    var format = key.MaterialFormat?.ToLowerInvariant();

    if (format is null or "pem")
    {
        // PEM is handed to the provider as raw UTF-8 text.
        return System.Text.Encoding.UTF8.GetBytes(material);
    }

    if (format == "base64")
    {
        return Convert.FromBase64String(material);
    }

    if (format == "hex")
    {
        return Convert.FromHexString(material);
    }

    throw new InvalidOperationException($"Unsupported materialFormat '{key.MaterialFormat}' for SM2 signing key '{key.KeyId}'. Supported formats: pem, base64, hex.");
}
private static string ReadMaterial(AttestorOptions.SigningKeyOptions key)
{
if (!string.IsNullOrWhiteSpace(key.MaterialPassphrase))

View File

@@ -12,6 +12,7 @@
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.SmSoft\StellaOps.Cryptography.Plugin.SmSoft.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
@@ -23,6 +24,6 @@
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="AWSSDK.S3" Version="3.7.307.6" />
<PackageReference Include="AWSSDK.S3" Version="4.0.2" />
</ItemGroup>
</Project>

View File

@@ -15,6 +15,10 @@ using StellaOps.Attestor.Infrastructure.Submission;
using StellaOps.Attestor.Tests;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
using Org.BouncyCastle.Crypto.Generators;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.Security;
using Xunit;
namespace StellaOps.Attestor.Tests;
@@ -210,6 +214,147 @@ public sealed class AttestorSigningServiceTests : IDisposable
Assert.Equal("signed", auditSink.Records[0].Result);
}
[Fact]
public async Task SignAsync_Sm2Key_ReturnsValidSignature_WhenGateEnabled()
{
    // End-to-end check: with the SM_SOFT_ALLOWED gate enabled, a configured SM2
    // key can sign a DSSE envelope and the resulting signature verifies with
    // BouncyCastle's SM2Signer against the generated public key.
    // Save the original gate value so it is restored even if an assertion fails.
    var originalGate = Environment.GetEnvironmentVariable("SM_SOFT_ALLOWED");
    try
    {
        Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", "1");

        // Generate SM2 key pair
        var curve = Org.BouncyCastle.Asn1.GM.GMNamedCurves.GetByName("SM2P256V1");
        var domain = new ECDomainParameters(curve.Curve, curve.G, curve.N, curve.H, curve.GetSeed());
        var generator = new ECKeyPairGenerator("EC");
        generator.Init(new ECKeyGenerationParameters(domain, new SecureRandom()));
        var keyPair = generator.GenerateKeyPair();
        // PKCS#8 DER encoding of the private key; fed to the registry as base64 material.
        var privateDer = Org.BouncyCastle.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(keyPair.Private).GetDerEncoded();

        var options = Options.Create(new AttestorOptions
        {
            Signing = new AttestorOptions.SigningOptions
            {
                Keys =
                {
                    new AttestorOptions.SigningKeyOptions
                    {
                        KeyId = "sm2-1",
                        Algorithm = SignatureAlgorithms.Sm2,
                        Mode = "keyful",
                        Material = Convert.ToBase64String(privateDer),
                        MaterialFormat = "base64"
                    }
                }
            }
        });

        using var metrics = new AttestorMetrics();
        using var registry = new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger<AttestorSigningKeyRegistry>.Instance);
        var auditSink = new InMemoryAttestorAuditSink();
        var service = new AttestorSigningService(
            registry,
            new DefaultDsseCanonicalizer(),
            auditSink,
            metrics,
            NullLogger<AttestorSigningService>.Instance,
            TimeProvider.System);

        // Minimal JSON payload; the artifact digest is a dummy 64-char sha256 string.
        var payloadBytes = Encoding.UTF8.GetBytes("{}");
        var request = new AttestationSignRequest
        {
            KeyId = "sm2-1",
            PayloadType = "application/json",
            PayloadBase64 = Convert.ToBase64String(payloadBytes),
            Artifact = new AttestorSubmissionRequest.ArtifactInfo
            {
                Sha256 = new string('c', 64),
                Kind = "sbom"
            }
        };
        var context = new SubmissionContext
        {
            CallerSubject = "urn:subject",
            CallerAudience = "attestor",
            CallerClientId = "client",
            CallerTenant = "tenant",
            MtlsThumbprint = "thumbprint"
        };

        var result = await service.SignAsync(request, context);

        Assert.NotNull(result);
        Assert.Equal("sm2-1", result.KeyId);
        Assert.Equal("keyful", result.Mode);
        Assert.Equal("cn.sm.soft", result.Provider);
        Assert.Equal(SignatureAlgorithms.Sm2, result.Algorithm);
        Assert.False(string.IsNullOrWhiteSpace(result.Meta.BundleSha256));
        Assert.Single(result.Bundle.Dsse.Signatures);

        // Verify the signature
        var signature = Convert.FromBase64String(result.Bundle.Dsse.Signatures[0].Signature);
        // DSSE signs the pre-authentication encoding (PAE), not the raw payload bytes.
        var preAuth = DssePreAuthenticationEncoding.Compute(result.Bundle.Dsse.PayloadType, Convert.FromBase64String(result.Bundle.Dsse.PayloadBase64));
        var verifier = new SM2Signer();
        // Conventional SM2 default user id; assumes the SmSoft provider signs with
        // the same id — confirm against the provider implementation if this fails.
        var userId = Encoding.ASCII.GetBytes("1234567812345678");
        verifier.Init(false, new ParametersWithID(keyPair.Public, userId));
        verifier.BlockUpdate(preAuth, 0, preAuth.Length);
        Assert.True(verifier.VerifySignature(signature));

        Assert.Single(auditSink.Records);
        Assert.Equal("sign", auditSink.Records[0].Action);
        Assert.Equal("signed", auditSink.Records[0].Result);
    }
    finally
    {
        // Restore the gate so other tests in the process are unaffected.
        Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", originalGate);
    }
}
[Fact]
public void Sm2Registry_Fails_WhenGateDisabled()
{
    // Negative gate check: with SM_SOFT_ALLOWED cleared, configuring an SM2 key
    // must make registry construction throw rather than silently skip the key.
    // Save the original gate value so it is restored even if an assertion fails.
    var originalGate = Environment.GetEnvironmentVariable("SM_SOFT_ALLOWED");
    try
    {
        Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", null);

        // Generate SM2 key pair
        var curve = Org.BouncyCastle.Asn1.GM.GMNamedCurves.GetByName("SM2P256V1");
        var domain = new ECDomainParameters(curve.Curve, curve.G, curve.N, curve.H, curve.GetSeed());
        var generator = new ECKeyPairGenerator("EC");
        generator.Init(new ECKeyGenerationParameters(domain, new SecureRandom()));
        var keyPair = generator.GenerateKeyPair();
        // PKCS#8 DER encoding of the private key; fed to the registry as base64 material.
        var privateDer = Org.BouncyCastle.Pkcs.PrivateKeyInfoFactory.CreatePrivateKeyInfo(keyPair.Private).GetDerEncoded();

        var options = Options.Create(new AttestorOptions
        {
            Signing = new AttestorOptions.SigningOptions
            {
                Keys =
                {
                    new AttestorOptions.SigningKeyOptions
                    {
                        KeyId = "sm2-fail",
                        Algorithm = SignatureAlgorithms.Sm2,
                        Mode = "keyful",
                        Material = Convert.ToBase64String(privateDer),
                        MaterialFormat = "base64"
                    }
                }
            }
        });

        // Creating registry should throw because SM_SOFT_ALLOWED is not set
        Assert.Throws<InvalidOperationException>(() =>
            new AttestorSigningKeyRegistry(options, TimeProvider.System, NullLogger<AttestorSigningKeyRegistry>.Instance));
    }
    finally
    {
        // Restore the gate so other tests in the process are unaffected.
        Environment.SetEnvironmentVariable("SM_SOFT_ALLOWED", originalGate);
    }
}
private string CreateTempDirectory()
{
var path = Path.Combine(Path.GetTempPath(), "attestor-signing-tests", Guid.NewGuid().ToString("N"));

View File

@@ -61,7 +61,7 @@ public sealed class AttestorVerificationServiceTests
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
@@ -149,7 +149,7 @@ public sealed class AttestorVerificationServiceTests
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
@@ -325,7 +325,7 @@ public sealed class AttestorVerificationServiceTests
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var rekorClient = new RecordingRekorClient();
@@ -388,7 +388,7 @@ public sealed class AttestorVerificationServiceTests
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var rekorClient = new RecordingRekorClient();
@@ -498,7 +498,7 @@ public sealed class AttestorVerificationServiceTests
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, options, NullLogger<AttestorVerificationEngine>.Instance);
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());

View File

@@ -21,5 +21,6 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.SmSoft\StellaOps.Cryptography.Plugin.SmSoft.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,12 +1,15 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Audit;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Cryptography;
namespace StellaOps.Attestor.Tests;
@@ -210,3 +213,66 @@ internal sealed class InMemoryAttestorArchiveStore : IAttestorArchiveStore
return Task.FromResult<AttestorArchiveBundle?>(null);
}
}
/// <summary>
/// Test double for <see cref="ICryptoHash"/> backed by the BCL hash algorithms.
/// Supports SHA-256 (the default when no algorithm id is given) and SHA-512.
/// All purpose-based overloads ignore the <c>purpose</c> argument and use SHA-256.
/// </summary>
internal sealed class TestCryptoHash : ICryptoHash
{
    public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
    {
        using var algorithm = CreateAlgorithm(algorithmId);
        // Hash the span directly instead of copying it into a temporary array.
        var hash = new byte[algorithm.HashSize / 8];
        if (!algorithm.TryComputeHash(data, hash, out _))
        {
            throw new InvalidOperationException("Hash computation failed unexpectedly.");
        }

        return hash;
    }

    public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
        => Convert.ToHexString(ComputeHash(data, algorithmId)).ToLowerInvariant();

    public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
        => Convert.ToBase64String(ComputeHash(data, algorithmId));

    public async ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
    {
        using var algorithm = CreateAlgorithm(algorithmId);
        // Stream the data through the algorithm instead of buffering the whole
        // stream into a MemoryStream first; this keeps memory use constant for
        // large inputs and honors cancellation mid-stream.
        return await algorithm.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
    }

    public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
    {
        var bytes = await ComputeHashAsync(stream, algorithmId, cancellationToken).ConfigureAwait(false);
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    // Purpose-based overloads: the purpose argument is ignored and SHA-256 is
    // always used, mirroring the fixed GetAlgorithmForPurpose/GetHashPrefix below.
    public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHash(data, HashAlgorithms.Sha256);

    public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashHex(data, HashAlgorithms.Sha256);

    public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashBase64(data, HashAlgorithms.Sha256);

    public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashAsync(stream, HashAlgorithms.Sha256, cancellationToken);

    public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashHexAsync(stream, HashAlgorithms.Sha256, cancellationToken);

    public string GetAlgorithmForPurpose(string purpose)
        => HashAlgorithms.Sha256;

    public string GetHashPrefix(string purpose)
        => "sha256:";

    public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => $"{GetHashPrefix(purpose)}{ComputeHashHexForPurpose(data, purpose)}";

    /// <summary>
    /// Maps an algorithm id (null/empty defaults to SHA-256) to a BCL
    /// <see cref="HashAlgorithm"/>; callers own disposal of the returned instance.
    /// NOTE(review): the id is upper-cased before matching the HashAlgorithms
    /// constants — assumes those constants are upper-case; confirm if a new
    /// algorithm is added.
    /// </summary>
    private static HashAlgorithm CreateAlgorithm(string? algorithmId)
    {
        return algorithmId?.ToUpperInvariant() switch
        {
            null or "" or HashAlgorithms.Sha256 => SHA256.Create(),
            HashAlgorithms.Sha512 => SHA512.Create(),
            _ => throw new NotSupportedException($"Test crypto hash does not support algorithm {algorithmId}.")
        };
    }
}

View File

@@ -1,25 +1,26 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Authentication;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Configuration;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Events;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Metadata;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Authentication;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Configuration;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Events;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Metadata;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State;
using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub;
@@ -91,14 +92,14 @@ public sealed class RancherHubConnector : VexConnectorBase
throw new InvalidOperationException("Connector must be validated before fetch operations.");
}
if (_metadata is null)
{
_metadata = await _metadataLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
}
await UpsertProviderAsync(context.Services, _metadata.Metadata.Provider, cancellationToken).ConfigureAwait(false);
var checkpoint = await _checkpointManager.LoadAsync(Descriptor.Id, context, cancellationToken).ConfigureAwait(false);
if (_metadata is null)
{
_metadata = await _metadataLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
}
await UpsertProviderAsync(context.Services, _metadata.Metadata.Provider, cancellationToken).ConfigureAwait(false);
var checkpoint = await _checkpointManager.LoadAsync(Descriptor.Id, context, cancellationToken).ConfigureAwait(false);
var digestHistory = checkpoint.Digests.ToList();
var dedupeSet = new HashSet<string>(checkpoint.Digests, StringComparer.OrdinalIgnoreCase);
var latestCursor = checkpoint.Cursor;
@@ -215,19 +216,19 @@ public sealed class RancherHubConnector : VexConnectorBase
var contentBytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
var publishedAt = record.PublishedAt ?? UtcNow();
var metadata = BuildMetadata(builder =>
{
builder
.Add("rancher.event.id", record.Id)
.Add("rancher.event.type", record.Type)
.Add("rancher.event.channel", record.Channel)
.Add("rancher.event.published", publishedAt)
.Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor)
.Add("rancher.event.offline", batch.FromOfflineSnapshot ? "true" : "false")
.Add("rancher.event.declaredDigest", record.DocumentDigest);
AddProvenanceMetadata(builder);
});
var metadata = BuildMetadata(builder =>
{
builder
.Add("rancher.event.id", record.Id)
.Add("rancher.event.type", record.Type)
.Add("rancher.event.channel", record.Channel)
.Add("rancher.event.published", publishedAt)
.Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor)
.Add("rancher.event.offline", batch.FromOfflineSnapshot ? "true" : "false")
.Add("rancher.event.declaredDigest", record.DocumentDigest);
AddProvenanceMetadata(builder);
});
var format = ResolveFormat(record.DocumentFormat);
var document = CreateRawDocument(format, record.DocumentUri, contentBytes, metadata);
@@ -250,48 +251,52 @@ public sealed class RancherHubConnector : VexConnectorBase
}
digestHistory.Add(document.Digest);
await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false);
return new EventProcessingResult(document, false, publishedAt);
}
private void AddProvenanceMetadata(VexConnectorMetadataBuilder builder)
{
ArgumentNullException.ThrowIfNull(builder);
var provider = _metadata?.Metadata.Provider;
if (provider is null)
{
return;
}
builder
.Add("vex.provenance.provider", provider.Id)
.Add("vex.provenance.providerName", provider.DisplayName)
.Add("vex.provenance.providerKind", provider.Kind.ToString().ToLowerInvariant(CultureInfo.InvariantCulture))
.Add("vex.provenance.trust.weight", provider.Trust.Weight.ToString("0.###", CultureInfo.InvariantCulture));
if (provider.Trust.Cosign is { } cosign)
{
builder
.Add("vex.provenance.cosign.issuer", cosign.Issuer)
.Add("vex.provenance.cosign.identityPattern", cosign.IdentityPattern);
}
if (!provider.Trust.PgpFingerprints.IsDefaultOrEmpty && provider.Trust.PgpFingerprints.Length > 0)
{
builder.Add("vex.provenance.pgp.fingerprints", string.Join(',', provider.Trust.PgpFingerprints));
}
var tier = provider.Kind.ToString().ToLowerInvariant(CultureInfo.InvariantCulture);
builder
.Add("vex.provenance.trust.tier", tier)
.Add("vex.provenance.trust.note", $"tier={tier};weight={provider.Trust.Weight.ToString("0.###", CultureInfo.InvariantCulture)}");
}
private static bool TrimHistory(List<string> digestHistory)
{
if (digestHistory.Count <= MaxDigestHistory)
{
await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false);
return new EventProcessingResult(document, false, publishedAt);
}
private void AddProvenanceMetadata(VexConnectorMetadataBuilder builder)
{
ArgumentNullException.ThrowIfNull(builder);
var provider = _metadata?.Metadata.Provider;
if (provider is null)
{
return;
}
builder
.Add("vex.provenance.provider", provider.Id)
.Add("vex.provenance.providerName", provider.DisplayName)
.Add("vex.provenance.providerKind", provider.Kind.ToString().ToLowerInvariant(CultureInfo.InvariantCulture))
.Add("vex.provenance.trust.weight", provider.Trust.Weight.ToString("0.###", CultureInfo.InvariantCulture));
if (provider.Trust.Cosign is { } cosign)
{
builder
.Add("vex.provenance.cosign.issuer", cosign.Issuer)
.Add("vex.provenance.cosign.identityPattern", cosign.IdentityPattern);
}
if (!provider.Trust.PgpFingerprints.IsDefaultOrEmpty && provider.Trust.PgpFingerprints.Length > 0)
{
builder.Add("vex.provenance.pgp.fingerprints", string.Join(',', provider.Trust.PgpFingerprints));
}
var tier = provider.Kind.ToString().ToLowerInvariant(CultureInfo.InvariantCulture);
builder
.Add("vex.provenance.trust.tier", tier)
.Add("vex.provenance.trust.note", $"tier={tier};weight={provider.Trust.Weight.ToString("0.###", CultureInfo.InvariantCulture)}");
// Enrich with connector signer metadata (fingerprints, issuer tier, bundle info)
// from external signer metadata file (STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH)
ConnectorSignerMetadataEnricher.Enrich(builder, Descriptor.Id, Logger);
}
private static bool TrimHistory(List<string> digestHistory)
{
if (digestHistory.Count <= MaxDigestHistory)
{
return false;
}
@@ -303,55 +308,55 @@ public sealed class RancherHubConnector : VexConnectorBase
private async Task<HttpRequestMessage> CreateDocumentRequestAsync(Uri documentUri, CancellationToken cancellationToken)
{
var request = new HttpRequestMessage(HttpMethod.Get, documentUri);
if (_metadata?.Metadata.Subscription.RequiresAuthentication ?? false)
{
var token = await _tokenProvider.GetAccessTokenAsync(_options!, cancellationToken).ConfigureAwait(false);
if (token is not null)
{
var scheme = string.IsNullOrWhiteSpace(token.TokenType) ? "Bearer" : token.TokenType;
request.Headers.Authorization = new AuthenticationHeaderValue(scheme, token.Value);
}
}
return request;
}
private static async ValueTask UpsertProviderAsync(IServiceProvider services, VexProvider provider, CancellationToken cancellationToken)
{
if (services is null)
{
return;
}
var store = services.GetService<IVexProviderStore>();
if (store is null)
{
return;
}
await store.SaveAsync(provider, cancellationToken).ConfigureAwait(false);
}
private async Task QuarantineAsync(
RancherHubEventRecord record,
RancherHubEventBatch batch,
string reason,
if (_metadata?.Metadata.Subscription.RequiresAuthentication ?? false)
{
var token = await _tokenProvider.GetAccessTokenAsync(_options!, cancellationToken).ConfigureAwait(false);
if (token is not null)
{
var scheme = string.IsNullOrWhiteSpace(token.TokenType) ? "Bearer" : token.TokenType;
request.Headers.Authorization = new AuthenticationHeaderValue(scheme, token.Value);
}
}
return request;
}
private static async ValueTask UpsertProviderAsync(IServiceProvider services, VexProvider provider, CancellationToken cancellationToken)
{
if (services is null)
{
return;
}
var store = services.GetService<IVexProviderStore>();
if (store is null)
{
return;
}
await store.SaveAsync(provider, cancellationToken).ConfigureAwait(false);
}
private async Task QuarantineAsync(
RancherHubEventRecord record,
RancherHubEventBatch batch,
string reason,
VexConnectorContext context,
CancellationToken cancellationToken)
{
var metadata = BuildMetadata(builder =>
{
builder
.Add("rancher.event.id", record.Id)
.Add("rancher.event.type", record.Type)
.Add("rancher.event.channel", record.Channel)
.Add("rancher.event.quarantine", "true")
.Add("rancher.event.error", reason)
.Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor)
.Add("rancher.event.offline", batch.FromOfflineSnapshot ? "true" : "false");
AddProvenanceMetadata(builder);
});
var metadata = BuildMetadata(builder =>
{
builder
.Add("rancher.event.id", record.Id)
.Add("rancher.event.type", record.Type)
.Add("rancher.event.channel", record.Channel)
.Add("rancher.event.quarantine", "true")
.Add("rancher.event.error", reason)
.Add("rancher.event.cursor", batch.NextCursor ?? batch.Cursor)
.Add("rancher.event.offline", batch.FromOfflineSnapshot ? "true" : "false");
AddProvenanceMetadata(builder);
});
var sourceUri = record.DocumentUri ?? _metadata?.Metadata.Subscription.EventsUri ?? _options!.DiscoveryUri;
var payload = Encoding.UTF8.GetBytes(record.RawJson);

View File

@@ -461,7 +461,7 @@ public sealed class UbuntuCsafConnector : VexConnectorBase
.Add("vex.provenance.trust.tier", tier)
.Add("vex.provenance.trust.note", $"tier={tier};weight={provider.Trust.Weight.ToString("0.###", CultureInfo.InvariantCulture)}");
ConnectorSignerMetadataEnricher.Enrich(builder, Descriptor.Id, _logger);
ConnectorSignerMetadataEnricher.Enrich(builder, Descriptor.Id, Logger);
}
private static async ValueTask UpsertProviderAsync(IServiceProvider services, VexProvider provider, CancellationToken cancellationToken)

View File

@@ -0,0 +1,498 @@
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.Attestation;
using StellaOps.Findings.Ledger.Services;
using Xunit;
namespace StellaOps.Findings.Ledger.Tests.Attestation;
public class AttestationPointerServiceTests
{
private readonly Mock<ILedgerEventRepository> _ledgerEventRepository;
private readonly Mock<ILedgerEventWriteService> _writeService;
private readonly InMemoryAttestationPointerRepository _repository;
private readonly FakeTimeProvider _timeProvider;
private readonly AttestationPointerService _service;
public AttestationPointerServiceTests()
{
    // Mocks for the ledger collaborators; the pointer repository is a real
    // in-memory implementation so persisted pointers can be asserted directly.
    _ledgerEventRepository = new Mock<ILedgerEventRepository>();
    _writeService = new Mock<ILedgerEventWriteService>();
    _repository = new InMemoryAttestationPointerRepository();
    // Fixed clock for deterministic timestamps across all tests.
    _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero));

    // Echo any appended draft back as a successful ledger record with
    // placeholder event/merkle hashes; the service under test only needs a
    // successful write result, not real hash-chain computation.
    _writeService.Setup(w => w.AppendAsync(It.IsAny<LedgerEventDraft>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync((LedgerEventDraft draft, CancellationToken _) =>
        {
            var record = new LedgerEventRecord(
                draft.TenantId,
                draft.ChainId,
                draft.SequenceNumber,
                draft.EventId,
                draft.EventType,
                draft.PolicyVersion,
                draft.FindingId,
                draft.ArtifactId,
                draft.SourceRunId,
                draft.ActorId,
                draft.ActorType,
                draft.OccurredAt,
                draft.RecordedAt,
                draft.Payload,
                "event-hash",
                draft.ProvidedPreviousHash ?? LedgerEventConstants.EmptyHash,
                "merkle-leaf-hash",
                draft.CanonicalEnvelope.ToJsonString());
            return LedgerWriteResult.Success(record);
        });

    _service = new AttestationPointerService(
        _ledgerEventRepository.Object,
        _writeService.Object,
        _repository,
        _timeProvider,
        NullLogger<AttestationPointerService>.Instance);
}
[Fact]
public async Task CreatePointer_CreatesNewPointer()
{
    // Arrange: a fully-populated pointer input, including an attestation
    // reference with storage/payload details and a positive verification result.
    var input = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-123",
        AttestationType: AttestationType.DsseEnvelope,
        Relationship: AttestationRelationship.VerifiedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:abc123def456789012345678901234567890123456789012345678901234abcd",
            AttestationId: Guid.NewGuid(),
            StorageUri: "s3://attestations/test.json",
            PayloadType: "application/vnd.in-toto+json",
            PredicateType: "https://slsa.dev/provenance/v1"),
        VerificationResult: new VerificationResult(
            Verified: true,
            VerifiedAt: _timeProvider.GetUtcNow(),
            Verifier: "stellaops-attestor",
            VerifierVersion: "2025.01.0"),
        CreatedBy: "test-system");

    // Act
    var result = await _service.CreatePointerAsync(input);

    // Assert: the write succeeded and produced both a pointer id and a ledger event id.
    Assert.True(result.Success);
    Assert.NotNull(result.PointerId);
    Assert.NotNull(result.LedgerEventId);
    Assert.Null(result.Error);

    // Assert: the pointer was persisted to the repository with the input's values.
    var saved = await _repository.GetByIdAsync("tenant-1", result.PointerId!.Value, CancellationToken.None);
    Assert.NotNull(saved);
    Assert.Equal(input.FindingId, saved!.FindingId);
    Assert.Equal(input.AttestationType, saved.AttestationType);
    Assert.Equal(input.AttestationRef.Digest, saved.AttestationRef.Digest);
}
[Fact]
public async Task CreatePointer_IsIdempotent()
{
    // The same logical pointer submitted twice must collapse to a single record.
    var input = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-456",
        AttestationType: AttestationType.SlsaProvenance,
        Relationship: AttestationRelationship.AttestedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:def456789012345678901234567890123456789012345678901234567890abcd"),
        CreatedBy: "test-system");

    var first = await _service.CreatePointerAsync(input);
    var second = await _service.CreatePointerAsync(input);

    Assert.True(first.Success);
    Assert.True(second.Success);
    // Both calls resolve to the same pointer id and only one record is stored.
    Assert.Equal(first.PointerId, second.PointerId);
    var stored = await _repository.GetByFindingIdAsync("tenant-1", "finding-456", CancellationToken.None);
    Assert.Single(stored);
}
[Fact]
public async Task GetPointers_ReturnsAllPointersForFinding()
{
    // Local factory keeps the two seeded inputs visibly parallel.
    static AttestationPointerInput MakeInput(
        AttestationType type, AttestationRelationship relationship, string digest) =>
        new(
            TenantId: "tenant-1",
            FindingId: "finding-multi",
            AttestationType: type,
            Relationship: relationship,
            AttestationRef: new AttestationRef(Digest: digest));

    await _service.CreatePointerAsync(MakeInput(
        AttestationType.DsseEnvelope,
        AttestationRelationship.VerifiedBy,
        "sha256:aaa111222333444555666777888999000111222333444555666777888999000a"));
    await _service.CreatePointerAsync(MakeInput(
        AttestationType.VexAttestation,
        AttestationRelationship.DerivedFrom,
        "sha256:bbb111222333444555666777888999000111222333444555666777888999000b"));

    var pointers = await _service.GetPointersAsync("tenant-1", "finding-multi");

    // Both pointers attached to the finding come back, one of each type.
    Assert.Equal(2, pointers.Count);
    Assert.Contains(pointers, p => p.AttestationType == AttestationType.DsseEnvelope);
    Assert.Contains(pointers, p => p.AttestationType == AttestationType.VexAttestation);
}
[Fact]
public async Task GetSummary_CalculatesCorrectCounts()
{
    // One verified and one unverified pointer on the same finding should yield
    // count 2, verified count 1, and an overall PartiallyVerified status.
    var verified = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-summary",
        AttestationType: AttestationType.DsseEnvelope,
        Relationship: AttestationRelationship.VerifiedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:ver111222333444555666777888999000111222333444555666777888999000a"),
        VerificationResult: new VerificationResult(Verified: true, VerifiedAt: _timeProvider.GetUtcNow()));
    var unverified = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-summary",
        AttestationType: AttestationType.SbomAttestation,
        Relationship: AttestationRelationship.DerivedFrom,
        AttestationRef: new AttestationRef(
            Digest: "sha256:unv111222333444555666777888999000111222333444555666777888999000b"));
    await _service.CreatePointerAsync(verified);
    await _service.CreatePointerAsync(unverified);
    var summary = await _service.GetSummaryAsync("tenant-1", "finding-summary");
    Assert.Equal("finding-summary", summary.FindingId);
    Assert.Equal(2, summary.AttestationCount);
    Assert.Equal(1, summary.VerifiedCount);
    Assert.Equal(OverallVerificationStatus.PartiallyVerified, summary.OverallVerificationStatus);
    // The distinct attestation types of both pointers are reported.
    Assert.Contains(AttestationType.DsseEnvelope, summary.AttestationTypes);
    Assert.Contains(AttestationType.SbomAttestation, summary.AttestationTypes);
}
[Fact]
public async Task Search_FiltersByAttestationType()
{
    // Two pointers on different findings with different attestation types;
    // a type-filtered search must return only the matching one.
    var input1 = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-search-1",
        AttestationType: AttestationType.DsseEnvelope,
        Relationship: AttestationRelationship.VerifiedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:sea111222333444555666777888999000111222333444555666777888999000a"));
    var input2 = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-search-2",
        AttestationType: AttestationType.SlsaProvenance,
        Relationship: AttestationRelationship.AttestedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:sea222333444555666777888999000111222333444555666777888999000111b"));
    await _service.CreatePointerAsync(input1);
    await _service.CreatePointerAsync(input2);
    var query = new AttestationPointerQuery(
        TenantId: "tenant-1",
        AttestationTypes: new[] { AttestationType.DsseEnvelope });
    var results = await _service.SearchAsync(query);
    Assert.Single(results);
    Assert.Equal(AttestationType.DsseEnvelope, results[0].AttestationType);
}
[Fact]
public async Task UpdateVerificationResult_UpdatesExistingPointer()
{
    // Create a pointer without a verification result, then attach one and
    // confirm the stored record reflects the verifier details and checks.
    var input = new AttestationPointerInput(
        TenantId: "tenant-1",
        FindingId: "finding-update",
        AttestationType: AttestationType.DsseEnvelope,
        Relationship: AttestationRelationship.VerifiedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:upd111222333444555666777888999000111222333444555666777888999000a"));
    var createResult = await _service.CreatePointerAsync(input);
    Assert.True(createResult.Success);
    var verificationResult = new VerificationResult(
        Verified: true,
        VerifiedAt: _timeProvider.GetUtcNow(),
        Verifier: "external-verifier",
        VerifierVersion: "1.0.0",
        Checks: new[]
        {
            new VerificationCheck(VerificationCheckType.SignatureValid, true, "ECDSA verified"),
            new VerificationCheck(VerificationCheckType.CertificateValid, true, "Chain verified")
        });
    var success = await _service.UpdateVerificationResultAsync(
        "tenant-1", createResult.PointerId!.Value, verificationResult);
    Assert.True(success);
    var updated = await _repository.GetByIdAsync("tenant-1", createResult.PointerId!.Value, CancellationToken.None);
    Assert.NotNull(updated?.VerificationResult);
    Assert.True(updated.VerificationResult!.Verified);
    Assert.Equal("external-verifier", updated.VerificationResult.Verifier);
    Assert.Equal(2, updated.VerificationResult.Checks!.Count);
}
[Fact]
public async Task TenantIsolation_PreventsAccessAcrossTenants()
{
    // A pointer written under tenant-a must not be visible to tenant-b.
    var creation = await _service.CreatePointerAsync(new AttestationPointerInput(
        TenantId: "tenant-a",
        FindingId: "finding-isolated",
        AttestationType: AttestationType.DsseEnvelope,
        Relationship: AttestationRelationship.VerifiedBy,
        AttestationRef: new AttestationRef(
            Digest: "sha256:iso111222333444555666777888999000111222333444555666777888999000a")));
    Assert.True(creation.Success);

    var ownTenantView = await _service.GetPointersAsync("tenant-a", "finding-isolated");
    var otherTenantView = await _service.GetPointersAsync("tenant-b", "finding-isolated");

    Assert.Single(ownTenantView);
    Assert.Empty(otherTenantView);
}
// Deterministic TimeProvider returning a fixed instant; tests advance it
// explicitly instead of sleeping on the wall clock.
private sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _now;
    public FakeTimeProvider(DateTimeOffset now) => _now = now;
    public override DateTimeOffset GetUtcNow() => _now;
    // Moves the fake clock forward by the given duration.
    public void Advance(TimeSpan duration) => _now = _now.Add(duration);
}
}
/// <summary>
/// In-memory implementation for testing. All reads and writes are guarded by a
/// single lock, so the repository is safe for concurrent test access.
/// </summary>
internal sealed class InMemoryAttestationPointerRepository : IAttestationPointerRepository
{
    private readonly List<AttestationPointerRecord> _records = new();
    private readonly object _lock = new();

    /// <summary>Appends a record; no uniqueness checks (mirrors a raw insert).</summary>
    public Task InsertAsync(AttestationPointerRecord record, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _records.Add(record);
        }
        return Task.CompletedTask;
    }

    /// <summary>Returns the record with the given id for the tenant, or null.</summary>
    public Task<AttestationPointerRecord?> GetByIdAsync(string tenantId, Guid pointerId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var result = _records.FirstOrDefault(r => r.TenantId == tenantId && r.PointerId == pointerId);
            return Task.FromResult(result);
        }
    }

    /// <summary>Returns all of a finding's records, newest first.</summary>
    public Task<IReadOnlyList<AttestationPointerRecord>> GetByFindingIdAsync(string tenantId, string findingId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var results = _records
                .Where(r => r.TenantId == tenantId && r.FindingId == findingId)
                .OrderByDescending(r => r.CreatedAt)
                .ToList();
            return Task.FromResult<IReadOnlyList<AttestationPointerRecord>>(results);
        }
    }

    /// <summary>Returns all records whose attestation digest matches, newest first.</summary>
    public Task<IReadOnlyList<AttestationPointerRecord>> GetByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var results = _records
                .Where(r => r.TenantId == tenantId && r.AttestationRef.Digest == digest)
                .OrderByDescending(r => r.CreatedAt)
                .ToList();
            return Task.FromResult<IReadOnlyList<AttestationPointerRecord>>(results);
        }
    }

    /// <summary>
    /// Applies every non-null filter of <paramref name="query"/>, then orders
    /// newest-first and pages via Offset/Limit.
    /// </summary>
    public Task<IReadOnlyList<AttestationPointerRecord>> SearchAsync(AttestationPointerQuery query, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var results = _records.Where(r => r.TenantId == query.TenantId);
            if (query.FindingIds is { Count: > 0 })
            {
                results = results.Where(r => query.FindingIds.Contains(r.FindingId));
            }
            if (query.AttestationTypes is { Count: > 0 })
            {
                results = results.Where(r => query.AttestationTypes.Contains(r.AttestationType));
            }
            if (query.VerificationStatus.HasValue)
            {
                // Unverified means "never verified" (no result), Failed means an
                // explicit negative result.
                results = query.VerificationStatus.Value switch
                {
                    AttestationVerificationFilter.Verified =>
                        results.Where(r => r.VerificationResult?.Verified == true),
                    AttestationVerificationFilter.Unverified =>
                        results.Where(r => r.VerificationResult is null),
                    AttestationVerificationFilter.Failed =>
                        results.Where(r => r.VerificationResult?.Verified == false),
                    _ => results
                };
            }
            if (query.CreatedAfter.HasValue)
            {
                results = results.Where(r => r.CreatedAt >= query.CreatedAfter.Value);
            }
            if (query.CreatedBefore.HasValue)
            {
                results = results.Where(r => r.CreatedAt <= query.CreatedBefore.Value);
            }
            if (!string.IsNullOrWhiteSpace(query.SignerIdentity))
            {
                results = results.Where(r => r.AttestationRef.SignerInfo?.Subject == query.SignerIdentity);
            }
            if (!string.IsNullOrWhiteSpace(query.PredicateType))
            {
                results = results.Where(r => r.AttestationRef.PredicateType == query.PredicateType);
            }
            var list = results
                .OrderByDescending(r => r.CreatedAt)
                .Skip(query.Offset)
                .Take(query.Limit)
                .ToList();
            return Task.FromResult<IReadOnlyList<AttestationPointerRecord>>(list);
        }
    }

    /// <summary>Aggregates counts, distinct types, and an overall status for a finding.</summary>
    public Task<FindingAttestationSummary> GetSummaryAsync(string tenantId, string findingId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var pointers = _records.Where(r => r.TenantId == tenantId && r.FindingId == findingId).ToList();
            if (pointers.Count == 0)
            {
                return Task.FromResult(new FindingAttestationSummary(
                    findingId, 0, 0, null, Array.Empty<AttestationType>(), OverallVerificationStatus.NoAttestations));
            }
            var verifiedCount = pointers.Count(p => p.VerificationResult?.Verified == true);
            var latest = pointers.Max(p => p.CreatedAt);
            var types = pointers.Select(p => p.AttestationType).Distinct().ToList();
            // pointers.Count > 0 here (empty case returned above), so status is
            // fully determined by how many of them carry a positive verification.
            var status = verifiedCount == pointers.Count
                ? OverallVerificationStatus.AllVerified
                : verifiedCount > 0
                    ? OverallVerificationStatus.PartiallyVerified
                    : OverallVerificationStatus.NoneVerified;
            return Task.FromResult(new FindingAttestationSummary(
                findingId, pointers.Count, verifiedCount, latest, types, status));
        }
    }

    /// <summary>Computes one summary per finding id, preserving input order.</summary>
    public async Task<IReadOnlyList<FindingAttestationSummary>> GetSummariesAsync(string tenantId, IReadOnlyList<string> findingIds, CancellationToken cancellationToken)
    {
        // Await each summary directly. The previous WhenAll(...).ContinueWith(t => t.Result)
        // shape wrapped failures in AggregateException and scheduled the continuation
        // on the default TaskScheduler for no benefit.
        var summaries = new List<FindingAttestationSummary>(findingIds.Count);
        foreach (var findingId in findingIds)
        {
            summaries.Add(await GetSummaryAsync(tenantId, findingId, cancellationToken).ConfigureAwait(false));
        }
        return summaries;
    }

    /// <summary>True when a record with the same finding/digest/type already exists.</summary>
    public Task<bool> ExistsAsync(string tenantId, string findingId, string digest, AttestationType attestationType, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var exists = _records.Any(r =>
                r.TenantId == tenantId &&
                r.FindingId == findingId &&
                r.AttestationRef.Digest == digest &&
                r.AttestationType == attestationType);
            return Task.FromResult(exists);
        }
    }

    /// <summary>Replaces the verification result on a record; no-op if not found.</summary>
    public Task UpdateVerificationResultAsync(string tenantId, Guid pointerId, VerificationResult verificationResult, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var idx = _records.FindIndex(r => r.TenantId == tenantId && r.PointerId == pointerId);
            if (idx >= 0)
            {
                var old = _records[idx];
                _records[idx] = old with { VerificationResult = verificationResult };
            }
        }
        return Task.CompletedTask;
    }

    /// <summary>Counts records attached to a finding.</summary>
    public Task<int> GetCountAsync(string tenantId, string findingId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var count = _records.Count(r => r.TenantId == tenantId && r.FindingId == findingId);
            return Task.FromResult(count);
        }
    }

    /// <summary>
    /// Lists distinct finding ids that have at least one (optionally filtered)
    /// attestation, sorted lexicographically and paged.
    /// </summary>
    public Task<IReadOnlyList<string>> GetFindingIdsWithAttestationsAsync(string tenantId, AttestationVerificationFilter? verificationFilter, IReadOnlyList<AttestationType>? attestationTypes, int limit, int offset, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var results = _records.Where(r => r.TenantId == tenantId);
            if (attestationTypes is { Count: > 0 })
            {
                results = results.Where(r => attestationTypes.Contains(r.AttestationType));
            }
            if (verificationFilter.HasValue)
            {
                results = verificationFilter.Value switch
                {
                    AttestationVerificationFilter.Verified =>
                        results.Where(r => r.VerificationResult?.Verified == true),
                    AttestationVerificationFilter.Unverified =>
                        results.Where(r => r.VerificationResult is null),
                    AttestationVerificationFilter.Failed =>
                        results.Where(r => r.VerificationResult?.Verified == false),
                    _ => results
                };
            }
            var list = results
                .Select(r => r.FindingId)
                .Distinct()
                .OrderBy(f => f)
                .Skip(offset)
                .Take(limit)
                .ToList();
            return Task.FromResult<IReadOnlyList<string>>(list);
        }
    }
}

View File

@@ -0,0 +1,373 @@
namespace StellaOps.Findings.Ledger.Tests.Snapshot;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
using StellaOps.Findings.Ledger.Services;
using Xunit;
public class SnapshotServiceTests
{
    // Real in-memory repositories so repository-side effects can be asserted.
    private readonly InMemorySnapshotRepository _snapshotRepository;
    private readonly InMemoryTimeTravelRepository _timeTravelRepository;
    private readonly SnapshotService _service;

    public SnapshotServiceTests()
    {
        _snapshotRepository = new InMemorySnapshotRepository();
        _timeTravelRepository = new InMemoryTimeTravelRepository();
        _service = new SnapshotService(
            _snapshotRepository,
            _timeTravelRepository,
            NullLogger<SnapshotService>.Instance);
    }

    [Fact]
    public async Task CreateSnapshotAsync_CreatesSnapshotSuccessfully()
    {
        var input = new CreateSnapshotInput(
            TenantId: "tenant-1",
            Label: "test-snapshot",
            Description: "Test description");
        var result = await _service.CreateSnapshotAsync(input);
        Assert.True(result.Success);
        Assert.NotNull(result.Snapshot);
        Assert.Equal("test-snapshot", result.Snapshot.Label);
        // The service is expected to transition the snapshot to Available even
        // though the repository initially stores it as Creating.
        Assert.Equal(SnapshotStatus.Available, result.Snapshot.Status);
    }

    [Fact]
    public async Task CreateSnapshotAsync_WithExpiry_SetsExpiresAt()
    {
        // ExpiresIn should be translated into a future absolute ExpiresAt.
        var input = new CreateSnapshotInput(
            TenantId: "tenant-1",
            Label: "expiring-snapshot",
            ExpiresIn: TimeSpan.FromHours(24));
        var result = await _service.CreateSnapshotAsync(input);
        Assert.True(result.Success);
        Assert.NotNull(result.Snapshot?.ExpiresAt);
        Assert.True(result.Snapshot.ExpiresAt > DateTimeOffset.UtcNow);
    }

    [Fact]
    public async Task GetSnapshotAsync_ReturnsExistingSnapshot()
    {
        var input = new CreateSnapshotInput(TenantId: "tenant-1", Label: "get-test");
        var createResult = await _service.CreateSnapshotAsync(input);
        var snapshot = await _service.GetSnapshotAsync("tenant-1", createResult.Snapshot!.SnapshotId);
        Assert.NotNull(snapshot);
        Assert.Equal("get-test", snapshot.Label);
    }

    [Fact]
    public async Task GetSnapshotAsync_ReturnsNullForNonExistent()
    {
        // An unknown id resolves to null rather than throwing.
        var snapshot = await _service.GetSnapshotAsync("tenant-1", Guid.NewGuid());
        Assert.Null(snapshot);
    }

    [Fact]
    public async Task ListSnapshotsAsync_ReturnsAllSnapshots()
    {
        // Listing for tenant-1 must exclude tenant-2's snapshot.
        await _service.CreateSnapshotAsync(new CreateSnapshotInput("tenant-1", Label: "snap-1"));
        await _service.CreateSnapshotAsync(new CreateSnapshotInput("tenant-1", Label: "snap-2"));
        await _service.CreateSnapshotAsync(new CreateSnapshotInput("tenant-2", Label: "snap-3"));
        var (snapshots, _) = await _service.ListSnapshotsAsync(new SnapshotListQuery("tenant-1"));
        Assert.Equal(2, snapshots.Count);
    }

    [Fact]
    public async Task DeleteSnapshotAsync_MarksAsDeleted()
    {
        // Deletion is a soft delete: the record stays but its status changes.
        var input = new CreateSnapshotInput(TenantId: "tenant-1", Label: "to-delete");
        var createResult = await _service.CreateSnapshotAsync(input);
        var deleted = await _service.DeleteSnapshotAsync("tenant-1", createResult.Snapshot!.SnapshotId);
        Assert.True(deleted);
        var snapshot = await _service.GetSnapshotAsync("tenant-1", createResult.Snapshot.SnapshotId);
        Assert.Equal(SnapshotStatus.Deleted, snapshot?.Status);
    }

    [Fact]
    public async Task GetCurrentPointAsync_ReturnsLatestPoint()
    {
        var point = await _service.GetCurrentPointAsync("tenant-1");
        Assert.True(point.SequenceNumber >= 0);
    }

    [Fact]
    public async Task QueryHistoricalFindingsAsync_ReturnsItems()
    {
        // Seed one historical finding directly into the fake repository.
        _timeTravelRepository.AddFinding("tenant-1", new FindingHistoryItem(
            "finding-1", "artifact-1", "CVE-2024-001", "open", 7.5m, "v1",
            DateTimeOffset.UtcNow.AddDays(-1), DateTimeOffset.UtcNow, null));
        var request = new HistoricalQueryRequest(
            "tenant-1", null, null, null, EntityType.Finding, null);
        var result = await _service.QueryHistoricalFindingsAsync(request);
        Assert.Single(result.Items);
        Assert.Equal("finding-1", result.Items[0].FindingId);
    }

    [Fact]
    public async Task CheckStalenessAsync_ReturnsResult()
    {
        var result = await _service.CheckStalenessAsync("tenant-1", TimeSpan.FromHours(1));
        Assert.NotNull(result);
        Assert.True(result.CheckedAt <= DateTimeOffset.UtcNow);
    }

    [Fact]
    public async Task TenantIsolation_CannotAccessOtherTenantSnapshots()
    {
        // A snapshot created for tenant-1 must be invisible to tenant-2.
        var input = new CreateSnapshotInput(TenantId: "tenant-1", Label: "isolated");
        var createResult = await _service.CreateSnapshotAsync(input);
        var snapshot = await _service.GetSnapshotAsync("tenant-2", createResult.Snapshot!.SnapshotId);
        Assert.Null(snapshot);
    }
}
/// <summary>
/// In-memory implementation for testing. All access is serialized on a single
/// lock; statuses are mutated by replacing records (LedgerSnapshot is a record
/// used with `with`-expressions).
/// </summary>
internal class InMemorySnapshotRepository : ISnapshotRepository
{
    private readonly List<LedgerSnapshot> _snapshots = new();
    private readonly object _lock = new();

    /// <summary>
    /// Creates a snapshot in Creating status with empty statistics; the caller
    /// (service) is responsible for promoting it to Available.
    /// </summary>
    public Task<LedgerSnapshot> CreateAsync(
        string tenantId,
        CreateSnapshotInput input,
        long currentSequence,
        DateTimeOffset currentTimestamp,
        CancellationToken ct = default)
    {
        var snapshot = new LedgerSnapshot(
            tenantId,
            Guid.NewGuid(),
            input.Label,
            input.Description,
            SnapshotStatus.Creating,
            DateTimeOffset.UtcNow,
            // Relative expiry becomes an absolute instant; null means no expiry.
            input.ExpiresIn.HasValue ? DateTimeOffset.UtcNow.Add(input.ExpiresIn.Value) : null,
            // Explicit point-in-time wins over the current ledger position.
            input.AtSequence ?? currentSequence,
            input.AtTimestamp ?? currentTimestamp,
            new SnapshotStatistics(0, 0, 0, 0, 0, 0),
            null,
            null,
            input.Metadata);
        lock (_lock)
        {
            _snapshots.Add(snapshot);
        }
        return Task.FromResult(snapshot);
    }

    /// <summary>Returns the snapshot for the tenant/id pair, or null.</summary>
    public Task<LedgerSnapshot?> GetByIdAsync(string tenantId, Guid snapshotId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var snapshot = _snapshots.FirstOrDefault(s => s.TenantId == tenantId && s.SnapshotId == snapshotId);
            return Task.FromResult(snapshot);
        }
    }

    /// <summary>
    /// Filters by tenant and optional status; simple Take-based paging.
    /// NOTE(review): the query's page token is ignored here — acceptable for a
    /// test double, confirm callers do not rely on continuation.
    /// </summary>
    public Task<(IReadOnlyList<LedgerSnapshot> Snapshots, string? NextPageToken)> ListAsync(
        SnapshotListQuery query,
        CancellationToken ct = default)
    {
        lock (_lock)
        {
            var filtered = _snapshots
                .Where(s => s.TenantId == query.TenantId)
                .Where(s => !query.Status.HasValue || s.Status == query.Status.Value)
                .Take(query.PageSize)
                .ToList();
            return Task.FromResult<(IReadOnlyList<LedgerSnapshot>, string?)>((filtered, null));
        }
    }

    /// <summary>Sets a new status; false if the snapshot does not exist.</summary>
    public Task<bool> UpdateStatusAsync(string tenantId, Guid snapshotId, SnapshotStatus newStatus, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var index = _snapshots.FindIndex(s => s.TenantId == tenantId && s.SnapshotId == snapshotId);
            if (index < 0) return Task.FromResult(false);
            _snapshots[index] = _snapshots[index] with { Status = newStatus };
            return Task.FromResult(true);
        }
    }

    /// <summary>Replaces the statistics; false if the snapshot does not exist.</summary>
    public Task<bool> UpdateStatisticsAsync(string tenantId, Guid snapshotId, SnapshotStatistics statistics, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var index = _snapshots.FindIndex(s => s.TenantId == tenantId && s.SnapshotId == snapshotId);
            if (index < 0) return Task.FromResult(false);
            _snapshots[index] = _snapshots[index] with { Statistics = statistics };
            return Task.FromResult(true);
        }
    }

    /// <summary>Records the Merkle root and optional DSSE digest on the snapshot.</summary>
    public Task<bool> SetMerkleRootAsync(string tenantId, Guid snapshotId, string merkleRoot, string? dsseDigest, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var index = _snapshots.FindIndex(s => s.TenantId == tenantId && s.SnapshotId == snapshotId);
            if (index < 0) return Task.FromResult(false);
            _snapshots[index] = _snapshots[index] with { MerkleRoot = merkleRoot, DsseDigest = dsseDigest };
            return Task.FromResult(true);
        }
    }

    /// <summary>
    /// Marks Available snapshots whose ExpiresAt precedes the cutoff as Expired,
    /// across all tenants; returns how many were transitioned.
    /// </summary>
    public Task<int> ExpireSnapshotsAsync(DateTimeOffset cutoff, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var count = 0;
            for (int i = 0; i < _snapshots.Count; i++)
            {
                if (_snapshots[i].ExpiresAt.HasValue &&
                    _snapshots[i].ExpiresAt < cutoff &&
                    _snapshots[i].Status == SnapshotStatus.Available)
                {
                    _snapshots[i] = _snapshots[i] with { Status = SnapshotStatus.Expired };
                    count++;
                }
            }
            return Task.FromResult(count);
        }
    }

    /// <summary>Soft delete: flips the status to Deleted, keeping the record.</summary>
    public Task<bool> DeleteAsync(string tenantId, Guid snapshotId, CancellationToken ct = default)
    {
        return UpdateStatusAsync(tenantId, snapshotId, SnapshotStatus.Deleted, ct);
    }

    /// <summary>Most recently created Available snapshot for the tenant, or null.</summary>
    public Task<LedgerSnapshot?> GetLatestAsync(string tenantId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var snapshot = _snapshots
                .Where(s => s.TenantId == tenantId && s.Status == SnapshotStatus.Available)
                .OrderByDescending(s => s.CreatedAt)
                .FirstOrDefault();
            return Task.FromResult(snapshot);
        }
    }

    /// <summary>True if a snapshot with the id exists for the tenant, any status.</summary>
    public Task<bool> ExistsAsync(string tenantId, Guid snapshotId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            return Task.FromResult(_snapshots.Any(s => s.TenantId == tenantId && s.SnapshotId == snapshotId));
        }
    }
}
/// <summary>
/// In-memory time-travel repository for testing. Returns canned query points and
/// whatever history items were seeded via <see cref="AddFinding"/>.
/// NOTE(review): unlike the other in-memory repositories in this suite, this one
/// takes no lock — it assumes single-threaded test access; confirm before reuse
/// in concurrent scenarios.
/// </summary>
internal class InMemoryTimeTravelRepository : ITimeTravelRepository
{
    private readonly Dictionary<string, List<FindingHistoryItem>> _findings = new();
    private readonly Dictionary<string, List<VexHistoryItem>> _vex = new();
    private readonly Dictionary<string, List<AdvisoryHistoryItem>> _advisories = new();
    private readonly Dictionary<string, List<ReplayEvent>> _events = new();
    // Fixed "current" ledger sequence reported by query points.
    private long _currentSequence = 100;

    /// <summary>Seeds a historical finding item for the tenant.</summary>
    public void AddFinding(string tenantId, FindingHistoryItem finding)
    {
        // Single TryGetValue lookup instead of ContainsKey + indexer (two hashes).
        if (!_findings.TryGetValue(tenantId, out var list))
        {
            list = new List<FindingHistoryItem>();
            _findings[tenantId] = list;
        }
        list.Add(finding);
    }

    /// <summary>Current point: wall-clock now at the fixed sequence.</summary>
    public Task<QueryPoint> GetCurrentPointAsync(string tenantId, CancellationToken ct = default)
    {
        return Task.FromResult(new QueryPoint(DateTimeOffset.UtcNow, _currentSequence));
    }

    /// <summary>Echoes the requested coordinates, defaulting missing ones to "now".</summary>
    public Task<QueryPoint?> ResolveQueryPointAsync(string tenantId, DateTimeOffset? timestamp, long? sequence, Guid? snapshotId, CancellationToken ct = default)
    {
        return Task.FromResult<QueryPoint?>(new QueryPoint(timestamp ?? DateTimeOffset.UtcNow, sequence ?? _currentSequence, snapshotId));
    }

    /// <summary>Returns all seeded findings for the tenant; filters are ignored.</summary>
    public Task<HistoricalQueryResponse<FindingHistoryItem>> QueryFindingsAsync(HistoricalQueryRequest request, CancellationToken ct = default)
    {
        var items = _findings.TryGetValue(request.TenantId, out var list) ? list : new List<FindingHistoryItem>();
        var queryPoint = new QueryPoint(DateTimeOffset.UtcNow, _currentSequence);
        return Task.FromResult(new HistoricalQueryResponse<FindingHistoryItem>(queryPoint, EntityType.Finding, items, null, items.Count));
    }

    /// <summary>Returns all seeded VEX items for the tenant; filters are ignored.</summary>
    public Task<HistoricalQueryResponse<VexHistoryItem>> QueryVexAsync(HistoricalQueryRequest request, CancellationToken ct = default)
    {
        var items = _vex.TryGetValue(request.TenantId, out var list) ? list : new List<VexHistoryItem>();
        var queryPoint = new QueryPoint(DateTimeOffset.UtcNow, _currentSequence);
        return Task.FromResult(new HistoricalQueryResponse<VexHistoryItem>(queryPoint, EntityType.Vex, items, null, items.Count));
    }

    /// <summary>Returns all seeded advisories for the tenant; filters are ignored.</summary>
    public Task<HistoricalQueryResponse<AdvisoryHistoryItem>> QueryAdvisoriesAsync(HistoricalQueryRequest request, CancellationToken ct = default)
    {
        var items = _advisories.TryGetValue(request.TenantId, out var list) ? list : new List<AdvisoryHistoryItem>();
        var queryPoint = new QueryPoint(DateTimeOffset.UtcNow, _currentSequence);
        return Task.FromResult(new HistoricalQueryResponse<AdvisoryHistoryItem>(queryPoint, EntityType.Advisory, items, null, items.Count));
    }

    /// <summary>Returns seeded events with fixed replay metadata.</summary>
    public Task<(IReadOnlyList<ReplayEvent> Events, ReplayMetadata Metadata)> ReplayEventsAsync(ReplayRequest request, CancellationToken ct = default)
    {
        var items = _events.TryGetValue(request.TenantId, out var list) ? list : new List<ReplayEvent>();
        var metadata = new ReplayMetadata(0, _currentSequence, items.Count, false, 10);
        return Task.FromResult<(IReadOnlyList<ReplayEvent>, ReplayMetadata)>((items, metadata));
    }

    /// <summary>Returns an empty diff between the two (defaulted) points.</summary>
    public Task<DiffResponse> ComputeDiffAsync(DiffRequest request, CancellationToken ct = default)
    {
        var fromPoint = new QueryPoint(request.From.Timestamp ?? DateTimeOffset.UtcNow.AddHours(-1), request.From.SequenceNumber ?? 0);
        var toPoint = new QueryPoint(request.To.Timestamp ?? DateTimeOffset.UtcNow, request.To.SequenceNumber ?? _currentSequence);
        var summary = new DiffSummary(0, 0, 0, 0);
        return Task.FromResult(new DiffResponse(fromPoint, toPoint, summary, null, null));
    }

    /// <summary>Always returns an empty changelog.</summary>
    public Task<IReadOnlyList<ChangeLogEntry>> GetChangelogAsync(string tenantId, EntityType entityType, string entityId, int limit = 100, CancellationToken ct = default)
    {
        return Task.FromResult<IReadOnlyList<ChangeLogEntry>>(new List<ChangeLogEntry>());
    }

    /// <summary>Always reports "not stale" with a fixed five-minute lag.</summary>
    public Task<StalenessResult> CheckStalenessAsync(string tenantId, TimeSpan threshold, CancellationToken ct = default)
    {
        return Task.FromResult(new StalenessResult(
            false,
            DateTimeOffset.UtcNow,
            DateTimeOffset.UtcNow.AddMinutes(-5),
            threshold,
            TimeSpan.FromMinutes(5)));
    }
}

View File

@@ -0,0 +1,328 @@
using StellaOps.Findings.Ledger.Infrastructure.Attestation;
namespace StellaOps.Findings.Ledger.WebService.Contracts;
/// <summary>
/// Request to create an attestation pointer.
/// </summary>
/// <remarks>
/// <c>AttestationType</c> and <c>Relationship</c> carry enum names; they are
/// parsed case-insensitively by <c>AttestationPointerMappings.ToInput</c>,
/// which rejects unknown values with <see cref="ArgumentException"/>.
/// </remarks>
public sealed record CreateAttestationPointerRequest(
    string FindingId,
    string AttestationType,
    string Relationship,
    AttestationRefDto AttestationRef,
    VerificationResultDto? VerificationResult = null,
    string? CreatedBy = null,
    Dictionary<string, object>? Metadata = null);
/// <summary>
/// Reference to an attestation artifact.
/// </summary>
/// <remarks>
/// <c>Digest</c> is the content address of the attestation (tests in this repo
/// use <c>sha256:&lt;hex&gt;</c> strings; format is not validated here — confirm
/// upstream validation). <c>AttestationId</c>, when present, must be a GUID
/// string; it is parsed during mapping.
/// </remarks>
public sealed record AttestationRefDto(
    string Digest,
    string? AttestationId = null,
    string? StorageUri = null,
    string? PayloadType = null,
    string? PredicateType = null,
    IReadOnlyList<string>? SubjectDigests = null,
    SignerInfoDto? SignerInfo = null,
    RekorEntryRefDto? RekorEntry = null);
/// <summary>
/// Information about the attestation signer.
/// </summary>
public sealed record SignerInfoDto(
    string? KeyId = null,
    string? Issuer = null,
    string? Subject = null,
    IReadOnlyList<string>? CertificateChain = null,
    DateTimeOffset? SignedAt = null);
/// <summary>
/// Reference to Rekor transparency log entry.
/// </summary>
public sealed record RekorEntryRefDto(
    long? LogIndex = null,
    string? LogId = null,
    string? Uuid = null,
    long? IntegratedTime = null);
/// <summary>
/// Result of attestation verification.
/// </summary>
public sealed record VerificationResultDto(
    bool Verified,
    DateTimeOffset VerifiedAt,
    string? Verifier = null,
    string? VerifierVersion = null,
    string? PolicyRef = null,
    IReadOnlyList<VerificationCheckDto>? Checks = null,
    IReadOnlyList<string>? Warnings = null,
    IReadOnlyList<string>? Errors = null);
/// <summary>
/// Individual verification check result.
/// </summary>
/// <remarks>
/// <c>CheckType</c> is a <c>VerificationCheckType</c> enum name, parsed
/// case-insensitively during mapping.
/// </remarks>
public sealed record VerificationCheckDto(
    string CheckType,
    bool Passed,
    string? Details = null,
    Dictionary<string, object>? Evidence = null);
/// <summary>
/// Response for creating an attestation pointer.
/// </summary>
public sealed record CreateAttestationPointerResponse(
    bool Success,
    string? PointerId,
    string? LedgerEventId,
    string? Error);
/// <summary>
/// Response for getting attestation pointers.
/// </summary>
public sealed record AttestationPointerResponse(
    string PointerId,
    string FindingId,
    string AttestationType,
    string Relationship,
    AttestationRefDto AttestationRef,
    VerificationResultDto? VerificationResult,
    DateTimeOffset CreatedAt,
    string CreatedBy,
    Dictionary<string, object>? Metadata,
    string? LedgerEventId);
/// <summary>
/// Response for attestation summary.
/// </summary>
public sealed record AttestationSummaryResponse(
    string FindingId,
    int AttestationCount,
    int VerifiedCount,
    DateTimeOffset? LatestAttestation,
    IReadOnlyList<string> AttestationTypes,
    string OverallVerificationStatus);
/// <summary>
/// Query parameters for searching attestation pointers.
/// </summary>
/// <remarks>
/// Unknown <c>AttestationTypes</c> values are silently dropped during mapping;
/// an unknown <c>VerificationStatus</c> disables that filter.
/// </remarks>
public sealed record AttestationPointerSearchRequest(
    IReadOnlyList<string>? FindingIds = null,
    IReadOnlyList<string>? AttestationTypes = null,
    string? VerificationStatus = null,
    DateTimeOffset? CreatedAfter = null,
    DateTimeOffset? CreatedBefore = null,
    string? SignerIdentity = null,
    string? PredicateType = null,
    int Limit = 100,
    int Offset = 0);
/// <summary>
/// Response for searching attestation pointers.
/// </summary>
public sealed record AttestationPointerSearchResponse(
    IReadOnlyList<AttestationPointerResponse> Pointers,
    int TotalCount);
/// <summary>
/// Request to update verification result.
/// </summary>
public sealed record UpdateVerificationResultRequest(
    VerificationResultDto VerificationResult);
/// <summary>
/// Mapping extensions for attestation pointer DTOs.
/// </summary>
public static class AttestationPointerMappings
{
public static AttestationPointerInput ToInput(this CreateAttestationPointerRequest request, string tenantId)
{
if (!Enum.TryParse<AttestationType>(request.AttestationType, ignoreCase: true, out var attestationType))
{
throw new ArgumentException($"Invalid attestation type: {request.AttestationType}");
}
if (!Enum.TryParse<AttestationRelationship>(request.Relationship, ignoreCase: true, out var relationship))
{
throw new ArgumentException($"Invalid relationship: {request.Relationship}");
}
return new AttestationPointerInput(
tenantId,
request.FindingId,
attestationType,
relationship,
request.AttestationRef.ToModel(),
request.VerificationResult?.ToModel(),
request.CreatedBy,
request.Metadata);
}
public static AttestationRef ToModel(this AttestationRefDto dto)
{
return new AttestationRef(
dto.Digest,
dto.AttestationId is not null ? Guid.Parse(dto.AttestationId) : null,
dto.StorageUri,
dto.PayloadType,
dto.PredicateType,
dto.SubjectDigests,
dto.SignerInfo?.ToModel(),
dto.RekorEntry?.ToModel());
}
public static SignerInfo ToModel(this SignerInfoDto dto)
{
return new SignerInfo(
dto.KeyId,
dto.Issuer,
dto.Subject,
dto.CertificateChain,
dto.SignedAt);
}
public static RekorEntryRef ToModel(this RekorEntryRefDto dto)
{
return new RekorEntryRef(
dto.LogIndex,
dto.LogId,
dto.Uuid,
dto.IntegratedTime);
}
public static VerificationResult ToModel(this VerificationResultDto dto)
{
return new VerificationResult(
dto.Verified,
dto.VerifiedAt,
dto.Verifier,
dto.VerifierVersion,
dto.PolicyRef,
dto.Checks?.Select(c => c.ToModel()).ToList(),
dto.Warnings,
dto.Errors);
}
public static VerificationCheck ToModel(this VerificationCheckDto dto)
{
if (!Enum.TryParse<VerificationCheckType>(dto.CheckType, ignoreCase: true, out var checkType))
{
throw new ArgumentException($"Invalid check type: {dto.CheckType}");
}
return new VerificationCheck(checkType, dto.Passed, dto.Details, dto.Evidence);
}
public static AttestationPointerResponse ToResponse(this AttestationPointerRecord record)
{
return new AttestationPointerResponse(
record.PointerId.ToString(),
record.FindingId,
record.AttestationType.ToString(),
record.Relationship.ToString(),
record.AttestationRef.ToDto(),
record.VerificationResult?.ToDto(),
record.CreatedAt,
record.CreatedBy,
record.Metadata,
record.LedgerEventId?.ToString());
}
public static AttestationRefDto ToDto(this AttestationRef model)
{
return new AttestationRefDto(
model.Digest,
model.AttestationId?.ToString(),
model.StorageUri,
model.PayloadType,
model.PredicateType,
model.SubjectDigests,
model.SignerInfo?.ToDto(),
model.RekorEntry?.ToDto());
}
public static SignerInfoDto ToDto(this SignerInfo model)
{
return new SignerInfoDto(
model.KeyId,
model.Issuer,
model.Subject,
model.CertificateChain,
model.SignedAt);
}
/// <summary>Converts a Rekor entry domain model into its wire-format DTO.</summary>
public static RekorEntryRefDto ToDto(this RekorEntryRef model) =>
    new(model.LogIndex, model.LogId, model.Uuid, model.IntegratedTime);
/// <summary>Converts a verification result domain model (including nested checks) into its DTO.</summary>
public static VerificationResultDto ToDto(this VerificationResult model)
{
    var checkDtos = model.Checks?.Select(static check => check.ToDto()).ToList();

    return new VerificationResultDto(
        model.Verified,
        model.VerifiedAt,
        model.Verifier,
        model.VerifierVersion,
        model.PolicyRef,
        checkDtos,
        model.Warnings,
        model.Errors);
}
/// <summary>Converts a verification check domain model into its DTO; the check type is stringified.</summary>
public static VerificationCheckDto ToDto(this VerificationCheck model) =>
    new(model.CheckType.ToString(), model.Passed, model.Details, model.Evidence);
/// <summary>Maps a per-finding attestation summary to its API response shape.</summary>
public static AttestationSummaryResponse ToResponse(this FindingAttestationSummary summary)
{
    // Attestation type enums are exposed as their string names.
    var typeNames = summary.AttestationTypes.Select(static t => t.ToString()).ToList();

    return new AttestationSummaryResponse(
        summary.FindingId,
        summary.AttestationCount,
        summary.VerifiedCount,
        summary.LatestAttestation,
        typeNames,
        summary.OverallVerificationStatus.ToString());
}
/// <summary>
/// Builds a tenant-scoped domain query from an attestation pointer search request.
/// </summary>
/// <param name="request">The incoming search request.</param>
/// <param name="tenantId">Tenant the query is scoped to.</param>
/// <returns>The domain-level <see cref="AttestationPointerQuery"/>.</returns>
/// <remarks>
/// Unknown attestation-type names and unknown verification-status values are silently
/// dropped/ignored, preserving the tolerant parsing of the original implementation.
/// </remarks>
public static AttestationPointerQuery ToQuery(this AttestationPointerSearchRequest request, string tenantId)
{
    IReadOnlyList<AttestationType>? attestationTypes = null;
    if (request.AttestationTypes is { Count: > 0 })
    {
        // Parse each candidate name exactly once (previously TryParse was followed by a
        // second Enum.Parse over the same strings, parsing every value twice).
        var parsed = new List<AttestationType>(request.AttestationTypes.Count);
        foreach (var candidate in request.AttestationTypes)
        {
            if (Enum.TryParse<AttestationType>(candidate, ignoreCase: true, out var type))
            {
                parsed.Add(type);
            }
        }

        // Note: like the original, this yields an empty list (not null) when no name parses.
        attestationTypes = parsed;
    }

    AttestationVerificationFilter? verificationFilter = null;
    if (!string.IsNullOrWhiteSpace(request.VerificationStatus)
        && Enum.TryParse<AttestationVerificationFilter>(request.VerificationStatus, ignoreCase: true, out var filter))
    {
        verificationFilter = filter;
    }

    return new AttestationPointerQuery(
        tenantId,
        request.FindingIds,
        attestationTypes,
        verificationFilter,
        request.CreatedAfter,
        request.CreatedBefore,
        request.SignerIdentity,
        request.PredicateType,
        request.Limit,
        request.Offset);
}
}

View File

@@ -0,0 +1,460 @@
namespace StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
// === Snapshot Contracts ===
/// <summary>
/// Request to create a snapshot.
/// </summary>
public sealed record CreateSnapshotRequest(
    string? Label = null,
    string? Description = null,
    DateTimeOffset? AtTimestamp = null,
    long? AtSequence = null,
    int? ExpiresInHours = null,
    IReadOnlyList<string>? IncludeEntityTypes = null,
    bool Sign = false,
    Dictionary<string, object>? Metadata = null)
{
    /// <summary>Translates this API request into the domain snapshot input for <paramref name="tenantId"/>.</summary>
    public CreateSnapshotInput ToInput(string tenantId)
    {
        // The wire contract expresses expiry in whole hours; the domain wants a TimeSpan.
        TimeSpan? expiresIn = ExpiresInHours is int hours ? TimeSpan.FromHours(hours) : null;

        return new CreateSnapshotInput(
            TenantId: tenantId,
            Label: Label,
            Description: Description,
            AtTimestamp: AtTimestamp,
            AtSequence: AtSequence,
            ExpiresIn: expiresIn,
            IncludeEntityTypes: IncludeEntityTypes?.Select(ParseEntityType).ToList(),
            Sign: Sign,
            Metadata: Metadata);
    }

    // Unknown entity-type names deliberately fall back to Finding instead of throwing.
    private static EntityType ParseEntityType(string value) =>
        Enum.TryParse<EntityType>(value, true, out var parsed) ? parsed : EntityType.Finding;
}
/// <summary>
/// Response for a snapshot.
/// </summary>
/// <remarks>
/// Produced by <c>SnapshotExtensions.ToResponse(LedgerSnapshot)</c>; <c>Status</c> is the
/// stringified domain snapshot status, and <c>MerkleRoot</c>/<c>DsseDigest</c> may be null.
/// </remarks>
public sealed record SnapshotResponse(
    Guid SnapshotId,
    string? Label,
    string? Description,
    string Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset? ExpiresAt,
    long SequenceNumber,
    DateTimeOffset Timestamp,
    SnapshotStatisticsResponse Statistics,
    string? MerkleRoot,
    string? DsseDigest,
    Dictionary<string, object>? Metadata);
/// <summary>
/// Response for snapshot statistics (entity counts plus total size in bytes).
/// </summary>
public sealed record SnapshotStatisticsResponse(
    long FindingsCount,
    long VexStatementsCount,
    long AdvisoriesCount,
    long SbomsCount,
    long EventsCount,
    long SizeBytes);
/// <summary>
/// Result of creating a snapshot. <c>Snapshot</c> is populated on success, <c>Error</c> on failure.
/// </summary>
public sealed record CreateSnapshotResponse(
    bool Success,
    SnapshotResponse? Snapshot,
    string? Error);
/// <summary>
/// Response for listing snapshots; <c>NextPageToken</c> is null when there are no further pages.
/// </summary>
public sealed record SnapshotListResponse(
    IReadOnlyList<SnapshotResponse> Snapshots,
    string? NextPageToken);
// === Time-Travel Contracts ===
/// <summary>
/// Request for historical query.
/// </summary>
public sealed record HistoricalQueryApiRequest(
    DateTimeOffset? AtTimestamp = null,
    long? AtSequence = null,
    Guid? SnapshotId = null,
    string? Status = null,
    decimal? SeverityMin = null,
    decimal? SeverityMax = null,
    string? PolicyVersion = null,
    string? ArtifactId = null,
    string? VulnId = null,
    int PageSize = 500,
    string? PageToken = null)
{
    /// <summary>Builds the tenant-scoped domain query over the given entity type.</summary>
    public HistoricalQueryRequest ToRequest(string tenantId, EntityType entityType)
    {
        // Bundle the optional filter fields first; the remaining fields map one-to-one.
        var filters = new TimeQueryFilters(
            Status: Status,
            SeverityMin: SeverityMin,
            SeverityMax: SeverityMax,
            PolicyVersion: PolicyVersion,
            ArtifactId: ArtifactId,
            VulnId: VulnId);

        return new HistoricalQueryRequest(
            TenantId: tenantId,
            AtTimestamp: AtTimestamp,
            AtSequence: AtSequence,
            SnapshotId: SnapshotId,
            EntityType: entityType,
            Filters: filters,
            PageSize: PageSize,
            PageToken: PageToken);
    }
}
/// <summary>
/// Response for historical query.
/// </summary>
/// <typeparam name="T">Item contract type (e.g. <see cref="FindingHistoryResponse"/>).</typeparam>
public sealed record HistoricalQueryApiResponse<T>(
    QueryPointResponse QueryPoint,
    string EntityType,
    IReadOnlyList<T> Items,
    string? NextPageToken,
    long TotalCount);
/// <summary>
/// Query point response: the resolved point in ledger time the query was evaluated at.
/// </summary>
public sealed record QueryPointResponse(
    DateTimeOffset Timestamp,
    long SequenceNumber,
    Guid? SnapshotId);
/// <summary>
/// Finding history item response.
/// </summary>
public sealed record FindingHistoryResponse(
    string FindingId,
    string ArtifactId,
    string VulnId,
    string Status,
    decimal? Severity,
    string? PolicyVersion,
    DateTimeOffset FirstSeen,
    DateTimeOffset LastUpdated,
    Dictionary<string, string>? Labels);
/// <summary>
/// VEX history item response.
/// </summary>
public sealed record VexHistoryResponse(
    string StatementId,
    string VulnId,
    string ProductId,
    string Status,
    string? Justification,
    DateTimeOffset IssuedAt,
    DateTimeOffset? ExpiresAt);
/// <summary>
/// Advisory history item response.
/// </summary>
public sealed record AdvisoryHistoryResponse(
    string AdvisoryId,
    string Source,
    string Title,
    decimal? CvssScore,
    DateTimeOffset PublishedAt,
    DateTimeOffset? ModifiedAt);
// === Replay Contracts ===
/// <summary>
/// Request for replaying events.
/// </summary>
public sealed record ReplayApiRequest(
    long? FromSequence = null,
    long? ToSequence = null,
    DateTimeOffset? FromTimestamp = null,
    DateTimeOffset? ToTimestamp = null,
    IReadOnlyList<Guid>? ChainIds = null,
    IReadOnlyList<string>? EventTypes = null,
    bool IncludePayload = true,
    int PageSize = 1000)
{
    /// <summary>Builds the tenant-scoped domain replay request; fields map one-to-one.</summary>
    public ReplayRequest ToRequest(string tenantId)
    {
        return new ReplayRequest(
            TenantId: tenantId,
            FromSequence: FromSequence,
            ToSequence: ToSequence,
            FromTimestamp: FromTimestamp,
            ToTimestamp: ToTimestamp,
            ChainIds: ChainIds,
            EventTypes: EventTypes,
            IncludePayload: IncludePayload,
            PageSize: PageSize);
    }
}
/// <summary>
/// Response for replay: the replayed events plus paging/timing metadata.
/// </summary>
public sealed record ReplayApiResponse(
    IReadOnlyList<ReplayEventResponse> Events,
    ReplayMetadataResponse Metadata);
/// <summary>
/// Replay event response.
/// </summary>
/// <remarks><c>Payload</c> is only populated when the request asked for payloads.</remarks>
public sealed record ReplayEventResponse(
    Guid EventId,
    long SequenceNumber,
    Guid ChainId,
    int ChainSequence,
    string EventType,
    DateTimeOffset OccurredAt,
    DateTimeOffset RecordedAt,
    string? ActorId,
    string? ActorType,
    string? ArtifactId,
    string? FindingId,
    string? PolicyVersion,
    string EventHash,
    string PreviousHash,
    object? Payload);
/// <summary>
/// Replay metadata response. <c>HasMore</c> indicates further pages beyond <c>ToSequence</c>.
/// </summary>
public sealed record ReplayMetadataResponse(
    long FromSequence,
    long ToSequence,
    long EventsCount,
    bool HasMore,
    long ReplayDurationMs);
// === Diff Contracts ===
/// <summary>
/// Request for computing diff.
/// </summary>
public sealed record DiffApiRequest(
    DiffPointRequest From,
    DiffPointRequest To,
    IReadOnlyList<string>? EntityTypes = null,
    bool IncludeUnchanged = false,
    string OutputFormat = "Summary")
{
    /// <summary>Builds the tenant-scoped domain diff request.</summary>
    public DiffRequest ToRequest(string tenantId)
    {
        // Unrecognized output-format names fall back to Summary.
        var format = Enum.TryParse<DiffOutputFormat>(OutputFormat, true, out var parsedFormat)
            ? parsedFormat
            : DiffOutputFormat.Summary;

        return new DiffRequest(
            TenantId: tenantId,
            From: From.ToDiffPoint(),
            To: To.ToDiffPoint(),
            EntityTypes: EntityTypes?.Select(ParseEntityType).ToList(),
            IncludeUnchanged: IncludeUnchanged,
            OutputFormat: format);
    }

    // Unknown entity-type names deliberately fall back to Finding instead of throwing.
    private static EntityType ParseEntityType(string value) =>
        Enum.TryParse<EntityType>(value, true, out var parsed) ? parsed : EntityType.Finding;
}
/// <summary>
/// Diff point request: identifies one side of a diff by timestamp, sequence, or snapshot.
/// </summary>
public sealed record DiffPointRequest(
    DateTimeOffset? Timestamp = null,
    long? SequenceNumber = null,
    Guid? SnapshotId = null)
{
    /// <summary>Converts this contract into the domain diff point.</summary>
    public DiffPoint ToDiffPoint()
    {
        return new DiffPoint(Timestamp, SequenceNumber, SnapshotId);
    }
}
/// <summary>
/// Response for diff. <c>Changes</c> is null when only a summary was requested.
/// </summary>
public sealed record DiffApiResponse(
    QueryPointResponse FromPoint,
    QueryPointResponse ToPoint,
    DiffSummaryResponse Summary,
    IReadOnlyList<DiffEntryResponse>? Changes,
    string? NextPageToken);
/// <summary>
/// Diff summary response: aggregate change counts, optionally broken down per entity type.
/// </summary>
public sealed record DiffSummaryResponse(
    int Added,
    int Modified,
    int Removed,
    int Unchanged,
    Dictionary<string, DiffCountsResponse>? ByEntityType);
/// <summary>
/// Diff counts response (per-entity-type change counts).
/// </summary>
public sealed record DiffCountsResponse(int Added, int Modified, int Removed);
/// <summary>
/// Diff entry response: one changed entity with its before/after state.
/// </summary>
public sealed record DiffEntryResponse(
    string EntityType,
    string EntityId,
    string ChangeType,
    object? FromState,
    object? ToState,
    IReadOnlyList<string>? ChangedFields);
// === Changelog Contracts ===
/// <summary>
/// Changelog entry response: one ledger event affecting an entity.
/// </summary>
public sealed record ChangeLogEntryResponse(
    long SequenceNumber,
    DateTimeOffset Timestamp,
    string EntityType,
    string EntityId,
    string EventType,
    string? EventHash,
    string? ActorId,
    string? Summary);
// === Staleness Contracts ===
/// <summary>
/// Staleness check response, optionally broken down per entity type.
/// </summary>
public sealed record StalenessResponse(
    bool IsStale,
    DateTimeOffset CheckedAt,
    DateTimeOffset? LastEventAt,
    string StalenessThreshold,
    string? StalenessDuration,
    Dictionary<string, EntityStalenessResponse>? ByEntityType);
/// <summary>
/// Entity staleness response.
/// </summary>
public sealed record EntityStalenessResponse(
    bool IsStale,
    DateTimeOffset? LastEventAt,
    long EventsBehind);
// === Extension Methods ===
/// <summary>
/// Mapping helpers that project snapshot/time-travel domain types onto their API response contracts.
/// Arguments are positional, following the declaration order of the response records above.
/// </summary>
public static class SnapshotExtensions
{
    /// <summary>Maps a ledger snapshot (status stringified) to its API shape.</summary>
    public static SnapshotResponse ToResponse(this LedgerSnapshot snapshot) => new(
        snapshot.SnapshotId,
        snapshot.Label,
        snapshot.Description,
        snapshot.Status.ToString(),
        snapshot.CreatedAt,
        snapshot.ExpiresAt,
        snapshot.SequenceNumber,
        snapshot.Timestamp,
        snapshot.Statistics.ToResponse(),
        snapshot.MerkleRoot,
        snapshot.DsseDigest,
        snapshot.Metadata);

    /// <summary>Maps snapshot statistics one-to-one.</summary>
    public static SnapshotStatisticsResponse ToResponse(this SnapshotStatistics stats) => new(
        stats.FindingsCount,
        stats.VexStatementsCount,
        stats.AdvisoriesCount,
        stats.SbomsCount,
        stats.EventsCount,
        stats.SizeBytes);

    /// <summary>Maps the resolved query point one-to-one.</summary>
    public static QueryPointResponse ToResponse(this QueryPoint point) => new(
        point.Timestamp,
        point.SequenceNumber,
        point.SnapshotId);

    /// <summary>Maps a historical finding item one-to-one.</summary>
    public static FindingHistoryResponse ToResponse(this FindingHistoryItem item) => new(
        item.FindingId,
        item.ArtifactId,
        item.VulnId,
        item.Status,
        item.Severity,
        item.PolicyVersion,
        item.FirstSeen,
        item.LastUpdated,
        item.Labels);

    /// <summary>Maps a historical VEX item one-to-one.</summary>
    public static VexHistoryResponse ToResponse(this VexHistoryItem item) => new(
        item.StatementId,
        item.VulnId,
        item.ProductId,
        item.Status,
        item.Justification,
        item.IssuedAt,
        item.ExpiresAt);

    /// <summary>Maps a historical advisory item one-to-one.</summary>
    public static AdvisoryHistoryResponse ToResponse(this AdvisoryHistoryItem item) => new(
        item.AdvisoryId,
        item.Source,
        item.Title,
        item.CvssScore,
        item.PublishedAt,
        item.ModifiedAt);

    /// <summary>Maps a replayed ledger event one-to-one.</summary>
    public static ReplayEventResponse ToResponse(this ReplayEvent evt) => new(
        evt.EventId,
        evt.SequenceNumber,
        evt.ChainId,
        evt.ChainSequence,
        evt.EventType,
        evt.OccurredAt,
        evt.RecordedAt,
        evt.ActorId,
        evt.ActorType,
        evt.ArtifactId,
        evt.FindingId,
        evt.PolicyVersion,
        evt.EventHash,
        evt.PreviousHash,
        evt.Payload);

    /// <summary>Maps replay metadata one-to-one.</summary>
    public static ReplayMetadataResponse ToResponse(this ReplayMetadata metadata) => new(
        metadata.FromSequence,
        metadata.ToSequence,
        metadata.EventsCount,
        metadata.HasMore,
        metadata.ReplayDurationMs);

    /// <summary>Maps a diff summary; the per-entity-type breakdown is re-keyed by the enum's string name.</summary>
    public static DiffSummaryResponse ToResponse(this DiffSummary summary) => new(
        summary.Added,
        summary.Modified,
        summary.Removed,
        summary.Unchanged,
        summary.ByEntityType?.ToDictionary(
            pair => pair.Key.ToString(),
            pair => new DiffCountsResponse(pair.Value.Added, pair.Value.Modified, pair.Value.Removed)));

    /// <summary>Maps a diff entry; entity and change types are stringified.</summary>
    public static DiffEntryResponse ToResponse(this DiffEntry entry) => new(
        entry.EntityType.ToString(),
        entry.EntityId,
        entry.ChangeType.ToString(),
        entry.FromState,
        entry.ToState,
        entry.ChangedFields);

    /// <summary>Maps a changelog entry; the entity type is stringified.</summary>
    public static ChangeLogEntryResponse ToResponse(this ChangeLogEntry entry) => new(
        entry.SequenceNumber,
        entry.Timestamp,
        entry.EntityType.ToString(),
        entry.EntityId,
        entry.EventType,
        entry.EventHash,
        entry.ActorId,
        entry.Summary);

    /// <summary>Maps a staleness result; threshold/duration and breakdown keys are stringified.</summary>
    public static StalenessResponse ToResponse(this StalenessResult result) => new(
        result.IsStale,
        result.CheckedAt,
        result.LastEventAt,
        result.StalenessThreshold.ToString(),
        result.StalenessDuration?.ToString(),
        result.ByEntityType?.ToDictionary(
            pair => pair.Key.ToString(),
            pair => new EntityStalenessResponse(pair.Value.IsStale, pair.Value.LastEventAt, pair.Value.EventsBehind)));
}

View File

@@ -155,6 +155,14 @@ builder.Services.AddHostedService<LedgerMerkleAnchorWorker>();
builder.Services.AddHostedService<LedgerProjectionWorker>();
builder.Services.AddSingleton<ExportQueryService>();
builder.Services.AddSingleton<AttestationQueryService>();
// Attestation pointer persistence (Postgres-backed) and its application service.
builder.Services.AddSingleton<StellaOps.Findings.Ledger.Infrastructure.Attestation.IAttestationPointerRepository,
    StellaOps.Findings.Ledger.Infrastructure.Postgres.PostgresAttestationPointerRepository>();
builder.Services.AddSingleton<AttestationPointerService>();
// Snapshot and time-travel persistence (Postgres-backed) plus the snapshot service used by the endpoints below.
builder.Services.AddSingleton<StellaOps.Findings.Ledger.Infrastructure.Snapshot.ISnapshotRepository,
    StellaOps.Findings.Ledger.Infrastructure.Postgres.PostgresSnapshotRepository>();
builder.Services.AddSingleton<StellaOps.Findings.Ledger.Infrastructure.Snapshot.ITimeTravelRepository,
    StellaOps.Findings.Ledger.Infrastructure.Postgres.PostgresTimeTravelRepository>();
builder.Services.AddSingleton<SnapshotService>();
var app = builder.Build();
@@ -633,6 +641,206 @@ app.MapPost("/internal/ledger/airgap-import", async Task<Results<Accepted<Airgap
.ProducesProblem(StatusCodes.Status400BadRequest)
.ProducesProblem(StatusCodes.Status409Conflict);
// Attestation Pointer Endpoints (LEDGER-ATTEST-73-001)
// All endpoints are tenant-scoped via TryGetTenant; write endpoints require LedgerWritePolicy,
// read endpoints require LedgerExportPolicy.

// POST /v1/ledger/attestation-pointers — create a pointer; 201 with Location on success, 400 on failure.
app.MapPost("/v1/ledger/attestation-pointers", async Task<Results<Created<CreateAttestationPointerResponse>, Ok<CreateAttestationPointerResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    CreateAttestationPointerRequest request,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    try
    {
        var input = request.ToInput(tenantId);
        var result = await service.CreatePointerAsync(input, cancellationToken).ConfigureAwait(false);
        var response = new CreateAttestationPointerResponse(
            result.Success,
            result.PointerId?.ToString(),
            result.LedgerEventId?.ToString(),
            result.Error);
        if (!result.Success)
        {
            return TypedResults.Problem(
                statusCode: StatusCodes.Status400BadRequest,
                title: "attestation_pointer_failed",
                detail: result.Error);
        }
        return TypedResults.Created($"/v1/ledger/attestation-pointers/{result.PointerId}", response);
    }
    catch (ArgumentException ex)
    {
        // Mapper/validation failures (e.g. invalid enum names in the request) surface as 400s.
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_request",
            detail: ex.Message);
    }
})
.WithName("CreateAttestationPointer")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status201Created)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/attestation-pointers/{pointerId} — fetch one pointer by GUID; 404 when absent.
app.MapGet("/v1/ledger/attestation-pointers/{pointerId}", async Task<Results<JsonHttpResult<AttestationPointerResponse>, NotFound, ProblemHttpResult>> (
    HttpContext httpContext,
    string pointerId,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    if (!Guid.TryParse(pointerId, out var pointerGuid))
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_pointer_id",
            detail: "Pointer ID must be a valid GUID.");
    }
    var pointer = await service.GetPointerAsync(tenantId, pointerGuid, cancellationToken).ConfigureAwait(false);
    if (pointer is null)
    {
        return TypedResults.NotFound();
    }
    return TypedResults.Json(pointer.ToResponse());
})
.WithName("GetAttestationPointer")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/findings/{findingId}/attestation-pointers — list all pointers for a finding.
app.MapGet("/v1/ledger/findings/{findingId}/attestation-pointers", async Task<Results<JsonHttpResult<IReadOnlyList<AttestationPointerResponse>>, ProblemHttpResult>> (
    HttpContext httpContext,
    string findingId,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var pointers = await service.GetPointersAsync(tenantId, findingId, cancellationToken).ConfigureAwait(false);
    IReadOnlyList<AttestationPointerResponse> responseList = pointers.Select(p => p.ToResponse()).ToList();
    return TypedResults.Json(responseList);
})
.WithName("GetFindingAttestationPointers")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/findings/{findingId}/attestation-summary — aggregate attestation stats for a finding.
app.MapGet("/v1/ledger/findings/{findingId}/attestation-summary", async Task<Results<JsonHttpResult<AttestationSummaryResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    string findingId,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var summary = await service.GetSummaryAsync(tenantId, findingId, cancellationToken).ConfigureAwait(false);
    return TypedResults.Json(summary.ToResponse());
})
.WithName("GetFindingAttestationSummary")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// POST /v1/ledger/attestation-pointers/search — filtered search; the response echoes the result count.
app.MapPost("/v1/ledger/attestation-pointers/search", async Task<Results<JsonHttpResult<AttestationPointerSearchResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    AttestationPointerSearchRequest request,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    try
    {
        var query = request.ToQuery(tenantId);
        var pointers = await service.SearchAsync(query, cancellationToken).ConfigureAwait(false);
        var response = new AttestationPointerSearchResponse(
            pointers.Select(p => p.ToResponse()).ToList(),
            pointers.Count);
        return TypedResults.Json(response);
    }
    catch (ArgumentException ex)
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_request",
            detail: ex.Message);
    }
})
.WithName("SearchAttestationPointers")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// PUT /v1/ledger/attestation-pointers/{pointerId}/verification — attach/replace a verification result; 204 on success.
app.MapPut("/v1/ledger/attestation-pointers/{pointerId}/verification", async Task<Results<NoContent, NotFound, ProblemHttpResult>> (
    HttpContext httpContext,
    string pointerId,
    UpdateVerificationResultRequest request,
    AttestationPointerService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    if (!Guid.TryParse(pointerId, out var pointerGuid))
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_pointer_id",
            detail: "Pointer ID must be a valid GUID.");
    }
    try
    {
        // ToModel throws ArgumentException for unknown check types; translated to a 400 below.
        var verificationResult = request.VerificationResult.ToModel();
        var success = await service.UpdateVerificationResultAsync(tenantId, pointerGuid, verificationResult, cancellationToken).ConfigureAwait(false);
        if (!success)
        {
            return TypedResults.NotFound();
        }
        return TypedResults.NoContent();
    }
    catch (ArgumentException ex)
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_request",
            detail: ex.Message);
    }
})
.WithName("UpdateAttestationPointerVerification")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status204NoContent)
.Produces(StatusCodes.Status404NotFound)
.ProducesProblem(StatusCodes.Status400BadRequest);
app.MapGet("/.well-known/openapi", () =>
{
var contentRoot = AppContext.BaseDirectory;
@@ -649,6 +857,383 @@ app.MapGet("/.well-known/openapi", () =>
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status500InternalServerError);
// Snapshot Endpoints (LEDGER-PACKS-42-001-DEV)
// Tenant-scoped CRUD over ledger snapshots; writes require LedgerWritePolicy, reads LedgerExportPolicy.

// POST /v1/ledger/snapshots — create a snapshot; 201 with Location on success, 400 with the service error otherwise.
app.MapPost("/v1/ledger/snapshots", async Task<Results<Created<CreateSnapshotResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    CreateSnapshotRequest request,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var input = request.ToInput(tenantId);
    var result = await service.CreateSnapshotAsync(input, cancellationToken).ConfigureAwait(false);
    var response = new CreateSnapshotResponse(
        result.Success,
        result.Snapshot?.ToResponse(),
        result.Error);
    if (!result.Success)
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "snapshot_creation_failed",
            detail: result.Error);
    }
    return TypedResults.Created($"/v1/ledger/snapshots/{result.Snapshot!.SnapshotId}", response);
})
.WithName("CreateSnapshot")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status201Created)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/snapshots — paged listing; filters (status, created_after/before, page_size, page_token)
// come from the query string; unknown status values are ignored.
app.MapGet("/v1/ledger/snapshots", async Task<Results<JsonHttpResult<SnapshotListResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var statusStr = httpContext.Request.Query["status"].ToString();
    Domain.SnapshotStatus? status = null;
    if (!string.IsNullOrEmpty(statusStr) && Enum.TryParse<Domain.SnapshotStatus>(statusStr, true, out var parsedStatus))
    {
        status = parsedStatus;
    }
    var query = new Domain.SnapshotListQuery(
        tenantId,
        status,
        ParseDate(httpContext.Request.Query["created_after"]),
        ParseDate(httpContext.Request.Query["created_before"]),
        ParseInt(httpContext.Request.Query["page_size"]) ?? 100,
        httpContext.Request.Query["page_token"].ToString());
    var (snapshots, nextPageToken) = await service.ListSnapshotsAsync(query, cancellationToken).ConfigureAwait(false);
    var response = new SnapshotListResponse(
        snapshots.Select(s => s.ToResponse()).ToList(),
        nextPageToken);
    return TypedResults.Json(response);
})
.WithName("ListSnapshots")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/snapshots/{snapshotId} — fetch one snapshot by GUID; 404 when absent.
app.MapGet("/v1/ledger/snapshots/{snapshotId}", async Task<Results<JsonHttpResult<SnapshotResponse>, NotFound, ProblemHttpResult>> (
    HttpContext httpContext,
    string snapshotId,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    if (!Guid.TryParse(snapshotId, out var snapshotGuid))
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_snapshot_id",
            detail: "Snapshot ID must be a valid GUID.");
    }
    var snapshot = await service.GetSnapshotAsync(tenantId, snapshotGuid, cancellationToken).ConfigureAwait(false);
    if (snapshot is null)
    {
        return TypedResults.NotFound();
    }
    return TypedResults.Json(snapshot.ToResponse());
})
.WithName("GetSnapshot")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound)
.ProducesProblem(StatusCodes.Status400BadRequest);

// DELETE /v1/ledger/snapshots/{snapshotId} — delete a snapshot; 204 on success, 404 when absent.
app.MapDelete("/v1/ledger/snapshots/{snapshotId}", async Task<Results<NoContent, NotFound, ProblemHttpResult>> (
    HttpContext httpContext,
    string snapshotId,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    if (!Guid.TryParse(snapshotId, out var snapshotGuid))
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_snapshot_id",
            detail: "Snapshot ID must be a valid GUID.");
    }
    var deleted = await service.DeleteSnapshotAsync(tenantId, snapshotGuid, cancellationToken).ConfigureAwait(false);
    if (!deleted)
    {
        return TypedResults.NotFound();
    }
    return TypedResults.NoContent();
})
.WithName("DeleteSnapshot")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status204NoContent)
.Produces(StatusCodes.Status404NotFound)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Time-Travel Query Endpoints
// Each endpoint resolves a point in ledger time from query-string parameters
// (at_timestamp / at_sequence / snapshot_id) and returns a page of historical items.

// GET /v1/ledger/time-travel/findings — historical findings; supports status/severity/policy/artifact/vuln filters.
app.MapGet("/v1/ledger/time-travel/findings", async Task<Results<JsonHttpResult<HistoricalQueryApiResponse<FindingHistoryResponse>>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var request = new HistoricalQueryApiRequest(
        AtTimestamp: ParseDate(httpContext.Request.Query["at_timestamp"]),
        AtSequence: ParseLong(httpContext.Request.Query["at_sequence"]),
        SnapshotId: ParseGuid(httpContext.Request.Query["snapshot_id"]),
        Status: httpContext.Request.Query["status"].ToString(),
        SeverityMin: ParseDecimal(httpContext.Request.Query["severity_min"]),
        SeverityMax: ParseDecimal(httpContext.Request.Query["severity_max"]),
        PolicyVersion: httpContext.Request.Query["policy_version"].ToString(),
        ArtifactId: httpContext.Request.Query["artifact_id"].ToString(),
        VulnId: httpContext.Request.Query["vuln_id"].ToString(),
        PageSize: ParseInt(httpContext.Request.Query["page_size"]) ?? 500,
        PageToken: httpContext.Request.Query["page_token"].ToString());
    var domainRequest = request.ToRequest(tenantId, Domain.EntityType.Finding);
    var result = await service.QueryHistoricalFindingsAsync(domainRequest, cancellationToken).ConfigureAwait(false);
    var response = new HistoricalQueryApiResponse<FindingHistoryResponse>(
        result.QueryPoint.ToResponse(),
        "Finding",
        result.Items.Select(i => i.ToResponse()).ToList(),
        result.NextPageToken,
        result.TotalCount);
    return TypedResults.Json(response);
})
.WithName("TimeTravelQueryFindings")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/time-travel/vex — historical VEX statements (point-in-time + paging parameters only).
app.MapGet("/v1/ledger/time-travel/vex", async Task<Results<JsonHttpResult<HistoricalQueryApiResponse<VexHistoryResponse>>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var request = new HistoricalQueryApiRequest(
        AtTimestamp: ParseDate(httpContext.Request.Query["at_timestamp"]),
        AtSequence: ParseLong(httpContext.Request.Query["at_sequence"]),
        SnapshotId: ParseGuid(httpContext.Request.Query["snapshot_id"]),
        PageSize: ParseInt(httpContext.Request.Query["page_size"]) ?? 500,
        PageToken: httpContext.Request.Query["page_token"].ToString());
    var domainRequest = request.ToRequest(tenantId, Domain.EntityType.Vex);
    var result = await service.QueryHistoricalVexAsync(domainRequest, cancellationToken).ConfigureAwait(false);
    var response = new HistoricalQueryApiResponse<VexHistoryResponse>(
        result.QueryPoint.ToResponse(),
        "Vex",
        result.Items.Select(i => i.ToResponse()).ToList(),
        result.NextPageToken,
        result.TotalCount);
    return TypedResults.Json(response);
})
.WithName("TimeTravelQueryVex")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);

// GET /v1/ledger/time-travel/advisories — historical advisories (point-in-time + paging parameters only).
app.MapGet("/v1/ledger/time-travel/advisories", async Task<Results<JsonHttpResult<HistoricalQueryApiResponse<AdvisoryHistoryResponse>>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }
    var request = new HistoricalQueryApiRequest(
        AtTimestamp: ParseDate(httpContext.Request.Query["at_timestamp"]),
        AtSequence: ParseLong(httpContext.Request.Query["at_sequence"]),
        SnapshotId: ParseGuid(httpContext.Request.Query["snapshot_id"]),
        PageSize: ParseInt(httpContext.Request.Query["page_size"]) ?? 500,
        PageToken: httpContext.Request.Query["page_token"].ToString());
    var domainRequest = request.ToRequest(tenantId, Domain.EntityType.Advisory);
    var result = await service.QueryHistoricalAdvisoriesAsync(domainRequest, cancellationToken).ConfigureAwait(false);
    var response = new HistoricalQueryApiResponse<AdvisoryHistoryResponse>(
        result.QueryPoint.ToResponse(),
        "Advisory",
        result.Items.Select(i => i.ToResponse()).ToList(),
        result.NextPageToken,
        result.TotalCount);
    return TypedResults.Json(response);
})
.WithName("TimeTravelQueryAdvisories")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Replay Endpoint
app.MapPost("/v1/ledger/replay", async Task<Results<JsonHttpResult<ReplayApiResponse>, ProblemHttpResult>> (
HttpContext httpContext,
ReplayApiRequest request,
SnapshotService service,
CancellationToken cancellationToken) =>
{
if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
{
return tenantProblem!;
}
var domainRequest = request.ToRequest(tenantId);
var (events, metadata) = await service.ReplayEventsAsync(domainRequest, cancellationToken).ConfigureAwait(false);
var response = new ReplayApiResponse(
events.Select(e => e.ToResponse()).ToList(),
metadata.ToResponse());
return TypedResults.Json(response);
})
.WithName("ReplayEvents")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Diff Endpoint
app.MapPost("/v1/ledger/diff", async Task<Results<JsonHttpResult<DiffApiResponse>, ProblemHttpResult>> (
HttpContext httpContext,
DiffApiRequest request,
SnapshotService service,
CancellationToken cancellationToken) =>
{
if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
{
return tenantProblem!;
}
var domainRequest = request.ToRequest(tenantId);
var result = await service.ComputeDiffAsync(domainRequest, cancellationToken).ConfigureAwait(false);
var response = new DiffApiResponse(
result.FromPoint.ToResponse(),
result.ToPoint.ToResponse(),
result.Summary.ToResponse(),
result.Changes?.Select(c => c.ToResponse()).ToList(),
result.NextPageToken);
return TypedResults.Json(response);
})
.WithName("ComputeDiff")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Changelog Endpoint: returns the ordered change history for a single entity.
app.MapGet("/v1/ledger/changelog/{entityType}/{entityId}", async Task<Results<JsonHttpResult<IReadOnlyList<ChangeLogEntryResponse>>, ProblemHttpResult>> (
    HttpContext httpContext,
    string entityType,
    string entityId,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }

    // Enum.TryParse also accepts arbitrary numeric strings (e.g. "999") and yields an
    // undefined enum value; Enum.IsDefined rejects those so only real names/values pass.
    if (!Enum.TryParse<Domain.EntityType>(entityType, true, out var parsedEntityType)
        || !Enum.IsDefined(parsedEntityType))
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_entity_type",
            detail: "Entity type must be one of: Finding, Vex, Advisory, Sbom, Evidence.");
    }

    // Page size defaults to 100 entries when the "limit" query parameter is absent or invalid.
    var limit = ParseInt(httpContext.Request.Query["limit"]) ?? 100;
    var changelog = await service.GetChangelogAsync(tenantId, parsedEntityType, entityId, limit, cancellationToken).ConfigureAwait(false);
    IReadOnlyList<ChangeLogEntryResponse> response = changelog.Select(e => e.ToResponse()).ToList();
    return TypedResults.Json(response);
})
.WithName("GetChangelog")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Staleness Check Endpoint: reports whether the ledger has fallen behind a freshness threshold.
app.MapGet("/v1/ledger/staleness", async Task<Results<JsonHttpResult<StalenessResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }

    // Default freshness window is one hour; a zero or negative threshold is meaningless
    // and would previously have been passed straight through as a non-positive TimeSpan.
    var thresholdMinutes = ParseInt(httpContext.Request.Query["threshold_minutes"]) ?? 60;
    if (thresholdMinutes <= 0)
    {
        return TypedResults.Problem(
            statusCode: StatusCodes.Status400BadRequest,
            title: "invalid_threshold",
            detail: "threshold_minutes must be a positive integer.");
    }

    var threshold = TimeSpan.FromMinutes(thresholdMinutes);
    var result = await service.CheckStalenessAsync(tenantId, threshold, cancellationToken).ConfigureAwait(false);
    return TypedResults.Json(result.ToResponse());
})
.WithName("CheckStaleness")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Current Point Endpoint: returns the tenant's latest ledger position (sequence number + timestamp).
app.MapGet("/v1/ledger/current-point", async Task<Results<JsonHttpResult<QueryPointResponse>, ProblemHttpResult>> (
    HttpContext httpContext,
    SnapshotService service,
    CancellationToken cancellationToken) =>
{
    if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId))
    {
        return tenantProblem!;
    }

    var point = await service.GetCurrentPointAsync(tenantId, cancellationToken).ConfigureAwait(false);
    return TypedResults.Json(point.ToResponse());
})
.WithName("GetCurrentPoint")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
app.Run();
static Created<LedgerEventResponse> CreateCreatedResponse(LedgerEventRecord record)
@@ -738,3 +1323,8 @@ static bool? ParseBool(string value)
{
return bool.TryParse(value, out var result) ? result : null;
}
// Parses a GUID from its string form; yields null when the text is not a valid GUID.
static Guid? ParseGuid(string value)
{
    if (Guid.TryParse(value, out var parsed))
    {
        return parsed;
    }

    return null;
}

View File

@@ -18,6 +18,7 @@ public static class LedgerEventConstants
public const string EventEvidenceSnapshotLinked = "airgap.evidence_snapshot_linked";
public const string EventAirgapTimelineImpact = "airgap.timeline_impact";
public const string EventOrchestratorExportRecorded = "orchestrator.export_recorded";
public const string EventAttestationPointerLinked = "attestation.pointer_linked";
public static readonly ImmutableHashSet<string> SupportedEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
EventFindingCreated,
@@ -33,7 +34,8 @@ public static class LedgerEventConstants
EventAirgapBundleImported,
EventEvidenceSnapshotLinked,
EventAirgapTimelineImpact,
EventOrchestratorExportRecorded);
EventOrchestratorExportRecorded,
EventAttestationPointerLinked);
public static readonly ImmutableHashSet<string> FindingEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
EventFindingCreated,

View File

@@ -0,0 +1,281 @@
namespace StellaOps.Findings.Ledger.Domain;
/// <summary>
/// Represents a point-in-time snapshot of ledger state.
/// </summary>
/// <param name="TenantId">Owning tenant; snapshots are always tenant-scoped.</param>
/// <param name="SnapshotId">Unique identifier of the snapshot.</param>
/// <param name="Label">Optional human-friendly label.</param>
/// <param name="Description">Optional free-form description.</param>
/// <param name="Status">Lifecycle status (see <see cref="SnapshotStatus"/>).</param>
/// <param name="CreatedAt">When the snapshot record was created.</param>
/// <param name="ExpiresAt">Optional expiry; null means no expiry is recorded.</param>
/// <param name="SequenceNumber">Ledger sequence number the snapshot captures.</param>
/// <param name="Timestamp">Ledger timestamp the snapshot captures.</param>
/// <param name="Statistics">Aggregate entity counts and size for the captured state.</param>
/// <param name="MerkleRoot">Optional Merkle root hash of the snapshot contents.</param>
/// <param name="DsseDigest">Optional DSSE digest; populated when the snapshot is signed.</param>
/// <param name="Metadata">Optional caller-supplied metadata bag.</param>
public sealed record LedgerSnapshot(
    string TenantId,
    Guid SnapshotId,
    string? Label,
    string? Description,
    SnapshotStatus Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset? ExpiresAt,
    long SequenceNumber,
    DateTimeOffset Timestamp,
    SnapshotStatistics Statistics,
    string? MerkleRoot,
    string? DsseDigest,
    Dictionary<string, object>? Metadata = null);
/// <summary>
/// Snapshot lifecycle status.
/// </summary>
public enum SnapshotStatus
{
    /// <summary>Snapshot creation is in progress.</summary>
    Creating,
    /// <summary>Snapshot is complete and usable.</summary>
    Available,
    /// <summary>Snapshot is currently being exported.</summary>
    Exporting,
    /// <summary>Snapshot has passed its expiry.</summary>
    Expired,
    /// <summary>Snapshot has been deleted.</summary>
    Deleted
}
/// <summary>
/// Statistics for a snapshot.
/// </summary>
public sealed record SnapshotStatistics(
long FindingsCount,
long VexStatementsCount,
long AdvisoriesCount,
long SbomsCount,
long EventsCount,
long SizeBytes);
/// <summary>
/// Input for creating a snapshot.
/// </summary>
/// <param name="TenantId">Tenant the snapshot belongs to.</param>
/// <param name="Label">Optional human-friendly label.</param>
/// <param name="Description">Optional free-form description.</param>
/// <param name="AtTimestamp">Optional point in time to snapshot at.</param>
/// <param name="AtSequence">Optional ledger sequence to snapshot at.</param>
/// <param name="ExpiresIn">Optional time-to-live for the snapshot.</param>
/// <param name="IncludeEntityTypes">Optional subset of entity types to capture; null presumably means all — confirm with the snapshot service.</param>
/// <param name="Sign">When true, signing of the snapshot is requested (cf. <see cref="LedgerSnapshot.DsseDigest"/>).</param>
/// <param name="Metadata">Optional caller-supplied metadata bag.</param>
public sealed record CreateSnapshotInput(
    string TenantId,
    string? Label = null,
    string? Description = null,
    DateTimeOffset? AtTimestamp = null,
    long? AtSequence = null,
    TimeSpan? ExpiresIn = null,
    IReadOnlyList<EntityType>? IncludeEntityTypes = null,
    bool Sign = false,
    Dictionary<string, object>? Metadata = null);
/// <summary>
/// Result of creating a snapshot.
/// </summary>
public sealed record CreateSnapshotResult(
bool Success,
LedgerSnapshot? Snapshot,
string? Error);
/// <summary>
/// Entity types tracked in the ledger.
/// </summary>
public enum EntityType
{
Finding,
Vex,
Advisory,
Sbom,
Evidence
}
/// <summary>
/// Query point specification (timestamp or sequence).
/// </summary>
public sealed record QueryPoint(
DateTimeOffset Timestamp,
long SequenceNumber,
Guid? SnapshotId = null);
/// <summary>
/// Filters for time-travel queries.
/// </summary>
public sealed record TimeQueryFilters(
string? Status = null,
decimal? SeverityMin = null,
decimal? SeverityMax = null,
string? PolicyVersion = null,
string? ArtifactId = null,
string? VulnId = null,
Dictionary<string, string>? Labels = null);
/// <summary>
/// Request for historical query.
/// </summary>
public sealed record HistoricalQueryRequest(
string TenantId,
DateTimeOffset? AtTimestamp,
long? AtSequence,
Guid? SnapshotId,
EntityType EntityType,
TimeQueryFilters? Filters,
int PageSize = 500,
string? PageToken = null);
/// <summary>
/// Response for historical query.
/// </summary>
public sealed record HistoricalQueryResponse<T>(
QueryPoint QueryPoint,
EntityType EntityType,
IReadOnlyList<T> Items,
string? NextPageToken,
long TotalCount);
/// <summary>
/// Request for replaying events.
/// </summary>
public sealed record ReplayRequest(
string TenantId,
long? FromSequence = null,
long? ToSequence = null,
DateTimeOffset? FromTimestamp = null,
DateTimeOffset? ToTimestamp = null,
IReadOnlyList<Guid>? ChainIds = null,
IReadOnlyList<string>? EventTypes = null,
bool IncludePayload = true,
int PageSize = 1000);
/// <summary>
/// Replayed event record.
/// </summary>
public sealed record ReplayEvent(
Guid EventId,
long SequenceNumber,
Guid ChainId,
int ChainSequence,
string EventType,
DateTimeOffset OccurredAt,
DateTimeOffset RecordedAt,
string? ActorId,
string? ActorType,
string? ArtifactId,
string? FindingId,
string? PolicyVersion,
string EventHash,
string PreviousHash,
object? Payload);
/// <summary>
/// Replay metadata.
/// </summary>
public sealed record ReplayMetadata(
long FromSequence,
long ToSequence,
long EventsCount,
bool HasMore,
long ReplayDurationMs);
/// <summary>
/// Request for computing diff between two points.
/// </summary>
public sealed record DiffRequest(
string TenantId,
DiffPoint From,
DiffPoint To,
IReadOnlyList<EntityType>? EntityTypes = null,
bool IncludeUnchanged = false,
DiffOutputFormat OutputFormat = DiffOutputFormat.Summary);
/// <summary>
/// Diff point specification.
/// </summary>
public sealed record DiffPoint(
DateTimeOffset? Timestamp = null,
long? SequenceNumber = null,
Guid? SnapshotId = null);
/// <summary>
/// Diff output format.
/// </summary>
public enum DiffOutputFormat
{
Summary,
Detailed,
Full
}
/// <summary>
/// Diff summary counts.
/// </summary>
public sealed record DiffSummary(
int Added,
int Modified,
int Removed,
int Unchanged,
Dictionary<EntityType, DiffCounts>? ByEntityType = null);
/// <summary>
/// Diff counts per entity type.
/// </summary>
public sealed record DiffCounts(int Added, int Modified, int Removed);
/// <summary>
/// Individual diff entry.
/// </summary>
public sealed record DiffEntry(
EntityType EntityType,
string EntityId,
DiffChangeType ChangeType,
object? FromState,
object? ToState,
IReadOnlyList<string>? ChangedFields);
/// <summary>
/// Type of change in a diff.
/// </summary>
public enum DiffChangeType
{
Added,
Modified,
Removed
}
/// <summary>
/// Diff response.
/// </summary>
public sealed record DiffResponse(
QueryPoint FromPoint,
QueryPoint ToPoint,
DiffSummary Summary,
IReadOnlyList<DiffEntry>? Changes,
string? NextPageToken);
/// <summary>
/// Changelog entry.
/// </summary>
public sealed record ChangeLogEntry(
long SequenceNumber,
DateTimeOffset Timestamp,
EntityType EntityType,
string EntityId,
string EventType,
string? EventHash,
string? ActorId,
string? Summary);
/// <summary>
/// Staleness check result.
/// </summary>
public sealed record StalenessResult(
bool IsStale,
DateTimeOffset CheckedAt,
DateTimeOffset? LastEventAt,
TimeSpan StalenessThreshold,
TimeSpan? StalenessDuration,
Dictionary<EntityType, EntityStaleness>? ByEntityType = null);
/// <summary>
/// Staleness per entity type.
/// </summary>
public sealed record EntityStaleness(
bool IsStale,
DateTimeOffset? LastEventAt,
long EventsBehind);
/// <summary>
/// Query parameters for listing snapshots.
/// </summary>
public sealed record SnapshotListQuery(
string TenantId,
SnapshotStatus? Status = null,
DateTimeOffset? CreatedAfter = null,
DateTimeOffset? CreatedBefore = null,
int PageSize = 100,
string? PageToken = null);

View File

@@ -0,0 +1,184 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Attestation;
/// <summary>
/// Record representing an attestation pointer linking a finding to a verification report or attestation envelope.
/// </summary>
/// <param name="TenantId">Owning tenant; pointers are always tenant-scoped.</param>
/// <param name="PointerId">Unique identifier of this pointer.</param>
/// <param name="FindingId">Identifier of the finding being attested.</param>
/// <param name="AttestationType">Kind of attestation artifact being referenced.</param>
/// <param name="Relationship">Semantic relationship between finding and attestation.</param>
/// <param name="AttestationRef">Reference (digest, storage URI, signer, Rekor entry) to the artifact.</param>
/// <param name="VerificationResult">Outcome of verifying the attestation; null when unverified.</param>
/// <param name="CreatedAt">When the pointer was created.</param>
/// <param name="CreatedBy">Actor that created the pointer.</param>
/// <param name="Metadata">Optional caller-supplied metadata bag.</param>
/// <param name="LedgerEventId">Optional id of the ledger event recording this link.</param>
public sealed record AttestationPointerRecord(
    string TenantId,
    Guid PointerId,
    string FindingId,
    AttestationType AttestationType,
    AttestationRelationship Relationship,
    AttestationRef AttestationRef,
    VerificationResult? VerificationResult,
    DateTimeOffset CreatedAt,
    string CreatedBy,
    Dictionary<string, object>? Metadata = null,
    Guid? LedgerEventId = null);
/// <summary>
/// Type of attestation being pointed to.
/// </summary>
public enum AttestationType
{
    /// <summary>A verification report.</summary>
    VerificationReport,
    /// <summary>A DSSE (Dead Simple Signing Envelope) envelope.</summary>
    DsseEnvelope,
    /// <summary>SLSA provenance attestation.</summary>
    SlsaProvenance,
    /// <summary>VEX (Vulnerability Exploitability eXchange) attestation.</summary>
    VexAttestation,
    /// <summary>SBOM attestation.</summary>
    SbomAttestation,
    /// <summary>Scan result attestation.</summary>
    ScanAttestation,
    /// <summary>Policy evaluation attestation.</summary>
    PolicyAttestation,
    /// <summary>Approval attestation.</summary>
    ApprovalAttestation
}
/// <summary>
/// Semantic relationship between finding and attestation.
/// </summary>
public enum AttestationRelationship
{
VerifiedBy,
AttestedBy,
SignedBy,
ApprovedBy,
DerivedFrom
}
/// <summary>
/// Reference to an attestation artifact.
/// </summary>
public sealed record AttestationRef(
string Digest,
Guid? AttestationId = null,
string? StorageUri = null,
string? PayloadType = null,
string? PredicateType = null,
IReadOnlyList<string>? SubjectDigests = null,
SignerInfo? SignerInfo = null,
RekorEntryRef? RekorEntry = null);
/// <summary>
/// Information about the attestation signer.
/// </summary>
public sealed record SignerInfo(
string? KeyId = null,
string? Issuer = null,
string? Subject = null,
IReadOnlyList<string>? CertificateChain = null,
DateTimeOffset? SignedAt = null);
/// <summary>
/// Reference to Rekor transparency log entry.
/// </summary>
public sealed record RekorEntryRef(
long? LogIndex = null,
string? LogId = null,
string? Uuid = null,
long? IntegratedTime = null);
/// <summary>
/// Result of attestation verification.
/// </summary>
public sealed record VerificationResult(
bool Verified,
DateTimeOffset VerifiedAt,
string? Verifier = null,
string? VerifierVersion = null,
string? PolicyRef = null,
IReadOnlyList<VerificationCheck>? Checks = null,
IReadOnlyList<string>? Warnings = null,
IReadOnlyList<string>? Errors = null);
/// <summary>
/// Individual verification check result.
/// </summary>
public sealed record VerificationCheck(
VerificationCheckType CheckType,
bool Passed,
string? Details = null,
Dictionary<string, object>? Evidence = null);
/// <summary>
/// Type of verification check performed.
/// </summary>
public enum VerificationCheckType
{
SignatureValid,
CertificateValid,
CertificateNotExpired,
CertificateNotRevoked,
RekorEntryValid,
TimestampValid,
PolicyMet,
IdentityVerified,
IssuerTrusted
}
/// <summary>
/// Input for creating an attestation pointer.
/// </summary>
public sealed record AttestationPointerInput(
string TenantId,
string FindingId,
AttestationType AttestationType,
AttestationRelationship Relationship,
AttestationRef AttestationRef,
VerificationResult? VerificationResult = null,
string? CreatedBy = null,
Dictionary<string, object>? Metadata = null);
/// <summary>
/// Result of creating an attestation pointer.
/// </summary>
public sealed record AttestationPointerResult(
bool Success,
Guid? PointerId,
Guid? LedgerEventId,
string? Error);
/// <summary>
/// Summary of attestations for a finding.
/// </summary>
public sealed record FindingAttestationSummary(
string FindingId,
int AttestationCount,
int VerifiedCount,
DateTimeOffset? LatestAttestation,
IReadOnlyList<AttestationType> AttestationTypes,
OverallVerificationStatus OverallVerificationStatus);
/// <summary>
/// Overall verification status for a finding's attestations.
/// </summary>
public enum OverallVerificationStatus
{
AllVerified,
PartiallyVerified,
NoneVerified,
NoAttestations
}
/// <summary>
/// Query parameters for searching attestation pointers.
/// </summary>
public sealed record AttestationPointerQuery(
string TenantId,
IReadOnlyList<string>? FindingIds = null,
IReadOnlyList<AttestationType>? AttestationTypes = null,
AttestationVerificationFilter? VerificationStatus = null,
DateTimeOffset? CreatedAfter = null,
DateTimeOffset? CreatedBefore = null,
string? SignerIdentity = null,
string? PredicateType = null,
int Limit = 100,
int Offset = 0);
/// <summary>
/// Filter for verification status.
/// </summary>
public enum AttestationVerificationFilter
{
Any,
Verified,
Unverified,
Failed
}

View File

@@ -0,0 +1,97 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Attestation;
/// <summary>
/// Repository for managing attestation pointers linking findings to verification reports and attestation envelopes.
/// All operations are tenant-scoped.
/// </summary>
public interface IAttestationPointerRepository
{
    /// <summary>
    /// Inserts a new attestation pointer.
    /// </summary>
    Task InsertAsync(AttestationPointerRecord record, CancellationToken cancellationToken);

    /// <summary>
    /// Gets an attestation pointer by ID.
    /// </summary>
    /// <returns>The matching record, or null when none exists.</returns>
    Task<AttestationPointerRecord?> GetByIdAsync(
        string tenantId,
        Guid pointerId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets all attestation pointers for a finding.
    /// </summary>
    Task<IReadOnlyList<AttestationPointerRecord>> GetByFindingIdAsync(
        string tenantId,
        string findingId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets attestation pointers by attestation digest.
    /// </summary>
    Task<IReadOnlyList<AttestationPointerRecord>> GetByDigestAsync(
        string tenantId,
        string digest,
        CancellationToken cancellationToken);

    /// <summary>
    /// Searches attestation pointers based on query parameters.
    /// </summary>
    Task<IReadOnlyList<AttestationPointerRecord>> SearchAsync(
        AttestationPointerQuery query,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets attestation summary for a finding.
    /// </summary>
    /// <returns>A summary; an empty summary is returned when the finding has no attestations.</returns>
    Task<FindingAttestationSummary> GetSummaryAsync(
        string tenantId,
        string findingId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets attestation summaries for multiple findings.
    /// </summary>
    /// <returns>One summary per requested finding id, including empty summaries for findings with no attestations.</returns>
    Task<IReadOnlyList<FindingAttestationSummary>> GetSummariesAsync(
        string tenantId,
        IReadOnlyList<string> findingIds,
        CancellationToken cancellationToken);

    /// <summary>
    /// Checks if an attestation pointer already exists (for idempotency).
    /// </summary>
    Task<bool> ExistsAsync(
        string tenantId,
        string findingId,
        string digest,
        AttestationType attestationType,
        CancellationToken cancellationToken);

    /// <summary>
    /// Updates the verification result for an attestation pointer.
    /// </summary>
    Task UpdateVerificationResultAsync(
        string tenantId,
        Guid pointerId,
        VerificationResult verificationResult,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets the count of attestation pointers for a finding.
    /// </summary>
    Task<int> GetCountAsync(
        string tenantId,
        string findingId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets findings that have attestations matching the criteria.
    /// </summary>
    Task<IReadOnlyList<string>> GetFindingIdsWithAttestationsAsync(
        string tenantId,
        AttestationVerificationFilter? verificationFilter,
        IReadOnlyList<AttestationType>? attestationTypes,
        int limit,
        int offset,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,668 @@
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Infrastructure.Attestation;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
/// <summary>
/// Postgres-backed repository for attestation pointers.
/// </summary>
public sealed class PostgresAttestationPointerRepository : IAttestationPointerRepository
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
WriteIndented = false
};
private readonly LedgerDataSource _dataSource;
private readonly ILogger<PostgresAttestationPointerRepository> _logger;
/// <summary>
/// Creates the repository over the tenant-aware ledger data source.
/// </summary>
public PostgresAttestationPointerRepository(
    LedgerDataSource dataSource,
    ILogger<PostgresAttestationPointerRepository> logger)
{
    ArgumentNullException.ThrowIfNull(dataSource);
    ArgumentNullException.ThrowIfNull(logger);

    _dataSource = dataSource;
    _logger = logger;
}
/// <summary>
/// Inserts a new attestation pointer row. JSON-valued columns (attestation_ref,
/// verification_result, metadata) are serialized with snake_case naming and cast to jsonb.
/// </summary>
public async Task InsertAsync(AttestationPointerRecord record, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(record);

    const string sql = """
        INSERT INTO ledger_attestation_pointers (
            tenant_id, pointer_id, finding_id, attestation_type, relationship,
            attestation_ref, verification_result, created_at, created_by,
            metadata, ledger_event_id
        ) VALUES (
            @tenant_id, @pointer_id, @finding_id, @attestation_type, @relationship,
            @attestation_ref::jsonb, @verification_result::jsonb, @created_at, @created_by,
            @metadata::jsonb, @ledger_event_id
        )
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        record.TenantId, "attestation_pointer_write", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };

    command.Parameters.AddWithValue("tenant_id", record.TenantId);
    command.Parameters.AddWithValue("pointer_id", record.PointerId);
    command.Parameters.AddWithValue("finding_id", record.FindingId);
    // Enums are persisted as their string names, matching the text comparisons used by the query methods.
    command.Parameters.AddWithValue("attestation_type", record.AttestationType.ToString());
    command.Parameters.AddWithValue("relationship", record.Relationship.ToString());
    command.Parameters.AddWithValue("attestation_ref", JsonSerializer.Serialize(record.AttestationRef, JsonOptions));
    // Optional columns are written as SQL NULL when the record has no value for them.
    command.Parameters.AddWithValue("verification_result",
        record.VerificationResult is not null
            ? JsonSerializer.Serialize(record.VerificationResult, JsonOptions)
            : DBNull.Value);
    command.Parameters.AddWithValue("created_at", record.CreatedAt);
    command.Parameters.AddWithValue("created_by", record.CreatedBy);
    command.Parameters.AddWithValue("metadata",
        record.Metadata is not null
            ? JsonSerializer.Serialize(record.Metadata, JsonOptions)
            : DBNull.Value);
    command.Parameters.AddWithValue("ledger_event_id",
        record.LedgerEventId.HasValue ? record.LedgerEventId.Value : DBNull.Value);

    await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

    _logger.LogDebug(
        "Inserted attestation pointer {PointerId} for finding {FindingId} with type {AttestationType}",
        record.PointerId, record.FindingId, record.AttestationType);
}
/// <summary>
/// Loads a single attestation pointer by its identifier, or null when no row matches.
/// </summary>
public async Task<AttestationPointerRecord?> GetByIdAsync(
    string tenantId,
    Guid pointerId,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

    const string sql = """
        SELECT tenant_id, pointer_id, finding_id, attestation_type, relationship,
        attestation_ref, verification_result, created_at, created_by,
        metadata, ledger_event_id
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id AND pointer_id = @pointer_id
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_read", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("pointer_id", pointerId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);

    // pointer_id is expected to be unique per tenant, so at most one row is read.
    return await reader.ReadAsync(cancellationToken).ConfigureAwait(false)
        ? ReadRecord(reader)
        : null;
}
/// <summary>
/// Loads every attestation pointer attached to a finding, newest first.
/// </summary>
public async Task<IReadOnlyList<AttestationPointerRecord>> GetByFindingIdAsync(
    string tenantId,
    string findingId,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);

    const string sql = """
        SELECT tenant_id, pointer_id, finding_id, attestation_type, relationship,
        attestation_ref, verification_result, created_at, created_by,
        metadata, ledger_event_id
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id AND finding_id = @finding_id
        ORDER BY created_at DESC
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_read", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("finding_id", findingId);

    return await ReadRecordsAsync(command, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Loads attestation pointers whose attestation reference carries the given digest, newest first.
/// The digest is matched via JSONB extraction (attestation_ref->>'digest').
/// </summary>
public async Task<IReadOnlyList<AttestationPointerRecord>> GetByDigestAsync(
    string tenantId,
    string digest,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(digest);

    const string sql = """
        SELECT tenant_id, pointer_id, finding_id, attestation_type, relationship,
        attestation_ref, verification_result, created_at, created_by,
        metadata, ledger_event_id
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id
        AND attestation_ref->>'digest' = @digest
        ORDER BY created_at DESC
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_read", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("digest", digest);

    return await ReadRecordsAsync(command, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Searches attestation pointers by building a WHERE clause dynamically from the
/// populated query fields. All values are bound as parameters (no string interpolation
/// of user input into SQL). Results are ordered newest first and paged via LIMIT/OFFSET.
/// </summary>
public async Task<IReadOnlyList<AttestationPointerRecord>> SearchAsync(
    AttestationPointerQuery query,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(query);
    ArgumentException.ThrowIfNullOrWhiteSpace(query.TenantId);

    var sqlBuilder = new StringBuilder("""
        SELECT tenant_id, pointer_id, finding_id, attestation_type, relationship,
        attestation_ref, verification_result, created_at, created_by,
        metadata, ledger_event_id
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id
        """);

    var parameters = new List<NpgsqlParameter>
    {
        new("tenant_id", query.TenantId) { NpgsqlDbType = NpgsqlDbType.Text }
    };

    // Finding ids and attestation types are matched with array parameters (= ANY).
    if (query.FindingIds is { Count: > 0 })
    {
        sqlBuilder.Append(" AND finding_id = ANY(@finding_ids)");
        parameters.Add(new NpgsqlParameter<string[]>("finding_ids", query.FindingIds.ToArray()));
    }

    if (query.AttestationTypes is { Count: > 0 })
    {
        sqlBuilder.Append(" AND attestation_type = ANY(@attestation_types)");
        parameters.Add(new NpgsqlParameter<string[]>("attestation_types",
            query.AttestationTypes.Select(t => t.ToString()).ToArray()));
    }

    // Verification status is expressed purely as SQL predicates over the JSONB column:
    // Verified/Failed inspect verification_result->>'verified'; Unverified means no result stored.
    if (query.VerificationStatus.HasValue && query.VerificationStatus.Value != AttestationVerificationFilter.Any)
    {
        sqlBuilder.Append(query.VerificationStatus.Value switch
        {
            AttestationVerificationFilter.Verified =>
                " AND verification_result IS NOT NULL AND (verification_result->>'verified')::boolean = true",
            AttestationVerificationFilter.Unverified =>
                " AND verification_result IS NULL",
            AttestationVerificationFilter.Failed =>
                " AND verification_result IS NOT NULL AND (verification_result->>'verified')::boolean = false",
            _ => ""
        });
    }

    if (query.CreatedAfter.HasValue)
    {
        sqlBuilder.Append(" AND created_at >= @created_after");
        parameters.Add(new NpgsqlParameter<DateTimeOffset>("created_after", query.CreatedAfter.Value)
        {
            NpgsqlDbType = NpgsqlDbType.TimestampTz
        });
    }

    if (query.CreatedBefore.HasValue)
    {
        sqlBuilder.Append(" AND created_at <= @created_before");
        parameters.Add(new NpgsqlParameter<DateTimeOffset>("created_before", query.CreatedBefore.Value)
        {
            NpgsqlDbType = NpgsqlDbType.TimestampTz
        });
    }

    if (!string.IsNullOrWhiteSpace(query.SignerIdentity))
    {
        sqlBuilder.Append(" AND attestation_ref->'signer_info'->>'subject' = @signer_identity");
        parameters.Add(new NpgsqlParameter<string>("signer_identity", query.SignerIdentity));
    }

    if (!string.IsNullOrWhiteSpace(query.PredicateType))
    {
        sqlBuilder.Append(" AND attestation_ref->>'predicate_type' = @predicate_type");
        parameters.Add(new NpgsqlParameter<string>("predicate_type", query.PredicateType));
    }

    // NOTE(review): created_at has no secondary sort key; pagination across rows with
    // identical created_at values may be unstable — confirm whether a tiebreaker is needed.
    sqlBuilder.Append(" ORDER BY created_at DESC");
    sqlBuilder.Append(" LIMIT @limit OFFSET @offset");
    parameters.Add(new NpgsqlParameter<int>("limit", query.Limit));
    parameters.Add(new NpgsqlParameter<int>("offset", query.Offset));

    await using var connection = await _dataSource.OpenConnectionAsync(
        query.TenantId, "attestation_pointer_search", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddRange(parameters.ToArray());

    return await ReadRecordsAsync(command, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Computes the attestation summary for a single finding with one aggregate query.
/// Always returns a summary; a finding with no attestations yields an empty one.
/// </summary>
public async Task<FindingAttestationSummary> GetSummaryAsync(
    string tenantId,
    string findingId,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);

    const string sql = """
        SELECT
        COUNT(*) as total_count,
        COUNT(*) FILTER (WHERE verification_result IS NOT NULL
        AND (verification_result->>'verified')::boolean = true) as verified_count,
        MAX(created_at) as latest_attestation,
        array_agg(DISTINCT attestation_type) as attestation_types
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id AND finding_id = @finding_id
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_summary", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("finding_id", findingId);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
    if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
    {
        // COUNT(*) comes back as a PostgreSQL bigint (int8): read it as Int64 and narrow
        // explicitly instead of relying on driver-side numeric conversion in GetInt32.
        var totalCount = (int)reader.GetInt64(0);
        var verifiedCount = (int)reader.GetInt64(1);
        var latestAttestation = reader.IsDBNull(2)
            ? (DateTimeOffset?)null
            : reader.GetFieldValue<DateTimeOffset>(2);
        // array_agg over an empty group yields SQL NULL rather than an empty array.
        var attestationTypesRaw = reader.IsDBNull(3)
            ? Array.Empty<string>()
            : reader.GetFieldValue<string[]>(3);

        // Single-pass parse: skip unknown names instead of parsing each value twice.
        var attestationTypes = new List<AttestationType>(attestationTypesRaw.Length);
        foreach (var raw in attestationTypesRaw)
        {
            if (Enum.TryParse<AttestationType>(raw, out var parsed))
            {
                attestationTypes.Add(parsed);
            }
        }

        var overallStatus = totalCount switch
        {
            0 => OverallVerificationStatus.NoAttestations,
            _ when verifiedCount == totalCount => OverallVerificationStatus.AllVerified,
            _ when verifiedCount > 0 => OverallVerificationStatus.PartiallyVerified,
            _ => OverallVerificationStatus.NoneVerified
        };

        return new FindingAttestationSummary(
            findingId,
            totalCount,
            verifiedCount,
            latestAttestation,
            attestationTypes,
            overallStatus);
    }

    // Aggregate queries without GROUP BY always yield a row, so this is a defensive fallback.
    return new FindingAttestationSummary(
        findingId,
        0,
        0,
        null,
        Array.Empty<AttestationType>(),
        OverallVerificationStatus.NoAttestations);
}
/// <summary>
/// Computes per-finding attestation summaries with a single grouped query. Findings that
/// have no attestation rows are filled in with explicit empty summaries so every
/// requested id gets a result.
/// </summary>
public async Task<IReadOnlyList<FindingAttestationSummary>> GetSummariesAsync(
    string tenantId,
    IReadOnlyList<string> findingIds,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentNullException.ThrowIfNull(findingIds);

    if (findingIds.Count == 0)
    {
        return Array.Empty<FindingAttestationSummary>();
    }

    const string sql = """
        SELECT
        finding_id,
        COUNT(*) as total_count,
        COUNT(*) FILTER (WHERE verification_result IS NOT NULL
        AND (verification_result->>'verified')::boolean = true) as verified_count,
        MAX(created_at) as latest_attestation,
        array_agg(DISTINCT attestation_type) as attestation_types
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id AND finding_id = ANY(@finding_ids)
        GROUP BY finding_id
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_summaries", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("finding_ids", findingIds.ToArray());

    var results = new List<FindingAttestationSummary>(findingIds.Count);
    var foundIds = new HashSet<string>(StringComparer.Ordinal);

    await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
    while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
    {
        var fid = reader.GetString(0);
        foundIds.Add(fid);

        // COUNT(*) comes back as a PostgreSQL bigint (int8): read it as Int64 and narrow
        // explicitly instead of relying on driver-side numeric conversion in GetInt32.
        var totalCount = (int)reader.GetInt64(1);
        var verifiedCount = (int)reader.GetInt64(2);
        var latestAttestation = reader.IsDBNull(3)
            ? (DateTimeOffset?)null
            : reader.GetFieldValue<DateTimeOffset>(3);
        var attestationTypesRaw = reader.IsDBNull(4)
            ? Array.Empty<string>()
            : reader.GetFieldValue<string[]>(4);

        // Single-pass parse: skip unknown names instead of parsing each value twice.
        var attestationTypes = new List<AttestationType>(attestationTypesRaw.Length);
        foreach (var raw in attestationTypesRaw)
        {
            if (Enum.TryParse<AttestationType>(raw, out var parsed))
            {
                attestationTypes.Add(parsed);
            }
        }

        var overallStatus = totalCount switch
        {
            0 => OverallVerificationStatus.NoAttestations,
            _ when verifiedCount == totalCount => OverallVerificationStatus.AllVerified,
            _ when verifiedCount > 0 => OverallVerificationStatus.PartiallyVerified,
            _ => OverallVerificationStatus.NoneVerified
        };

        results.Add(new FindingAttestationSummary(
            fid,
            totalCount,
            verifiedCount,
            latestAttestation,
            attestationTypes,
            overallStatus));
    }

    // Requested findings that produced no grouped row still get an explicit empty summary.
    foreach (var fid in findingIds.Where(f => !foundIds.Contains(f)))
    {
        results.Add(new FindingAttestationSummary(
            fid,
            0,
            0,
            null,
            Array.Empty<AttestationType>(),
            OverallVerificationStatus.NoAttestations));
    }

    return results;
}
/// <summary>
/// Checks whether a pointer with the same (finding, digest, attestation type) already
/// exists for the tenant; used by callers for idempotent inserts. The digest is matched
/// via JSONB extraction (attestation_ref->>'digest').
/// </summary>
public async Task<bool> ExistsAsync(
    string tenantId,
    string findingId,
    string digest,
    AttestationType attestationType,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);
    ArgumentException.ThrowIfNullOrWhiteSpace(digest);

    const string sql = """
        SELECT EXISTS(
        SELECT 1 FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id
        AND finding_id = @finding_id
        AND attestation_ref->>'digest' = @digest
        AND attestation_type = @attestation_type
        )
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_exists", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sql, connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddWithValue("tenant_id", tenantId);
    command.Parameters.AddWithValue("finding_id", findingId);
    command.Parameters.AddWithValue("digest", digest);
    command.Parameters.AddWithValue("attestation_type", attestationType.ToString());

    // EXISTS(...) yields a boolean scalar; anything else (including null) is treated as false.
    var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
    return result is true;
}
/// <summary>
/// Overwrites the stored verification result for a single attestation pointer.
/// Affects zero rows (silently) when the pointer does not exist for the tenant.
/// </summary>
public async Task UpdateVerificationResultAsync(
    string tenantId,
    Guid pointerId,
    VerificationResult verificationResult,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentNullException.ThrowIfNull(verificationResult);

    const string sql = """
        UPDATE ledger_attestation_pointers
        SET verification_result = @verification_result::jsonb
        WHERE tenant_id = @tenant_id AND pointer_id = @pointer_id
        """;

    // Serialize up front so a serialization failure surfaces before a connection is opened.
    var payload = JsonSerializer.Serialize(verificationResult, JsonOptions);

    await using var conn = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_update", cancellationToken).ConfigureAwait(false);

    await using var cmd = new NpgsqlCommand(sql, conn);
    cmd.CommandTimeout = _dataSource.CommandTimeoutSeconds;
    cmd.Parameters.AddWithValue("tenant_id", tenantId);
    cmd.Parameters.AddWithValue("pointer_id", pointerId);
    cmd.Parameters.AddWithValue("verification_result", payload);

    await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);

    _logger.LogDebug(
        "Updated verification result for attestation pointer {PointerId}, verified={Verified}",
        pointerId, verificationResult.Verified);
}
/// <summary>
/// Counts the attestation pointers recorded for a single finding within the tenant.
/// </summary>
public async Task<int> GetCountAsync(
    string tenantId,
    string findingId,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);

    const string sql = """
        SELECT COUNT(*)
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id AND finding_id = @finding_id
        """;

    await using var conn = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_count", cancellationToken).ConfigureAwait(false);

    await using var cmd = new NpgsqlCommand(sql, conn);
    cmd.CommandTimeout = _dataSource.CommandTimeoutSeconds;
    cmd.Parameters.AddWithValue("tenant_id", tenantId);
    cmd.Parameters.AddWithValue("finding_id", findingId);

    // COUNT(*) arrives as a bigint scalar; Convert narrows it to int.
    var scalar = await cmd.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
    return Convert.ToInt32(scalar);
}
/// <summary>
/// Lists distinct finding ids that have at least one attestation pointer, optionally
/// filtered by attestation type and verification outcome, ordered by finding id and
/// paged via LIMIT/OFFSET.
/// </summary>
/// <param name="tenantId">Tenant scope; required.</param>
/// <param name="verificationFilter">Optional verification-state filter; <c>Any</c> (or null) disables it.</param>
/// <param name="attestationTypes">Optional whitelist of attestation types.</param>
/// <param name="limit">Maximum number of finding ids to return; must not be negative.</param>
/// <param name="offset">Number of finding ids to skip; must not be negative.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="limit"/> or <paramref name="offset"/> is negative.</exception>
public async Task<IReadOnlyList<string>> GetFindingIdsWithAttestationsAsync(
    string tenantId,
    AttestationVerificationFilter? verificationFilter,
    IReadOnlyList<AttestationType>? attestationTypes,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    // Fail fast on invalid paging values instead of surfacing a Postgres error
    // ("LIMIT must not be negative") at execution time.
    ArgumentOutOfRangeException.ThrowIfNegative(limit);
    ArgumentOutOfRangeException.ThrowIfNegative(offset);

    var sqlBuilder = new StringBuilder("""
        SELECT DISTINCT finding_id
        FROM ledger_attestation_pointers
        WHERE tenant_id = @tenant_id
        """);
    var parameters = new List<NpgsqlParameter>
    {
        new("tenant_id", tenantId) { NpgsqlDbType = NpgsqlDbType.Text }
    };

    if (attestationTypes is { Count: > 0 })
    {
        sqlBuilder.Append(" AND attestation_type = ANY(@attestation_types)");
        parameters.Add(new NpgsqlParameter<string[]>("attestation_types",
            attestationTypes.Select(t => t.ToString()).ToArray()));
    }

    if (verificationFilter.HasValue && verificationFilter.Value != AttestationVerificationFilter.Any)
    {
        // Verification state is derived from the jsonb verification_result column:
        // absent => never verified; present => the stored 'verified' flag decides.
        sqlBuilder.Append(verificationFilter.Value switch
        {
            AttestationVerificationFilter.Verified =>
                " AND verification_result IS NOT NULL AND (verification_result->>'verified')::boolean = true",
            AttestationVerificationFilter.Unverified =>
                " AND verification_result IS NULL",
            AttestationVerificationFilter.Failed =>
                " AND verification_result IS NOT NULL AND (verification_result->>'verified')::boolean = false",
            _ => ""
        });
    }

    // Deterministic ordering keeps LIMIT/OFFSET paging stable.
    sqlBuilder.Append(" ORDER BY finding_id LIMIT @limit OFFSET @offset");
    parameters.Add(new NpgsqlParameter<int>("limit", limit));
    parameters.Add(new NpgsqlParameter<int>("offset", offset));

    await using var connection = await _dataSource.OpenConnectionAsync(
        tenantId, "attestation_pointer_findings", cancellationToken).ConfigureAwait(false);
    await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
    {
        CommandTimeout = _dataSource.CommandTimeoutSeconds
    };
    command.Parameters.AddRange(parameters.ToArray());

    var results = new List<string>();
    await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
    while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
    {
        results.Add(reader.GetString(0));
    }
    return results;
}
/// <summary>
/// Executes the prepared command and materializes every returned row into an
/// <see cref="AttestationPointerRecord"/> via <see cref="ReadRecord"/>.
/// </summary>
private static async Task<IReadOnlyList<AttestationPointerRecord>> ReadRecordsAsync(
    NpgsqlCommand command,
    CancellationToken cancellationToken)
{
    var records = new List<AttestationPointerRecord>();

    await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
    while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
    {
        records.Add(ReadRecord(reader));
    }

    return records;
}
/// <summary>
/// Maps the current reader row to an <see cref="AttestationPointerRecord"/>.
/// Ordinals follow the shared SELECT column order used by the pointer queries:
/// tenant_id, pointer_id, finding_id, attestation_type, relationship,
/// attestation_ref, verification_result, created_at, created_by, metadata, ledger_event_id.
/// </summary>
private static AttestationPointerRecord ReadRecord(NpgsqlDataReader reader) =>
    new(
        reader.GetString(0),
        reader.GetGuid(1),
        reader.GetString(2),
        Enum.Parse<AttestationType>(reader.GetString(3)),
        Enum.Parse<AttestationRelationship>(reader.GetString(4)),
        // attestation_ref is mandatory jsonb; the ! asserts a non-null deserialization result.
        JsonSerializer.Deserialize<AttestationRef>(reader.GetString(5), JsonOptions)!,
        reader.IsDBNull(6)
            ? null
            : JsonSerializer.Deserialize<VerificationResult>(reader.GetString(6), JsonOptions),
        reader.GetFieldValue<DateTimeOffset>(7),
        reader.GetString(8),
        reader.IsDBNull(9)
            ? null
            : JsonSerializer.Deserialize<Dictionary<string, object>>(reader.GetString(9), JsonOptions),
        reader.IsDBNull(10) ? null : (Guid?)reader.GetGuid(10));
}

View File

@@ -0,0 +1,402 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
using System.Text;
using System.Text.Json;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
/// <summary>
/// PostgreSQL implementation of snapshot repository.
/// </summary>
public sealed class PostgresSnapshotRepository : ISnapshotRepository
{
    private readonly NpgsqlDataSource _dataSource;
    // Serializer for the jsonb metadata / include_entity_types columns: camelCase, compact.
    private readonly JsonSerializerOptions _jsonOptions;

    public PostgresSnapshotRepository(NpgsqlDataSource dataSource)
    {
        _dataSource = dataSource;
        _jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false
        };
    }

    /// <summary>
    /// Inserts a new snapshot row in the <c>Creating</c> state and returns its domain representation.
    /// Sequence and timestamp default to the supplied current values unless the input pins them;
    /// statistics start at zero and merkle_root / dsse_digest start as NULL.
    /// </summary>
    public async Task<LedgerSnapshot> CreateAsync(
        string tenantId,
        CreateSnapshotInput input,
        long currentSequence,
        DateTimeOffset currentTimestamp,
        CancellationToken ct = default)
    {
        var snapshotId = Guid.NewGuid();
        // NOTE(review): direct wall-clock read; consider injecting TimeProvider for testability.
        var createdAt = DateTimeOffset.UtcNow;
        // Expiry is relative to creation time when a TTL was requested.
        var expiresAt = input.ExpiresIn.HasValue
            ? createdAt.Add(input.ExpiresIn.Value)
            : (DateTimeOffset?)null;
        var sequenceNumber = input.AtSequence ?? currentSequence;
        var timestamp = input.AtTimestamp ?? currentTimestamp;
        var initialStats = new SnapshotStatistics(0, 0, 0, 0, 0, 0);
        var metadataJson = input.Metadata != null
            ? JsonSerializer.Serialize(input.Metadata, _jsonOptions)
            : null;
        // Entity types are stored as a JSON array of enum names.
        var entityTypesJson = input.IncludeEntityTypes != null
            ? JsonSerializer.Serialize(input.IncludeEntityTypes.Select(e => e.ToString()).ToList(), _jsonOptions)
            : null;
        const string sql = """
            INSERT INTO ledger_snapshots (
            tenant_id, snapshot_id, label, description, status,
            created_at, expires_at, sequence_number, snapshot_timestamp,
            findings_count, vex_statements_count, advisories_count,
            sboms_count, events_count, size_bytes,
            merkle_root, dsse_digest, metadata, include_entity_types, sign_requested
            ) VALUES (
            @tenantId, @snapshotId, @label, @description, @status,
            @createdAt, @expiresAt, @sequenceNumber, @timestamp,
            @findingsCount, @vexCount, @advisoriesCount,
            @sbomsCount, @eventsCount, @sizeBytes,
            @merkleRoot, @dsseDigest, @metadata::jsonb, @entityTypes::jsonb, @sign
            )
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        cmd.Parameters.AddWithValue("label", (object?)input.Label ?? DBNull.Value);
        cmd.Parameters.AddWithValue("description", (object?)input.Description ?? DBNull.Value);
        cmd.Parameters.AddWithValue("status", SnapshotStatus.Creating.ToString());
        cmd.Parameters.AddWithValue("createdAt", createdAt);
        cmd.Parameters.AddWithValue("expiresAt", (object?)expiresAt ?? DBNull.Value);
        cmd.Parameters.AddWithValue("sequenceNumber", sequenceNumber);
        cmd.Parameters.AddWithValue("timestamp", timestamp);
        cmd.Parameters.AddWithValue("findingsCount", initialStats.FindingsCount);
        cmd.Parameters.AddWithValue("vexCount", initialStats.VexStatementsCount);
        cmd.Parameters.AddWithValue("advisoriesCount", initialStats.AdvisoriesCount);
        cmd.Parameters.AddWithValue("sbomsCount", initialStats.SbomsCount);
        cmd.Parameters.AddWithValue("eventsCount", initialStats.EventsCount);
        cmd.Parameters.AddWithValue("sizeBytes", initialStats.SizeBytes);
        cmd.Parameters.AddWithValue("merkleRoot", DBNull.Value);
        cmd.Parameters.AddWithValue("dsseDigest", DBNull.Value);
        cmd.Parameters.AddWithValue("metadata", (object?)metadataJson ?? DBNull.Value);
        cmd.Parameters.AddWithValue("entityTypes", (object?)entityTypesJson ?? DBNull.Value);
        cmd.Parameters.AddWithValue("sign", input.Sign);
        await cmd.ExecuteNonQueryAsync(ct);
        // Echo the inserted state back as a domain object (merkle root / digest not yet computed).
        return new LedgerSnapshot(
            tenantId,
            snapshotId,
            input.Label,
            input.Description,
            SnapshotStatus.Creating,
            createdAt,
            expiresAt,
            sequenceNumber,
            timestamp,
            initialStats,
            null,
            null,
            input.Metadata);
    }

    /// <summary>
    /// Fetches a snapshot by id, or null when not found.
    /// Note: no status filter — Deleted/Expired rows are returned as-is.
    /// </summary>
    public async Task<LedgerSnapshot?> GetByIdAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT tenant_id, snapshot_id, label, description, status,
            created_at, expires_at, sequence_number, snapshot_timestamp,
            findings_count, vex_statements_count, advisories_count,
            sboms_count, events_count, size_bytes,
            merkle_root, dsse_digest, metadata
            FROM ledger_snapshots
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        if (!await reader.ReadAsync(ct))
            return null;
        return MapSnapshot(reader);
    }

    /// <summary>
    /// Lists snapshots for a tenant with optional status/date filters and token-based paging.
    /// Fetches PageSize + 1 rows to detect whether a next page exists.
    /// </summary>
    public async Task<(IReadOnlyList<LedgerSnapshot> Snapshots, string? NextPageToken)> ListAsync(
        SnapshotListQuery query,
        CancellationToken ct = default)
    {
        var sql = new StringBuilder("""
            SELECT tenant_id, snapshot_id, label, description, status,
            created_at, expires_at, sequence_number, snapshot_timestamp,
            findings_count, vex_statements_count, advisories_count,
            sboms_count, events_count, size_bytes,
            merkle_root, dsse_digest, metadata
            FROM ledger_snapshots
            WHERE tenant_id = @tenantId
            """);
        var parameters = new List<NpgsqlParameter>
        {
            new("tenantId", query.TenantId)
        };
        if (query.Status.HasValue)
        {
            sql.Append(" AND status = @status");
            parameters.Add(new NpgsqlParameter("status", query.Status.Value.ToString()));
        }
        if (query.CreatedAfter.HasValue)
        {
            sql.Append(" AND created_at >= @createdAfter");
            parameters.Add(new NpgsqlParameter("createdAfter", query.CreatedAfter.Value));
        }
        if (query.CreatedBefore.HasValue)
        {
            sql.Append(" AND created_at < @createdBefore");
            parameters.Add(new NpgsqlParameter("createdBefore", query.CreatedBefore.Value));
        }
        // NOTE(review): the page token filters snapshot_id > @lastId while the ordering below is
        // created_at DESC, snapshot_id — the keyset does not match the sort order, so pages can
        // skip or repeat rows. Confirm intended token semantics (likely needs a (created_at,
        // snapshot_id) composite cursor).
        if (!string.IsNullOrEmpty(query.PageToken))
        {
            if (Guid.TryParse(query.PageToken, out var lastId))
            {
                sql.Append(" AND snapshot_id > @lastId");
                parameters.Add(new NpgsqlParameter("lastId", lastId));
            }
        }
        sql.Append(" ORDER BY created_at DESC, snapshot_id");
        sql.Append(" LIMIT @limit");
        parameters.Add(new NpgsqlParameter("limit", query.PageSize + 1));
        await using var cmd = _dataSource.CreateCommand(sql.ToString());
        cmd.Parameters.AddRange(parameters.ToArray());
        var snapshots = new List<LedgerSnapshot>();
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        while (await reader.ReadAsync(ct) && snapshots.Count < query.PageSize)
        {
            snapshots.Add(MapSnapshot(reader));
        }
        string? nextPageToken = null;
        // An extra readable row means more data exists; emit the last returned id as the token.
        // NOTE(review): snapshots.Last() assumes PageSize >= 1 — confirm PageSize is validated upstream.
        if (await reader.ReadAsync(ct))
        {
            nextPageToken = snapshots.Last().SnapshotId.ToString();
        }
        return (snapshots, nextPageToken);
    }

    /// <summary>
    /// Sets the snapshot status unconditionally (no state-machine check); returns false when the
    /// snapshot does not exist.
    /// </summary>
    public async Task<bool> UpdateStatusAsync(
        string tenantId,
        Guid snapshotId,
        SnapshotStatus newStatus,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE ledger_snapshots
            SET status = @status, updated_at = @updatedAt
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        cmd.Parameters.AddWithValue("status", newStatus.ToString());
        cmd.Parameters.AddWithValue("updatedAt", DateTimeOffset.UtcNow);
        return await cmd.ExecuteNonQueryAsync(ct) > 0;
    }

    /// <summary>
    /// Replaces all six statistic counters for a snapshot; returns false when not found.
    /// </summary>
    public async Task<bool> UpdateStatisticsAsync(
        string tenantId,
        Guid snapshotId,
        SnapshotStatistics statistics,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE ledger_snapshots
            SET findings_count = @findingsCount,
            vex_statements_count = @vexCount,
            advisories_count = @advisoriesCount,
            sboms_count = @sbomsCount,
            events_count = @eventsCount,
            size_bytes = @sizeBytes,
            updated_at = @updatedAt
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        cmd.Parameters.AddWithValue("findingsCount", statistics.FindingsCount);
        cmd.Parameters.AddWithValue("vexCount", statistics.VexStatementsCount);
        cmd.Parameters.AddWithValue("advisoriesCount", statistics.AdvisoriesCount);
        cmd.Parameters.AddWithValue("sbomsCount", statistics.SbomsCount);
        cmd.Parameters.AddWithValue("eventsCount", statistics.EventsCount);
        cmd.Parameters.AddWithValue("sizeBytes", statistics.SizeBytes);
        cmd.Parameters.AddWithValue("updatedAt", DateTimeOffset.UtcNow);
        return await cmd.ExecuteNonQueryAsync(ct) > 0;
    }

    /// <summary>
    /// Records the computed merkle root and optional DSSE digest; returns false when not found.
    /// </summary>
    public async Task<bool> SetMerkleRootAsync(
        string tenantId,
        Guid snapshotId,
        string merkleRoot,
        string? dsseDigest,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE ledger_snapshots
            SET merkle_root = @merkleRoot,
            dsse_digest = @dsseDigest,
            updated_at = @updatedAt
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        cmd.Parameters.AddWithValue("merkleRoot", merkleRoot);
        cmd.Parameters.AddWithValue("dsseDigest", (object?)dsseDigest ?? DBNull.Value);
        cmd.Parameters.AddWithValue("updatedAt", DateTimeOffset.UtcNow);
        return await cmd.ExecuteNonQueryAsync(ct) > 0;
    }

    /// <summary>
    /// Marks all Available snapshots whose expiry is before <paramref name="cutoff"/> as Expired,
    /// returning the number of rows updated.
    /// Note: there is no tenant_id filter here — this sweep operates across all tenants.
    /// </summary>
    public async Task<int> ExpireSnapshotsAsync(
        DateTimeOffset cutoff,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE ledger_snapshots
            SET status = @expiredStatus, updated_at = @updatedAt
            WHERE expires_at IS NOT NULL
            AND expires_at < @cutoff
            AND status = @availableStatus
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("expiredStatus", SnapshotStatus.Expired.ToString());
        cmd.Parameters.AddWithValue("availableStatus", SnapshotStatus.Available.ToString());
        cmd.Parameters.AddWithValue("cutoff", cutoff);
        cmd.Parameters.AddWithValue("updatedAt", DateTimeOffset.UtcNow);
        return await cmd.ExecuteNonQueryAsync(ct);
    }

    /// <summary>
    /// Soft-deletes a snapshot by setting its status to Deleted (the row is retained);
    /// returns false when not found. No current-status check is performed.
    /// </summary>
    public async Task<bool> DeleteAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default)
    {
        const string sql = """
            UPDATE ledger_snapshots
            SET status = @deletedStatus, updated_at = @updatedAt
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        cmd.Parameters.AddWithValue("deletedStatus", SnapshotStatus.Deleted.ToString());
        cmd.Parameters.AddWithValue("updatedAt", DateTimeOffset.UtcNow);
        return await cmd.ExecuteNonQueryAsync(ct) > 0;
    }

    /// <summary>
    /// Returns the most recently created Available snapshot for the tenant, or null if none.
    /// </summary>
    public async Task<LedgerSnapshot?> GetLatestAsync(
        string tenantId,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT tenant_id, snapshot_id, label, description, status,
            created_at, expires_at, sequence_number, snapshot_timestamp,
            findings_count, vex_statements_count, advisories_count,
            sboms_count, events_count, size_bytes,
            merkle_root, dsse_digest, metadata
            FROM ledger_snapshots
            WHERE tenant_id = @tenantId AND status = @status
            ORDER BY created_at DESC
            LIMIT 1
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("status", SnapshotStatus.Available.ToString());
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        if (!await reader.ReadAsync(ct))
            return null;
        return MapSnapshot(reader);
    }

    /// <summary>
    /// True when a row with this id exists for the tenant, regardless of status
    /// (Deleted/Expired snapshots also count — confirm that is intended for callers).
    /// </summary>
    public async Task<bool> ExistsAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default)
    {
        const string sql = """
            SELECT 1 FROM ledger_snapshots
            WHERE tenant_id = @tenantId AND snapshot_id = @snapshotId
            LIMIT 1
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("snapshotId", snapshotId);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        return await reader.ReadAsync(ct);
    }

    /// <summary>
    /// Maps the current reader row (using the shared SELECT column list) to a LedgerSnapshot,
    /// deserializing the optional jsonb metadata column.
    /// </summary>
    private LedgerSnapshot MapSnapshot(NpgsqlDataReader reader)
    {
        var metadataJson = reader.IsDBNull(reader.GetOrdinal("metadata"))
            ? null
            : reader.GetString(reader.GetOrdinal("metadata"));
        Dictionary<string, object>? metadata = null;
        if (!string.IsNullOrEmpty(metadataJson))
        {
            metadata = JsonSerializer.Deserialize<Dictionary<string, object>>(metadataJson, _jsonOptions);
        }
        return new LedgerSnapshot(
            TenantId: reader.GetString(reader.GetOrdinal("tenant_id")),
            SnapshotId: reader.GetGuid(reader.GetOrdinal("snapshot_id")),
            Label: reader.IsDBNull(reader.GetOrdinal("label")) ? null : reader.GetString(reader.GetOrdinal("label")),
            Description: reader.IsDBNull(reader.GetOrdinal("description")) ? null : reader.GetString(reader.GetOrdinal("description")),
            Status: Enum.Parse<SnapshotStatus>(reader.GetString(reader.GetOrdinal("status"))),
            CreatedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("created_at")),
            ExpiresAt: reader.IsDBNull(reader.GetOrdinal("expires_at")) ? null : reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("expires_at")),
            SequenceNumber: reader.GetInt64(reader.GetOrdinal("sequence_number")),
            Timestamp: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("snapshot_timestamp")),
            Statistics: new SnapshotStatistics(
                FindingsCount: reader.GetInt64(reader.GetOrdinal("findings_count")),
                VexStatementsCount: reader.GetInt64(reader.GetOrdinal("vex_statements_count")),
                AdvisoriesCount: reader.GetInt64(reader.GetOrdinal("advisories_count")),
                SbomsCount: reader.GetInt64(reader.GetOrdinal("sboms_count")),
                EventsCount: reader.GetInt64(reader.GetOrdinal("events_count")),
                SizeBytes: reader.GetInt64(reader.GetOrdinal("size_bytes"))),
            MerkleRoot: reader.IsDBNull(reader.GetOrdinal("merkle_root")) ? null : reader.GetString(reader.GetOrdinal("merkle_root")),
            DsseDigest: reader.IsDBNull(reader.GetOrdinal("dsse_digest")) ? null : reader.GetString(reader.GetOrdinal("dsse_digest")),
            Metadata: metadata);
    }
}

View File

@@ -0,0 +1,832 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using Npgsql;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
/// <summary>
/// PostgreSQL implementation of time-travel repository.
/// </summary>
public sealed class PostgresTimeTravelRepository : ITimeTravelRepository
{
private readonly NpgsqlDataSource _dataSource;
// Used to translate a snapshot id into its pinned (sequence, timestamp) query point.
private readonly ISnapshotRepository _snapshotRepository;
// Serializer for jsonb payload/label columns: camelCase, compact.
private readonly JsonSerializerOptions _jsonOptions;

/// <summary>
/// Creates the repository over a shared Npgsql data source and the snapshot repository
/// used for snapshot-relative time-travel queries.
/// </summary>
public PostgresTimeTravelRepository(
    NpgsqlDataSource dataSource,
    ISnapshotRepository snapshotRepository)
{
    _dataSource = dataSource;
    _snapshotRepository = snapshotRepository;
    _jsonOptions = new JsonSerializerOptions
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };
}
/// <summary>
/// Returns the tenant's newest ledger position (max sequence number and recorded-at time).
/// COALESCE guarantees a row even for an empty ledger: sequence 0 at the database's NOW().
/// </summary>
public async Task<QueryPoint> GetCurrentPointAsync(
    string tenantId,
    CancellationToken ct = default)
{
    const string sql = """
        SELECT COALESCE(MAX(sequence_number), 0) as seq,
        COALESCE(MAX(recorded_at), NOW()) as ts
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);

    await using var row = await command.ExecuteReaderAsync(ct);
    // An aggregate query always yields exactly one row.
    await row.ReadAsync(ct);

    var sequence = row.GetInt64(row.GetOrdinal("seq"));
    var recordedAt = row.GetFieldValue<DateTimeOffset>(row.GetOrdinal("ts"));
    return new QueryPoint(Timestamp: recordedAt, SequenceNumber: sequence);
}
/// <summary>
/// Resolves the ledger position to query against, honouring the first constraint provided
/// in priority order: snapshot id, then explicit sequence number, then timestamp. With no
/// constraint the current (latest) point is returned.
/// Returns null when a supplied snapshot id or sequence number does not exist for the tenant.
/// </summary>
public async Task<QueryPoint?> ResolveQueryPointAsync(
    string tenantId,
    DateTimeOffset? timestamp,
    long? sequence,
    Guid? snapshotId,
    CancellationToken ct = default)
{
    // If snapshot ID is provided, get point from snapshot
    if (snapshotId.HasValue)
    {
        var snapshot = await _snapshotRepository.GetByIdAsync(tenantId, snapshotId.Value, ct);
        if (snapshot == null)
            return null;
        return new QueryPoint(
            Timestamp: snapshot.Timestamp,
            SequenceNumber: snapshot.SequenceNumber,
            SnapshotId: snapshotId);
    }
    // If sequence is provided, get timestamp for that sequence
    if (sequence.HasValue)
    {
        const string sql = """
            SELECT recorded_at FROM ledger_events
            WHERE tenant_id = @tenantId AND sequence_number = @seq
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("seq", sequence.Value);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        // Unknown sequence numbers resolve to null rather than an approximate point.
        if (!await reader.ReadAsync(ct))
            return null;
        return new QueryPoint(
            Timestamp: reader.GetFieldValue<DateTimeOffset>(0),
            SequenceNumber: sequence.Value);
    }
    // If timestamp is provided, find the sequence at that point
    if (timestamp.HasValue)
    {
        const string sql = """
            SELECT sequence_number, recorded_at FROM ledger_events
            WHERE tenant_id = @tenantId AND recorded_at <= @ts
            ORDER BY sequence_number DESC
            LIMIT 1
            """;
        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("ts", timestamp.Value);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        if (!await reader.ReadAsync(ct))
        {
            // No events before timestamp, return point at 0
            return new QueryPoint(timestamp.Value, 0);
        }
        // The returned point carries the recorded-at of the matched event, not the requested timestamp.
        return new QueryPoint(
            Timestamp: reader.GetFieldValue<DateTimeOffset>(1),
            SequenceNumber: reader.GetInt64(0));
    }
    // No constraints - return current point
    return await GetCurrentPointAsync(tenantId, ct);
}
/// <summary>
/// Returns finding state as of a historical point, reconstructed from the event log:
/// for each finding the latest event at or before the resolved sequence wins (rn = 1).
/// Supports status / severity range / artifact / vuln filters and keyset pagination
/// ordered by finding_id. The page token is the last finding id of the previous page.
/// </summary>
public async Task<HistoricalQueryResponse<FindingHistoryItem>> QueryFindingsAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);
    // Unresolvable point (unknown snapshot/sequence) => empty response at a synthetic point.
    if (queryPoint == null)
    {
        return new HistoricalQueryResponse<FindingHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Finding,
            Array.Empty<FindingHistoryItem>(),
            null,
            0);
    }
    // Query findings state at the sequence point using event sourcing.
    // first_seen uses a MIN window over all of a finding's events; the remaining columns
    // come from the newest event (rn = 1 after the DESC row-number partition).
    var sql = new StringBuilder("""
        WITH finding_state AS (
        SELECT
        e.finding_id,
        e.artifact_id,
        e.payload->>'vulnId' as vuln_id,
        e.payload->>'status' as status,
        (e.payload->>'severity')::decimal as severity,
        e.policy_version,
        MIN(e.recorded_at) OVER (PARTITION BY e.finding_id) as first_seen,
        e.recorded_at as last_updated,
        e.payload->'labels' as labels,
        ROW_NUMBER() OVER (PARTITION BY e.finding_id ORDER BY e.sequence_number DESC) as rn
        FROM ledger_events e
        WHERE e.tenant_id = @tenantId
        AND e.sequence_number <= @seq
        AND e.finding_id IS NOT NULL
        )
        SELECT finding_id, artifact_id, vuln_id, status, severity,
        policy_version, first_seen, last_updated, labels
        FROM finding_state
        WHERE rn = 1
        """);
    var parameters = new List<NpgsqlParameter>
    {
        new("tenantId", request.TenantId),
        new("seq", queryPoint.SequenceNumber)
    };
    // Apply filters — these append to the outer SELECT, i.e. after "WHERE rn = 1",
    // so they filter the reconstructed latest state, not the raw events.
    if (request.Filters != null)
    {
        if (!string.IsNullOrEmpty(request.Filters.Status))
        {
            sql.Append(" AND status = @status");
            parameters.Add(new NpgsqlParameter("status", request.Filters.Status));
        }
        if (request.Filters.SeverityMin.HasValue)
        {
            sql.Append(" AND severity >= @sevMin");
            parameters.Add(new NpgsqlParameter("sevMin", request.Filters.SeverityMin.Value));
        }
        if (request.Filters.SeverityMax.HasValue)
        {
            sql.Append(" AND severity <= @sevMax");
            parameters.Add(new NpgsqlParameter("sevMax", request.Filters.SeverityMax.Value));
        }
        if (!string.IsNullOrEmpty(request.Filters.ArtifactId))
        {
            sql.Append(" AND artifact_id = @artifactId");
            parameters.Add(new NpgsqlParameter("artifactId", request.Filters.ArtifactId));
        }
        if (!string.IsNullOrEmpty(request.Filters.VulnId))
        {
            sql.Append(" AND vuln_id = @vulnId");
            parameters.Add(new NpgsqlParameter("vulnId", request.Filters.VulnId));
        }
    }
    // Pagination: keyset on finding_id, consistent with the ORDER BY below.
    if (!string.IsNullOrEmpty(request.PageToken))
    {
        sql.Append(" AND finding_id > @lastId");
        parameters.Add(new NpgsqlParameter("lastId", request.PageToken));
    }
    // Fetch one extra row to detect whether another page exists.
    sql.Append(" ORDER BY finding_id LIMIT @limit");
    parameters.Add(new NpgsqlParameter("limit", request.PageSize + 1));
    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddRange(parameters.ToArray());
    var items = new List<FindingHistoryItem>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct) && items.Count < request.PageSize)
    {
        var labelsJson = reader.IsDBNull(reader.GetOrdinal("labels"))
            ? null
            : reader.GetString(reader.GetOrdinal("labels"));
        items.Add(new FindingHistoryItem(
            FindingId: reader.GetString(reader.GetOrdinal("finding_id")),
            ArtifactId: reader.GetString(reader.GetOrdinal("artifact_id")),
            VulnId: reader.GetString(reader.GetOrdinal("vuln_id")),
            Status: reader.GetString(reader.GetOrdinal("status")),
            Severity: reader.IsDBNull(reader.GetOrdinal("severity")) ? null : reader.GetDecimal(reader.GetOrdinal("severity")),
            PolicyVersion: reader.IsDBNull(reader.GetOrdinal("policy_version")) ? null : reader.GetString(reader.GetOrdinal("policy_version")),
            FirstSeen: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("first_seen")),
            LastUpdated: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("last_updated")),
            Labels: string.IsNullOrEmpty(labelsJson)
                ? null
                : JsonSerializer.Deserialize<Dictionary<string, string>>(labelsJson, _jsonOptions)));
    }
    string? nextPageToken = null;
    // NOTE(review): items.Last() assumes PageSize >= 1 — confirm PageSize is validated upstream.
    if (await reader.ReadAsync(ct))
    {
        nextPageToken = items.Last().FindingId;
    }
    return new HistoricalQueryResponse<FindingHistoryItem>(
        queryPoint,
        EntityType.Finding,
        items,
        nextPageToken,
        items.Count);
}
/// <summary>
/// Returns VEX statement state as of a historical point: for each statementId the latest
/// 'vex.*' event at or before the resolved sequence wins (rn = 1).
/// NOTE(review): unlike QueryFindingsAsync, request.Filters and request.PageToken are not
/// applied here — only PageSize caps the result and no next-page token is produced; confirm
/// whether pagination is intentionally unsupported for VEX history.
/// </summary>
public async Task<HistoricalQueryResponse<VexHistoryItem>> QueryVexAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);
    // Unresolvable point (unknown snapshot/sequence) => empty response at a synthetic point.
    if (queryPoint == null)
    {
        return new HistoricalQueryResponse<VexHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Vex,
            Array.Empty<VexHistoryItem>(),
            null,
            0);
    }
    const string sql = """
        WITH vex_state AS (
        SELECT
        e.payload->>'statementId' as statement_id,
        e.payload->>'vulnId' as vuln_id,
        e.payload->>'productId' as product_id,
        e.payload->>'status' as status,
        e.payload->>'justification' as justification,
        (e.payload->>'issuedAt')::timestamptz as issued_at,
        (e.payload->>'expiresAt')::timestamptz as expires_at,
        ROW_NUMBER() OVER (PARTITION BY e.payload->>'statementId' ORDER BY e.sequence_number DESC) as rn
        FROM ledger_events e
        WHERE e.tenant_id = @tenantId
        AND e.sequence_number <= @seq
        AND e.event_type LIKE 'vex.%'
        )
        SELECT statement_id, vuln_id, product_id, status, justification, issued_at, expires_at
        FROM vex_state
        WHERE rn = 1
        ORDER BY statement_id
        LIMIT @limit
        """;
    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("seq", queryPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("limit", request.PageSize);
    var items = new List<VexHistoryItem>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        items.Add(new VexHistoryItem(
            StatementId: reader.GetString(reader.GetOrdinal("statement_id")),
            VulnId: reader.GetString(reader.GetOrdinal("vuln_id")),
            ProductId: reader.GetString(reader.GetOrdinal("product_id")),
            Status: reader.GetString(reader.GetOrdinal("status")),
            Justification: reader.IsDBNull(reader.GetOrdinal("justification")) ? null : reader.GetString(reader.GetOrdinal("justification")),
            // NOTE(review): issued_at is read non-nullably; this appears to assume every vex.*
            // payload carries issuedAt — a NULL would throw here. Confirm against event producers.
            IssuedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("issued_at")),
            ExpiresAt: reader.IsDBNull(reader.GetOrdinal("expires_at")) ? null : reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("expires_at"))));
    }
    return new HistoricalQueryResponse<VexHistoryItem>(
        queryPoint,
        EntityType.Vex,
        items,
        null,
        items.Count);
}
/// <summary>
/// Returns advisory state as of a historical point: for each advisoryId the latest
/// 'advisory.*' event at or before the resolved sequence wins (rn = 1).
/// NOTE(review): request.Filters and request.PageToken are not applied here — only PageSize
/// caps the result and no next-page token is produced; mirrors QueryVexAsync.
/// </summary>
public async Task<HistoricalQueryResponse<AdvisoryHistoryItem>> QueryAdvisoriesAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);
    // Unresolvable point (unknown snapshot/sequence) => empty response at a synthetic point.
    if (queryPoint == null)
    {
        return new HistoricalQueryResponse<AdvisoryHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Advisory,
            Array.Empty<AdvisoryHistoryItem>(),
            null,
            0);
    }
    const string sql = """
        WITH advisory_state AS (
        SELECT
        e.payload->>'advisoryId' as advisory_id,
        e.payload->>'source' as source,
        e.payload->>'title' as title,
        (e.payload->>'cvssScore')::decimal as cvss_score,
        (e.payload->>'publishedAt')::timestamptz as published_at,
        (e.payload->>'modifiedAt')::timestamptz as modified_at,
        ROW_NUMBER() OVER (PARTITION BY e.payload->>'advisoryId' ORDER BY e.sequence_number DESC) as rn
        FROM ledger_events e
        WHERE e.tenant_id = @tenantId
        AND e.sequence_number <= @seq
        AND e.event_type LIKE 'advisory.%'
        )
        SELECT advisory_id, source, title, cvss_score, published_at, modified_at
        FROM advisory_state
        WHERE rn = 1
        ORDER BY advisory_id
        LIMIT @limit
        """;
    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("seq", queryPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("limit", request.PageSize);
    var items = new List<AdvisoryHistoryItem>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        items.Add(new AdvisoryHistoryItem(
            AdvisoryId: reader.GetString(reader.GetOrdinal("advisory_id")),
            Source: reader.GetString(reader.GetOrdinal("source")),
            Title: reader.GetString(reader.GetOrdinal("title")),
            CvssScore: reader.IsDBNull(reader.GetOrdinal("cvss_score")) ? null : reader.GetDecimal(reader.GetOrdinal("cvss_score")),
            // NOTE(review): published_at is read non-nullably; assumes every advisory.* payload
            // carries publishedAt — a NULL would throw here. Confirm against event producers.
            PublishedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("published_at")),
            ModifiedAt: reader.IsDBNull(reader.GetOrdinal("modified_at")) ? null : reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("modified_at"))));
    }
    return new HistoricalQueryResponse<AdvisoryHistoryItem>(
        queryPoint,
        EntityType.Advisory,
        items,
        null,
        items.Count);
}
/// <summary>
/// Replays ledger events for a tenant, filtered by optional sequence range, timestamp range,
/// chain ids, and event types. Returns at most <c>request.PageSize</c> events in sequence order,
/// with <see cref="ReplayMetadata.HasMore"/> indicating whether events exist beyond the page.
/// </summary>
/// <param name="request">Replay filters, page size, and payload-inclusion flag.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The replayed events plus metadata (range, count, elapsed time).</returns>
public async Task<(IReadOnlyList<ReplayEvent> Events, ReplayMetadata Metadata)> ReplayEventsAsync(
    ReplayRequest request,
    CancellationToken ct = default)
{
    var sw = Stopwatch.StartNew();
    var sql = new StringBuilder("""
        SELECT event_id, sequence_number, chain_id, chain_sequence,
            event_type, occurred_at, recorded_at,
            actor_id, actor_type, artifact_id, finding_id,
            policy_version, event_hash, previous_hash, payload
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """);
    var parameters = new List<NpgsqlParameter>
    {
        new("tenantId", request.TenantId)
    };
    // Each optional filter appends both a predicate and its matching parameter.
    if (request.FromSequence.HasValue)
    {
        sql.Append(" AND sequence_number >= @fromSeq");
        parameters.Add(new NpgsqlParameter("fromSeq", request.FromSequence.Value));
    }
    if (request.ToSequence.HasValue)
    {
        sql.Append(" AND sequence_number <= @toSeq");
        parameters.Add(new NpgsqlParameter("toSeq", request.ToSequence.Value));
    }
    if (request.FromTimestamp.HasValue)
    {
        sql.Append(" AND recorded_at >= @fromTs");
        parameters.Add(new NpgsqlParameter("fromTs", request.FromTimestamp.Value));
    }
    if (request.ToTimestamp.HasValue)
    {
        sql.Append(" AND recorded_at <= @toTs");
        parameters.Add(new NpgsqlParameter("toTs", request.ToTimestamp.Value));
    }
    if (request.ChainIds?.Count > 0)
    {
        sql.Append(" AND chain_id = ANY(@chainIds)");
        parameters.Add(new NpgsqlParameter("chainIds", request.ChainIds.ToArray()));
    }
    if (request.EventTypes?.Count > 0)
    {
        sql.Append(" AND event_type = ANY(@eventTypes)");
        parameters.Add(new NpgsqlParameter("eventTypes", request.EventTypes.ToArray()));
    }
    // Fetch one extra row beyond the page so HasMore can be decided without a COUNT query.
    sql.Append(" ORDER BY sequence_number LIMIT @limit");
    parameters.Add(new NpgsqlParameter("limit", request.PageSize + 1));
    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddRange(parameters.ToArray());
    var events = new List<ReplayEvent>();
    var hasMore = false;
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        // BUGFIX: the previous loop condition (`ReadAsync && Count < PageSize`) consumed
        // and discarded the sentinel row once the page filled, then re-read the already
        // exhausted reader — reporting HasMore=false even when exactly one extra row
        // existed. Detect the overflow row explicitly instead.
        if (events.Count >= request.PageSize)
        {
            hasMore = true;
            break;
        }
        object? payload = null;
        if (request.IncludePayload && !reader.IsDBNull(reader.GetOrdinal("payload")))
        {
            var payloadJson = reader.GetString(reader.GetOrdinal("payload"));
            payload = JsonSerializer.Deserialize<object>(payloadJson, _jsonOptions);
        }
        events.Add(new ReplayEvent(
            EventId: reader.GetGuid(reader.GetOrdinal("event_id")),
            SequenceNumber: reader.GetInt64(reader.GetOrdinal("sequence_number")),
            ChainId: reader.GetGuid(reader.GetOrdinal("chain_id")),
            ChainSequence: reader.GetInt32(reader.GetOrdinal("chain_sequence")),
            EventType: reader.GetString(reader.GetOrdinal("event_type")),
            OccurredAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("occurred_at")),
            RecordedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("recorded_at")),
            ActorId: reader.IsDBNull(reader.GetOrdinal("actor_id")) ? null : reader.GetString(reader.GetOrdinal("actor_id")),
            ActorType: reader.IsDBNull(reader.GetOrdinal("actor_type")) ? null : reader.GetString(reader.GetOrdinal("actor_type")),
            ArtifactId: reader.IsDBNull(reader.GetOrdinal("artifact_id")) ? null : reader.GetString(reader.GetOrdinal("artifact_id")),
            FindingId: reader.IsDBNull(reader.GetOrdinal("finding_id")) ? null : reader.GetString(reader.GetOrdinal("finding_id")),
            PolicyVersion: reader.IsDBNull(reader.GetOrdinal("policy_version")) ? null : reader.GetString(reader.GetOrdinal("policy_version")),
            EventHash: reader.GetString(reader.GetOrdinal("event_hash")),
            PreviousHash: reader.GetString(reader.GetOrdinal("previous_hash")),
            Payload: payload));
    }
    sw.Stop();
    var fromSeq = events.Count > 0 ? events.First().SequenceNumber : 0;
    var toSeq = events.Count > 0 ? events.Last().SequenceNumber : 0;
    var metadata = new ReplayMetadata(
        FromSequence: fromSeq,
        ToSequence: toSeq,
        EventsCount: events.Count,
        HasMore: hasMore,
        ReplayDurationMs: sw.ElapsedMilliseconds);
    return (events, metadata);
}
/// <summary>
/// Computes a diff between two ledger points. Unresolvable "from" falls back to the
/// beginning of time (sequence 0); unresolvable "to" falls back to the current head.
/// Change counts are derived from event-type naming conventions
/// (prefix → entity type, ".created"/".deleted" suffix → change type).
/// </summary>
/// <param name="request">Tenant, the two point selectors, entity-type filter, and output format.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Summary counts, and per-change entries unless summary-only output was requested.</returns>
public async Task<DiffResponse> ComputeDiffAsync(
    DiffRequest request,
    CancellationToken ct = default)
{
    var fromPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.From.Timestamp,
        request.From.SequenceNumber,
        request.From.SnapshotId,
        ct) ?? new QueryPoint(DateTimeOffset.MinValue, 0);
    var toPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.To.Timestamp,
        request.To.SequenceNumber,
        request.To.SnapshotId,
        ct) ?? await GetCurrentPointAsync(request.TenantId, ct);
    // Count changes between the two points. Range is (fromSeq, toSeq] so that events
    // already visible at the "from" point are excluded.
    const string countSql = """
        WITH changes AS (
            SELECT
                CASE
                    WHEN e.event_type LIKE 'finding.%' THEN 'Finding'
                    WHEN e.event_type LIKE 'vex.%' THEN 'Vex'
                    WHEN e.event_type LIKE 'advisory.%' THEN 'Advisory'
                    WHEN e.event_type LIKE 'sbom.%' THEN 'Sbom'
                    ELSE 'Evidence'
                END as entity_type,
                CASE
                    WHEN e.event_type LIKE '%.created' THEN 'Added'
                    WHEN e.event_type LIKE '%.deleted' THEN 'Removed'
                    ELSE 'Modified'
                END as change_type
            FROM ledger_events e
            WHERE e.tenant_id = @tenantId
                AND e.sequence_number > @fromSeq
                AND e.sequence_number <= @toSeq
        )
        SELECT entity_type, change_type, COUNT(*) as cnt
        FROM changes
        GROUP BY entity_type, change_type
        """;
    await using var cmd = _dataSource.CreateCommand(countSql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("fromSeq", fromPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("toSeq", toPoint.SequenceNumber);
    var byEntityType = new Dictionary<EntityType, DiffCounts>();
    int totalAdded = 0, totalModified = 0, totalRemoved = 0;
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        var entityTypeStr = reader.GetString(0);
        var changeType = reader.GetString(1);
        var count = (int)reader.GetInt64(2);
        // Rows whose entity_type label does not parse into EntityType are silently
        // dropped from both per-entity and total counts (labels are produced by the
        // CASE expression above, so this should not happen in practice).
        if (Enum.TryParse<EntityType>(entityTypeStr, out var entityType))
        {
            if (!byEntityType.TryGetValue(entityType, out var counts))
            {
                counts = new DiffCounts(0, 0, 0);
            }
            // Accumulate the grouped count into the matching bucket via `with` copies.
            byEntityType[entityType] = changeType switch
            {
                "Added" => counts with { Added = counts.Added + count },
                "Removed" => counts with { Removed = counts.Removed + count },
                _ => counts with { Modified = counts.Modified + count }
            };
            switch (changeType)
            {
                case "Added": totalAdded += count; break;
                case "Removed": totalRemoved += count; break;
                default: totalModified += count; break;
            }
        }
    }
    // Unchanged is not computed here (would require a full entity census at both points).
    var summary = new DiffSummary(
        Added: totalAdded,
        Modified: totalModified,
        Removed: totalRemoved,
        Unchanged: 0,
        ByEntityType: byEntityType.Count > 0 ? byEntityType : null);
    // For detailed output, include individual changes
    IReadOnlyList<DiffEntry>? changes = null;
    if (request.OutputFormat != DiffOutputFormat.Summary)
    {
        changes = await GetDetailedChangesAsync(
            request.TenantId,
            fromPoint.SequenceNumber,
            toPoint.SequenceNumber,
            request.EntityTypes,
            ct);
    }
    return new DiffResponse(
        FromPoint: fromPoint,
        ToPoint: toPoint,
        Summary: summary,
        Changes: changes,
        NextPageToken: null);
}
/// <summary>
/// Loads up to 1000 individual change entries in the sequence range (fromSeq, toSeq],
/// optionally restricted to specific entity types. Entity and change type are inferred
/// from the event-type naming convention; FromState is never populated here.
/// </summary>
/// <param name="tenantId">Tenant scope for the query.</param>
/// <param name="fromSeq">Exclusive lower sequence bound.</param>
/// <param name="toSeq">Inclusive upper sequence bound.</param>
/// <param name="entityTypes">Optional entity-type filter; null/empty means all.</param>
/// <param name="ct">Cancellation token.</param>
private async Task<IReadOnlyList<DiffEntry>> GetDetailedChangesAsync(
    string tenantId,
    long fromSeq,
    long toSeq,
    IReadOnlyList<EntityType>? entityTypes,
    CancellationToken ct)
{
    // Maps an entity type onto its ledger event-type LIKE pattern.
    static string ToLikePattern(EntityType et) => et switch
    {
        EntityType.Finding => "finding.%",
        EntityType.Vex => "vex.%",
        EntityType.Advisory => "advisory.%",
        EntityType.Sbom => "sbom.%",
        _ => "evidence.%"
    };

    // Compute the pattern list once; previously it was built twice (for the SQL
    // text and again for the parameters), duplicating the switch expression.
    var patterns = entityTypes?.Count > 0
        ? entityTypes.Select(ToLikePattern).ToList()
        : null;

    var sql = new StringBuilder("""
        SELECT
            e.event_type,
            COALESCE(e.finding_id, e.artifact_id, e.payload->>'entityId') as entity_id,
            e.payload as to_state
        FROM ledger_events e
        WHERE e.tenant_id = @tenantId
            AND e.sequence_number > @fromSeq
            AND e.sequence_number <= @toSeq
        """);
    if (patterns is not null)
    {
        sql.Append(" AND (");
        for (int i = 0; i < patterns.Count; i++)
        {
            if (i > 0) sql.Append(" OR ");
            sql.Append($"e.event_type LIKE @pattern{i}");
        }
        sql.Append(")");
    }
    // Hard cap keeps detailed diffs bounded regardless of range size.
    sql.Append(" ORDER BY e.sequence_number LIMIT 1000");
    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddWithValue("tenantId", tenantId);
    cmd.Parameters.AddWithValue("fromSeq", fromSeq);
    cmd.Parameters.AddWithValue("toSeq", toSeq);
    if (patterns is not null)
    {
        for (int i = 0; i < patterns.Count; i++)
        {
            cmd.Parameters.AddWithValue($"pattern{i}", patterns[i]);
        }
    }
    var entries = new List<DiffEntry>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        var eventType = reader.GetString(0);
        var entityId = reader.IsDBNull(1) ? "unknown" : reader.GetString(1);
        var toStateJson = reader.IsDBNull(2) ? null : reader.GetString(2);
        // Entity type from the event-type prefix, change type from its suffix.
        var entityType = eventType switch
        {
            var et when et.StartsWith("finding.") => EntityType.Finding,
            var et when et.StartsWith("vex.") => EntityType.Vex,
            var et when et.StartsWith("advisory.") => EntityType.Advisory,
            var et when et.StartsWith("sbom.") => EntityType.Sbom,
            _ => EntityType.Evidence
        };
        var changeType = eventType switch
        {
            var et when et.EndsWith(".created") => DiffChangeType.Added,
            var et when et.EndsWith(".deleted") => DiffChangeType.Removed,
            _ => DiffChangeType.Modified
        };
        object? toState = null;
        if (!string.IsNullOrEmpty(toStateJson))
        {
            toState = JsonSerializer.Deserialize<object>(toStateJson, _jsonOptions);
        }
        entries.Add(new DiffEntry(
            EntityType: entityType,
            EntityId: entityId,
            ChangeType: changeType,
            FromState: null,
            ToState: toState,
            ChangedFields: null));
    }
    return entries;
}
/// <summary>
/// Returns the most recent changelog entries (newest first) for a single entity,
/// derived from that entity's ledger events.
/// </summary>
/// <param name="tenantId">Tenant scope for the query.</param>
/// <param name="entityType">Entity kind; selects the event-type prefix used to filter.</param>
/// <param name="entityId">Identifier matched against finding, artifact, or payload entity id.</param>
/// <param name="limit">Maximum number of entries returned.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<IReadOnlyList<ChangeLogEntry>> GetChangelogAsync(
    string tenantId,
    EntityType entityType,
    string entityId,
    int limit = 100,
    CancellationToken ct = default)
{
    var eventTypePrefix = entityType switch
    {
        EntityType.Finding => "finding.",
        EntityType.Vex => "vex.",
        EntityType.Advisory => "advisory.",
        EntityType.Sbom => "sbom.",
        _ => "evidence."
    };
    const string sql = """
        SELECT sequence_number, recorded_at, event_type, event_hash, actor_id,
            COALESCE(payload->>'summary', event_type) as summary
        FROM ledger_events
        WHERE tenant_id = @tenantId
            AND event_type LIKE @eventTypePrefix
            AND (finding_id = @entityId OR artifact_id = @entityId OR payload->>'entityId' = @entityId)
        ORDER BY sequence_number DESC
        LIMIT @limit
        """;
    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);
    command.Parameters.AddWithValue("eventTypePrefix", eventTypePrefix + "%");
    command.Parameters.AddWithValue("entityId", entityId);
    command.Parameters.AddWithValue("limit", limit);
    var changelog = new List<ChangeLogEntry>();
    await using var dataReader = await command.ExecuteReaderAsync(ct);
    // Resolve column ordinals once up front rather than per row.
    var seqOrdinal = dataReader.GetOrdinal("sequence_number");
    var recordedOrdinal = dataReader.GetOrdinal("recorded_at");
    var typeOrdinal = dataReader.GetOrdinal("event_type");
    var hashOrdinal = dataReader.GetOrdinal("event_hash");
    var actorOrdinal = dataReader.GetOrdinal("actor_id");
    var summaryOrdinal = dataReader.GetOrdinal("summary");
    while (await dataReader.ReadAsync(ct))
    {
        changelog.Add(new ChangeLogEntry(
            SequenceNumber: dataReader.GetInt64(seqOrdinal),
            Timestamp: dataReader.GetFieldValue<DateTimeOffset>(recordedOrdinal),
            EntityType: entityType,
            EntityId: entityId,
            EventType: dataReader.GetString(typeOrdinal),
            EventHash: dataReader.IsDBNull(hashOrdinal) ? null : dataReader.GetString(hashOrdinal),
            ActorId: dataReader.IsDBNull(actorOrdinal) ? null : dataReader.GetString(actorOrdinal),
            Summary: dataReader.IsDBNull(summaryOrdinal) ? null : dataReader.GetString(summaryOrdinal)));
    }
    return changelog;
}
/// <summary>
/// Checks whether a tenant's ledger is stale: overall (time since the newest event)
/// and per entity type for Finding, Vex, and Advisory events.
/// A tenant with no events at all is reported as not stale (no baseline to age from).
/// </summary>
/// <param name="tenantId">Tenant scope for the check.</param>
/// <param name="threshold">Maximum acceptable age of the newest event.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<StalenessResult> CheckStalenessAsync(
    string tenantId,
    TimeSpan threshold,
    CancellationToken ct = default)
{
    // NOTE(review): uses DateTimeOffset.UtcNow directly rather than an injected
    // clock/TimeProvider — confirm whether this repository is expected to be
    // deterministic under test like other services in this codebase.
    var checkedAt = DateTimeOffset.UtcNow;
    // One aggregate row: overall newest event plus per-prefix newest events.
    const string sql = """
        SELECT
            MAX(recorded_at) as last_event,
            MAX(CASE WHEN event_type LIKE 'finding.%' THEN recorded_at END) as finding_last,
            MAX(CASE WHEN event_type LIKE 'vex.%' THEN recorded_at END) as vex_last,
            MAX(CASE WHEN event_type LIKE 'advisory.%' THEN recorded_at END) as advisory_last
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """;
    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", tenantId);
    await using var reader = await cmd.ExecuteReaderAsync(ct);
    // Aggregate query always yields exactly one row (columns may be NULL).
    await reader.ReadAsync(ct);
    var lastEventAt = reader.IsDBNull(0) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(0);
    var findingLast = reader.IsDBNull(1) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(1);
    var vexLast = reader.IsDBNull(2) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(2);
    var advisoryLast = reader.IsDBNull(3) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(3);
    var isStale = lastEventAt.HasValue && (checkedAt - lastEventAt.Value) > threshold;
    var stalenessDuration = lastEventAt.HasValue ? checkedAt - lastEventAt.Value : (TimeSpan?)null;
    // Per-entity staleness; the trailing 0 is the event count, which this query
    // does not compute (would need COUNT per prefix).
    var byEntityType = new Dictionary<EntityType, EntityStaleness>
    {
        [EntityType.Finding] = new EntityStaleness(
            findingLast.HasValue && (checkedAt - findingLast.Value) > threshold,
            findingLast,
            0),
        [EntityType.Vex] = new EntityStaleness(
            vexLast.HasValue && (checkedAt - vexLast.Value) > threshold,
            vexLast,
            0),
        [EntityType.Advisory] = new EntityStaleness(
            advisoryLast.HasValue && (checkedAt - advisoryLast.Value) > threshold,
            advisoryLast,
            0)
    };
    return new StalenessResult(
        IsStale: isStale,
        CheckedAt: checkedAt,
        LastEventAt: lastEventAt,
        StalenessThreshold: threshold,
        StalenessDuration: stalenessDuration,
        ByEntityType: byEntityType);
}
}

View File

@@ -0,0 +1,205 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Snapshot;
using StellaOps.Findings.Ledger.Domain;
/// <summary>
/// Repository interface for ledger snapshot persistence.
/// </summary>
/// <remarks>
/// All operations are tenant-scoped; implementations must not leak data across tenants.
/// Snapshot deletion is soft (status change), not physical removal.
/// </remarks>
public interface ISnapshotRepository
{
    /// <summary>
    /// Creates a new snapshot record.
    /// </summary>
    /// <param name="currentSequence">Ledger sequence number the snapshot is anchored to.</param>
    /// <param name="currentTimestamp">Ledger timestamp the snapshot is anchored to.</param>
    /// <returns>The persisted snapshot.</returns>
    Task<LedgerSnapshot> CreateAsync(
        string tenantId,
        CreateSnapshotInput input,
        long currentSequence,
        DateTimeOffset currentTimestamp,
        CancellationToken ct = default);
    /// <summary>
    /// Gets a snapshot by ID, or null when not found for the tenant.
    /// </summary>
    Task<LedgerSnapshot?> GetByIdAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default);
    /// <summary>
    /// Lists snapshots with filtering and pagination.
    /// </summary>
    /// <returns>The page of snapshots plus a continuation token (null when exhausted).</returns>
    Task<(IReadOnlyList<LedgerSnapshot> Snapshots, string? NextPageToken)> ListAsync(
        SnapshotListQuery query,
        CancellationToken ct = default);
    /// <summary>
    /// Updates snapshot status.
    /// </summary>
    /// <returns>True when the snapshot existed and was updated.</returns>
    Task<bool> UpdateStatusAsync(
        string tenantId,
        Guid snapshotId,
        SnapshotStatus newStatus,
        CancellationToken ct = default);
    /// <summary>
    /// Updates snapshot statistics.
    /// </summary>
    Task<bool> UpdateStatisticsAsync(
        string tenantId,
        Guid snapshotId,
        SnapshotStatistics statistics,
        CancellationToken ct = default);
    /// <summary>
    /// Sets the Merkle root and optional DSSE digest for a snapshot.
    /// </summary>
    Task<bool> SetMerkleRootAsync(
        string tenantId,
        Guid snapshotId,
        string merkleRoot,
        string? dsseDigest,
        CancellationToken ct = default);
    /// <summary>
    /// Marks expired snapshots as expired.
    /// </summary>
    /// <returns>The number of snapshots transitioned to Expired.</returns>
    Task<int> ExpireSnapshotsAsync(
        DateTimeOffset cutoff,
        CancellationToken ct = default);
    /// <summary>
    /// Deletes a snapshot (soft delete - marks as Deleted).
    /// </summary>
    Task<bool> DeleteAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default);
    /// <summary>
    /// Gets the latest snapshot for a tenant.
    /// </summary>
    Task<LedgerSnapshot?> GetLatestAsync(
        string tenantId,
        CancellationToken ct = default);
    /// <summary>
    /// Checks if a snapshot exists.
    /// </summary>
    Task<bool> ExistsAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default);
}
/// <summary>
/// Repository interface for time-travel queries.
/// </summary>
/// <remarks>
/// Query points may be addressed by timestamp, raw sequence number, or snapshot id;
/// see <see cref="ResolveQueryPointAsync"/> for resolution.
/// </remarks>
public interface ITimeTravelRepository
{
    /// <summary>
    /// Gets the current sequence number and timestamp.
    /// </summary>
    Task<QueryPoint> GetCurrentPointAsync(
        string tenantId,
        CancellationToken ct = default);
    /// <summary>
    /// Resolves a query point from timestamp, sequence, or snapshot ID.
    /// </summary>
    /// <returns>The resolved point, or null when it cannot be resolved.</returns>
    Task<QueryPoint?> ResolveQueryPointAsync(
        string tenantId,
        DateTimeOffset? timestamp,
        long? sequence,
        Guid? snapshotId,
        CancellationToken ct = default);
    /// <summary>
    /// Queries historical findings at a specific point.
    /// </summary>
    Task<HistoricalQueryResponse<FindingHistoryItem>> QueryFindingsAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Queries historical VEX statements at a specific point.
    /// </summary>
    Task<HistoricalQueryResponse<VexHistoryItem>> QueryVexAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Queries historical advisories at a specific point.
    /// </summary>
    Task<HistoricalQueryResponse<AdvisoryHistoryItem>> QueryAdvisoriesAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Replays events within a range.
    /// </summary>
    Task<(IReadOnlyList<ReplayEvent> Events, ReplayMetadata Metadata)> ReplayEventsAsync(
        ReplayRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Computes diff between two points.
    /// </summary>
    Task<DiffResponse> ComputeDiffAsync(
        DiffRequest request,
        CancellationToken ct = default);
    /// <summary>
    /// Gets changelog entries for an entity.
    /// </summary>
    Task<IReadOnlyList<ChangeLogEntry>> GetChangelogAsync(
        string tenantId,
        EntityType entityType,
        string entityId,
        int limit = 100,
        CancellationToken ct = default);
    /// <summary>
    /// Checks staleness of ledger data.
    /// </summary>
    Task<StalenessResult> CheckStalenessAsync(
        string tenantId,
        TimeSpan threshold,
        CancellationToken ct = default);
}
/// <summary>
/// Historical finding item.
/// </summary>
/// <param name="FindingId">Identifier of the finding.</param>
/// <param name="ArtifactId">Artifact the finding was raised against.</param>
/// <param name="VulnId">Vulnerability identifier.</param>
/// <param name="Status">Finding status at the queried point.</param>
/// <param name="Severity">Severity score, when known.</param>
/// <param name="PolicyVersion">Policy version that produced the status, when known.</param>
/// <param name="FirstSeen">When the finding was first observed.</param>
/// <param name="LastUpdated">When the finding last changed (as of the queried point).</param>
/// <param name="Labels">Optional free-form labels.</param>
public sealed record FindingHistoryItem(
    string FindingId,
    string ArtifactId,
    string VulnId,
    string Status,
    decimal? Severity,
    string? PolicyVersion,
    DateTimeOffset FirstSeen,
    DateTimeOffset LastUpdated,
    Dictionary<string, string>? Labels);
/// <summary>
/// Historical VEX item.
/// </summary>
/// <param name="StatementId">Identifier of the VEX statement.</param>
/// <param name="VulnId">Vulnerability the statement covers.</param>
/// <param name="ProductId">Product the statement applies to.</param>
/// <param name="Status">VEX status at the queried point.</param>
/// <param name="Justification">Optional status justification.</param>
/// <param name="IssuedAt">When the statement was issued.</param>
/// <param name="ExpiresAt">Optional expiry of the statement.</param>
public sealed record VexHistoryItem(
    string StatementId,
    string VulnId,
    string ProductId,
    string Status,
    string? Justification,
    DateTimeOffset IssuedAt,
    DateTimeOffset? ExpiresAt);
/// <summary>
/// Historical advisory item.
/// </summary>
/// <param name="AdvisoryId">Identifier of the advisory.</param>
/// <param name="Source">Upstream source of the advisory.</param>
/// <param name="Title">Advisory title.</param>
/// <param name="CvssScore">CVSS score, when known.</param>
/// <param name="PublishedAt">Publication timestamp.</param>
/// <param name="ModifiedAt">Last-modified timestamp, when known.</param>
public sealed record AdvisoryHistoryItem(
    string AdvisoryId,
    string Source,
    string Title,
    decimal? CvssScore,
    DateTimeOffset PublishedAt,
    DateTimeOffset? ModifiedAt);

View File

@@ -17,6 +17,12 @@ internal static class LedgerTimeline
private static readonly EventId AirgapImport = new(6401, "ledger.airgap.imported");
private static readonly EventId EvidenceSnapshotLinkedEvent = new(6501, "ledger.evidence.snapshot_linked");
private static readonly EventId AirgapTimelineImpactEvent = new(6601, "ledger.airgap.timeline_impact");
private static readonly EventId AttestationPointerLinkedEvent = new(6701, "ledger.attestation.pointer_linked");
private static readonly EventId SnapshotCreatedEvent = new(6801, "ledger.snapshot.created");
private static readonly EventId SnapshotDeletedEvent = new(6802, "ledger.snapshot.deleted");
private static readonly EventId TimeTravelQueryEvent = new(6803, "ledger.timetravel.query");
private static readonly EventId ReplayCompletedEvent = new(6804, "ledger.replay.completed");
private static readonly EventId DiffComputedEvent = new(6805, "ledger.diff.computed");
public static void EmitLedgerAppended(ILogger logger, LedgerEventRecord record, string? evidenceBundleRef = null)
{
@@ -144,4 +150,134 @@ internal static class LedgerTimeline
timeAnchor.ToString("O"),
sealedMode);
}
/// <summary>
/// Emits the timeline entry recording that an attestation pointer was linked to a finding.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitAttestationPointerLinked(
    ILogger logger,
    string tenantId,
    string findingId,
    Guid pointerId,
    string attestationType,
    string digest)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        AttestationPointerLinkedEvent,
        "timeline ledger.attestation.pointer_linked tenant={Tenant} finding={FindingId} pointer={PointerId} attestation_type={AttestationType} digest={Digest}",
        tenantId,
        findingId,
        pointerId,
        attestationType,
        digest);
}
/// <summary>
/// Emits the timeline entry for a newly created ledger snapshot.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitSnapshotCreated(
    ILogger logger,
    string tenantId,
    Guid snapshotId,
    long sequenceNumber,
    long findingsCount)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        SnapshotCreatedEvent,
        "timeline ledger.snapshot.created tenant={Tenant} snapshot={SnapshotId} sequence={SequenceNumber} findings_count={FindingsCount}",
        tenantId,
        snapshotId,
        sequenceNumber,
        findingsCount);
}
/// <summary>
/// Emits the timeline entry for a deleted ledger snapshot.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitSnapshotDeleted(
    ILogger logger,
    string tenantId,
    Guid snapshotId)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        SnapshotDeletedEvent,
        "timeline ledger.snapshot.deleted tenant={Tenant} snapshot={SnapshotId}",
        tenantId,
        snapshotId);
}
/// <summary>
/// Emits the timeline entry recording that a time-travel query was served.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitTimeTravelQuery(
    ILogger logger,
    string tenantId,
    string entityType,
    long atSequence,
    int resultCount)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        TimeTravelQueryEvent,
        "timeline ledger.timetravel.query tenant={Tenant} entity_type={EntityType} at_sequence={AtSequence} result_count={ResultCount}",
        tenantId,
        entityType,
        atSequence,
        resultCount);
}
/// <summary>
/// Emits the timeline entry recording a completed ledger replay.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitReplayCompleted(
    ILogger logger,
    string tenantId,
    long fromSequence,
    long toSequence,
    int eventsCount,
    long durationMs)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        ReplayCompletedEvent,
        "timeline ledger.replay.completed tenant={Tenant} from_sequence={FromSequence} to_sequence={ToSequence} events_count={EventsCount} duration_ms={DurationMs}",
        tenantId,
        fromSequence,
        toSequence,
        eventsCount,
        durationMs);
}
/// <summary>
/// Emits the timeline entry recording a computed ledger diff.
/// No-op when <paramref name="logger"/> is null.
/// </summary>
public static void EmitDiffComputed(
    ILogger logger,
    string tenantId,
    long fromSequence,
    long toSequence,
    int added,
    int modified,
    int removed)
{
    // Null-conditional invocation replaces the explicit null guard; same behavior.
    logger?.LogInformation(
        DiffComputedEvent,
        "timeline ledger.diff.computed tenant={Tenant} from_sequence={FromSequence} to_sequence={ToSequence} added={Added} modified={Modified} removed={Removed}",
        tenantId,
        fromSequence,
        toSequence,
        added,
        modified,
        removed);
}
}

View File

@@ -0,0 +1,474 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.Attestation;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Service for managing attestation pointers linking findings to verification reports and attestation envelopes.
/// </summary>
public sealed class AttestationPointerService
{
    private readonly ILedgerEventRepository _ledgerEventRepository;  // chain-head lookups for hash chaining
    private readonly ILedgerEventWriteService _writeService;         // appends pointer events to the ledger
    private readonly IAttestationPointerRepository _repository;      // pointer persistence and queries
    private readonly TimeProvider _timeProvider;                     // injected clock for testable timestamps
    private readonly ILogger<AttestationPointerService> _logger;
/// <summary>
/// Initializes the service, validating that all collaborators are supplied.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
public AttestationPointerService(
    ILedgerEventRepository ledgerEventRepository,
    ILedgerEventWriteService writeService,
    IAttestationPointerRepository repository,
    TimeProvider timeProvider,
    ILogger<AttestationPointerService> logger)
{
    // Throw-helper idiom, consistent with the guard style used in CreatePointerAsync.
    ArgumentNullException.ThrowIfNull(ledgerEventRepository);
    ArgumentNullException.ThrowIfNull(writeService);
    ArgumentNullException.ThrowIfNull(repository);
    ArgumentNullException.ThrowIfNull(timeProvider);
    ArgumentNullException.ThrowIfNull(logger);
    _ledgerEventRepository = ledgerEventRepository;
    _writeService = writeService;
    _repository = repository;
    _timeProvider = timeProvider;
    _logger = logger;
}
/// <summary>
/// Creates an attestation pointer linking a finding to a verification report or attestation envelope.
/// The operation is idempotent on (tenant, finding, digest, attestation type): an existing match
/// is returned without writing a new ledger event.
/// </summary>
/// <param name="input">Tenant, finding, attestation reference, and optional verification result.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Success flag with the pointer/ledger-event ids, or an error string on ledger write failure.</returns>
public async Task<AttestationPointerResult> CreatePointerAsync(
    AttestationPointerInput input,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(input);
    ArgumentException.ThrowIfNullOrWhiteSpace(input.TenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(input.FindingId);
    ArgumentException.ThrowIfNullOrWhiteSpace(input.AttestationRef.Digest);
    var now = _timeProvider.GetUtcNow();
    var createdBy = input.CreatedBy ?? "attestation-linker";
    // Check for idempotency
    var exists = await _repository.ExistsAsync(
        input.TenantId,
        input.FindingId,
        input.AttestationRef.Digest,
        input.AttestationType,
        cancellationToken).ConfigureAwait(false);
    if (exists)
    {
        _logger.LogDebug(
            "Attestation pointer already exists for finding {FindingId} with digest {Digest}",
            input.FindingId, input.AttestationRef.Digest);
        // Find and return the existing pointer
        var existing = await _repository.GetByDigestAsync(
            input.TenantId,
            input.AttestationRef.Digest,
            cancellationToken).ConfigureAwait(false);
        var match = existing.FirstOrDefault(p =>
            p.FindingId == input.FindingId && p.AttestationType == input.AttestationType);
        return new AttestationPointerResult(true, match?.PointerId, match?.LedgerEventId, null);
    }
    var pointerId = Guid.NewGuid();
    // Create ledger event for the attestation pointer.
    // Each finding gets its own attestation chain, keyed off tenant + finding id.
    var chainId = LedgerChainIdGenerator.FromTenantSubject(
        input.TenantId, $"attestation::{input.FindingId}");
    // Hash-chain continuity: next sequence and previous hash come from the chain head
    // (or the empty-hash sentinel for a brand-new chain).
    var chainHead = await _ledgerEventRepository.GetChainHeadAsync(
        input.TenantId, chainId, cancellationToken).ConfigureAwait(false);
    var sequence = (chainHead?.SequenceNumber ?? 0) + 1;
    var previousHash = chainHead?.EventHash ?? LedgerEventConstants.EmptyHash;
    var eventId = Guid.NewGuid();
    var attestationPayload = BuildAttestationPayload(input, pointerId);
    var envelope = BuildEnvelope(eventId, input, chainId, sequence, now, attestationPayload);
    var draft = new LedgerEventDraft(
        input.TenantId,
        chainId,
        sequence,
        eventId,
        LedgerEventConstants.EventAttestationPointerLinked,
        "attestation-pointer",
        input.FindingId,
        input.FindingId,
        SourceRunId: null,
        ActorId: createdBy,
        ActorType: "system",
        OccurredAt: now,
        RecordedAt: now,
        Payload: attestationPayload,
        CanonicalEnvelope: envelope,
        ProvidedPreviousHash: previousHash);
    var writeResult = await _writeService.AppendAsync(draft, cancellationToken).ConfigureAwait(false);
    if (writeResult.Status is not (LedgerWriteStatus.Success or LedgerWriteStatus.Idempotent))
    {
        // Ledger append failed: nothing is persisted to the pointer repository.
        var error = string.Join(";", writeResult.Errors);
        _logger.LogWarning(
            "Failed to write ledger event for attestation pointer {PointerId}: {Error}",
            pointerId, error);
        return new AttestationPointerResult(false, null, null, error);
    }
    var ledgerEventId = writeResult.Record?.EventId;
    // NOTE(review): on an Idempotent ledger write this still inserts a pointer record —
    // confirm the repository insert tolerates/deduplicates that case.
    var record = new AttestationPointerRecord(
        input.TenantId,
        pointerId,
        input.FindingId,
        input.AttestationType,
        input.Relationship,
        input.AttestationRef,
        input.VerificationResult,
        now,
        createdBy,
        input.Metadata,
        ledgerEventId);
    await _repository.InsertAsync(record, cancellationToken).ConfigureAwait(false);
    LedgerTimeline.EmitAttestationPointerLinked(
        _logger,
        input.TenantId,
        input.FindingId,
        pointerId,
        input.AttestationType.ToString(),
        input.AttestationRef.Digest);
    return new AttestationPointerResult(true, pointerId, ledgerEventId, null);
}
/// <summary>
/// Gets all attestation pointers attached to a finding.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="findingId">Finding to look up; must be non-empty.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<IReadOnlyList<AttestationPointerRecord>> GetPointersAsync(
    string tenantId,
    string findingId,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);
    var pointers = await _repository
        .GetByFindingIdAsync(tenantId, findingId, cancellationToken)
        .ConfigureAwait(false);
    return pointers;
}
/// <summary>
/// Gets a single attestation pointer by its identifier, or null when not found.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="pointerId">Pointer identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<AttestationPointerRecord?> GetPointerAsync(
    string tenantId,
    Guid pointerId,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    var pointer = await _repository
        .GetByIdAsync(tenantId, pointerId, cancellationToken)
        .ConfigureAwait(false);
    return pointer;
}
/// <summary>
/// Searches attestation pointers matching the given query.
/// </summary>
/// <param name="query">Search criteria; must not be null.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<IReadOnlyList<AttestationPointerRecord>> SearchAsync(
    AttestationPointerQuery query,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(query);
    var results = await _repository.SearchAsync(query, cancellationToken).ConfigureAwait(false);
    return results;
}
/// <summary>
/// Gets the attestation summary for a single finding.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="findingId">Finding to summarize; must be non-empty.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<FindingAttestationSummary> GetSummaryAsync(
    string tenantId,
    string findingId,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(findingId);
    var summary = await _repository
        .GetSummaryAsync(tenantId, findingId, cancellationToken)
        .ConfigureAwait(false);
    return summary;
}
/// <summary>
/// Gets attestation summaries for a batch of findings.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="findingIds">Finding identifiers to summarize; must not be null.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<IReadOnlyList<FindingAttestationSummary>> GetSummariesAsync(
    string tenantId,
    IReadOnlyList<string> findingIds,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentNullException.ThrowIfNull(findingIds);
    var summaries = await _repository
        .GetSummariesAsync(tenantId, findingIds, cancellationToken)
        .ConfigureAwait(false);
    return summaries;
}
/// <summary>
/// Updates the verification result for an attestation pointer.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="pointerId">Pointer to update.</param>
/// <param name="verificationResult">New verification result; must not be null.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True when the pointer existed and was updated; false when not found.</returns>
public async Task<bool> UpdateVerificationResultAsync(
    string tenantId,
    Guid pointerId,
    VerificationResult verificationResult,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentNullException.ThrowIfNull(verificationResult);
    var pointer = await _repository
        .GetByIdAsync(tenantId, pointerId, cancellationToken)
        .ConfigureAwait(false);
    if (pointer is null)
    {
        _logger.LogWarning(
            "Attestation pointer {PointerId} not found for tenant {TenantId}",
            pointerId, tenantId);
        return false;
    }
    await _repository
        .UpdateVerificationResultAsync(tenantId, pointerId, verificationResult, cancellationToken)
        .ConfigureAwait(false);
    _logger.LogInformation(
        "Updated verification result for attestation pointer {PointerId}, verified={Verified}",
        pointerId, verificationResult.Verified);
    return true;
}
/// <summary>
/// Gets identifiers of findings that have attestations matching the given criteria.
/// </summary>
/// <param name="tenantId">Tenant scope; must be non-empty.</param>
/// <param name="verificationFilter">Optional verification-state filter.</param>
/// <param name="attestationTypes">Optional attestation-type filter.</param>
/// <param name="limit">Maximum number of ids returned.</param>
/// <param name="offset">Number of ids to skip (paging).</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task<IReadOnlyList<string>> GetFindingIdsWithAttestationsAsync(
    string tenantId,
    AttestationVerificationFilter? verificationFilter = null,
    IReadOnlyList<AttestationType>? attestationTypes = null,
    int limit = 100,
    int offset = 0,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    var findingIds = await _repository
        .GetFindingIdsWithAttestationsAsync(
            tenantId, verificationFilter, attestationTypes, limit, offset, cancellationToken)
        .ConfigureAwait(false);
    return findingIds;
}
/// <summary>
/// Builds the canonical ledger payload describing an attestation pointer.
/// Optional fields are emitted only when present, keeping the payload sparse.
/// Shape: { "attestation": { "pointer": { ... } } }.
/// </summary>
/// <param name="input">Pointer input (attestation reference, type, relationship, verification).</param>
/// <param name="pointerId">Identifier assigned to the new pointer.</param>
private static JsonObject BuildAttestationPayload(AttestationPointerInput input, Guid pointerId)
{
    // Sets node[key] = value only for non-empty strings; collapses the repeated
    // "if (!string.IsNullOrEmpty(...)) node[...] = ..." pattern of the original.
    static void SetIfNotEmpty(JsonObject node, string key, string? value)
    {
        if (!string.IsNullOrEmpty(value))
        {
            node[key] = value;
        }
    }

    // Copies a sequence into a JsonArray via JsonArray.Add (same conversion the
    // original per-element loops used).
    static JsonArray ToArray<T>(IEnumerable<T> values)
    {
        var array = new JsonArray();
        foreach (var value in values)
        {
            array.Add(value);
        }
        return array;
    }

    var attestationRefNode = new JsonObject
    {
        ["digest"] = input.AttestationRef.Digest
    };
    if (input.AttestationRef.AttestationId.HasValue)
    {
        attestationRefNode["attestation_id"] = input.AttestationRef.AttestationId.Value.ToString();
    }
    SetIfNotEmpty(attestationRefNode, "storage_uri", input.AttestationRef.StorageUri);
    SetIfNotEmpty(attestationRefNode, "payload_type", input.AttestationRef.PayloadType);
    SetIfNotEmpty(attestationRefNode, "predicate_type", input.AttestationRef.PredicateType);
    if (input.AttestationRef.SubjectDigests is { Count: > 0 })
    {
        attestationRefNode["subject_digests"] = ToArray(input.AttestationRef.SubjectDigests);
    }
    if (input.AttestationRef.SignerInfo is not null)
    {
        var signerNode = new JsonObject();
        SetIfNotEmpty(signerNode, "key_id", input.AttestationRef.SignerInfo.KeyId);
        SetIfNotEmpty(signerNode, "issuer", input.AttestationRef.SignerInfo.Issuer);
        SetIfNotEmpty(signerNode, "subject", input.AttestationRef.SignerInfo.Subject);
        if (input.AttestationRef.SignerInfo.SignedAt.HasValue)
        {
            signerNode["signed_at"] = FormatTimestamp(input.AttestationRef.SignerInfo.SignedAt.Value);
        }
        attestationRefNode["signer_info"] = signerNode;
    }
    if (input.AttestationRef.RekorEntry is not null)
    {
        var rekorNode = new JsonObject();
        if (input.AttestationRef.RekorEntry.LogIndex.HasValue)
        {
            rekorNode["log_index"] = input.AttestationRef.RekorEntry.LogIndex.Value;
        }
        SetIfNotEmpty(rekorNode, "log_id", input.AttestationRef.RekorEntry.LogId);
        SetIfNotEmpty(rekorNode, "uuid", input.AttestationRef.RekorEntry.Uuid);
        if (input.AttestationRef.RekorEntry.IntegratedTime.HasValue)
        {
            rekorNode["integrated_time"] = input.AttestationRef.RekorEntry.IntegratedTime.Value;
        }
        attestationRefNode["rekor_entry"] = rekorNode;
    }
    var pointerNode = new JsonObject
    {
        ["pointer_id"] = pointerId.ToString(),
        ["attestation_type"] = input.AttestationType.ToString(),
        ["relationship"] = input.Relationship.ToString(),
        ["attestation_ref"] = attestationRefNode
    };
    if (input.VerificationResult is not null)
    {
        var verificationNode = new JsonObject
        {
            ["verified"] = input.VerificationResult.Verified,
            ["verified_at"] = FormatTimestamp(input.VerificationResult.VerifiedAt)
        };
        SetIfNotEmpty(verificationNode, "verifier", input.VerificationResult.Verifier);
        SetIfNotEmpty(verificationNode, "verifier_version", input.VerificationResult.VerifierVersion);
        SetIfNotEmpty(verificationNode, "policy_ref", input.VerificationResult.PolicyRef);
        if (input.VerificationResult.Checks is { Count: > 0 })
        {
            var checksArray = new JsonArray();
            foreach (var check in input.VerificationResult.Checks)
            {
                var checkNode = new JsonObject
                {
                    ["check_type"] = check.CheckType.ToString(),
                    ["passed"] = check.Passed
                };
                SetIfNotEmpty(checkNode, "details", check.Details);
                checksArray.Add(checkNode);
            }
            verificationNode["checks"] = checksArray;
        }
        if (input.VerificationResult.Warnings is { Count: > 0 })
        {
            verificationNode["warnings"] = ToArray(input.VerificationResult.Warnings);
        }
        if (input.VerificationResult.Errors is { Count: > 0 })
        {
            verificationNode["errors"] = ToArray(input.VerificationResult.Errors);
        }
        pointerNode["verification_result"] = verificationNode;
    }
    return new JsonObject
    {
        ["attestation"] = new JsonObject
        {
            ["pointer"] = pointerNode
        }
    };
}
/// <summary>
/// Wraps the given payload in the standard ledger event envelope for an
/// attestation-pointer-linked event.
/// </summary>
private static JsonObject BuildEnvelope(
    Guid eventId,
    AttestationPointerInput input,
    Guid chainId,
    long sequence,
    DateTimeOffset now,
    JsonObject payload)
{
    // occurredAt and recordedAt are intentionally identical: the pointer event
    // is recorded at the moment it occurs.
    var timestamp = FormatTimestamp(now);

    var findingNode = new JsonObject
    {
        ["id"] = input.FindingId,
        ["artifactId"] = input.FindingId,
        ["vulnId"] = "attestation-pointer"
    };

    var actorNode = new JsonObject
    {
        ["id"] = input.CreatedBy ?? "attestation-linker",
        ["type"] = "system"
    };

    var eventNode = new JsonObject
    {
        ["id"] = eventId.ToString(),
        ["type"] = LedgerEventConstants.EventAttestationPointerLinked,
        ["tenant"] = input.TenantId,
        ["chainId"] = chainId.ToString(),
        ["sequence"] = sequence,
        ["policyVersion"] = "attestation-pointer",
        ["artifactId"] = input.FindingId,
        ["finding"] = findingNode,
        ["actor"] = actorNode,
        ["occurredAt"] = timestamp,
        ["recordedAt"] = timestamp,
        // Clone so later mutations of the caller's payload cannot leak into the envelope.
        ["payload"] = payload.DeepClone()
    };

    return new JsonObject { ["event"] = eventNode };
}
/// <summary>
/// Formats a timestamp as a UTC ISO-8601 string with millisecond precision,
/// e.g. <c>2025-12-07T13:44:13.000Z</c>.
/// </summary>
/// <remarks>
/// InvariantCulture is required: the custom format specifier <c>:</c> maps to
/// the current culture's time separator, so without it some locales would emit
/// a non-ISO separator (CA1305).
/// </remarks>
private static string FormatTimestamp(DateTimeOffset value)
    => value.ToUniversalTime().ToString(
        "yyyy-MM-dd'T'HH:mm:ss.fff'Z'",
        System.Globalization.CultureInfo.InvariantCulture);
}

View File

@@ -0,0 +1,370 @@
namespace StellaOps.Findings.Ledger.Services;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
using StellaOps.Findings.Ledger.Observability;
/// <summary>
/// Service for managing ledger snapshots and time-travel queries.
/// Snapshots anchor a point-in-time view of the ledger (sequence number plus
/// timestamp); historical queries, replays, and diffs are delegated to
/// <see cref="ITimeTravelRepository"/>.
/// </summary>
public sealed class SnapshotService
{
    private readonly ISnapshotRepository _snapshotRepository;
    private readonly ITimeTravelRepository _timeTravelRepository;
    private readonly ILogger<SnapshotService> _logger;

    public SnapshotService(
        ISnapshotRepository snapshotRepository,
        ITimeTravelRepository timeTravelRepository,
        ILogger<SnapshotService> logger)
    {
        ArgumentNullException.ThrowIfNull(snapshotRepository);
        ArgumentNullException.ThrowIfNull(timeTravelRepository);
        ArgumentNullException.ThrowIfNull(logger);
        _snapshotRepository = snapshotRepository;
        _timeTravelRepository = timeTravelRepository;
        _logger = logger;
    }

    /// <summary>
    /// Creates a new snapshot of the ledger at the specified point.
    /// The snapshot row is created first, statistics are computed and stored,
    /// an optional Merkle root is attached when signing is requested, and the
    /// snapshot is finally marked <c>Available</c>.
    /// </summary>
    /// <param name="input">Tenant, requested point, and snapshot options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A result carrying the final snapshot on success, or the error message on failure.</returns>
    public async Task<CreateSnapshotResult> CreateSnapshotAsync(
        CreateSnapshotInput input,
        CancellationToken ct = default)
    {
        try
        {
            _logger.LogInformation(
                "Creating snapshot for tenant {TenantId} at sequence {Sequence} / timestamp {Timestamp}",
                input.TenantId,
                input.AtSequence,
                input.AtTimestamp);

            // Capture the current ledger head; the snapshot is anchored to it.
            var currentPoint = await _timeTravelRepository.GetCurrentPointAsync(input.TenantId, ct);

            var snapshot = await _snapshotRepository.CreateAsync(
                input.TenantId,
                input,
                currentPoint.SequenceNumber,
                currentPoint.Timestamp,
                ct);

            var statistics = await ComputeStatisticsAsync(
                input.TenantId,
                snapshot.SequenceNumber,
                input.IncludeEntityTypes,
                ct);
            await _snapshotRepository.UpdateStatisticsAsync(
                input.TenantId,
                snapshot.SnapshotId,
                statistics,
                ct);

            // Compute a Merkle root only when signing was requested. The DSSE
            // digest is not produced yet (no signing pipeline here), so null is
            // stored alongside the root.
            string? merkleRoot = null;
            string? dsseDigest = null;
            if (input.Sign)
            {
                merkleRoot = await ComputeMerkleRootAsync(
                    input.TenantId,
                    snapshot.SequenceNumber,
                    ct);
                await _snapshotRepository.SetMerkleRootAsync(
                    input.TenantId,
                    snapshot.SnapshotId,
                    merkleRoot,
                    dsseDigest,
                    ct);
            }

            // Mark as available only after statistics (and any root) are persisted.
            await _snapshotRepository.UpdateStatusAsync(
                input.TenantId,
                snapshot.SnapshotId,
                SnapshotStatus.Available,
                ct);

            // Re-read so the caller observes the statistics/status applied above.
            var finalSnapshot = await _snapshotRepository.GetByIdAsync(
                input.TenantId,
                snapshot.SnapshotId,
                ct);

            LedgerTimeline.EmitSnapshotCreated(
                _logger,
                input.TenantId,
                snapshot.SnapshotId,
                snapshot.SequenceNumber,
                statistics.FindingsCount);

            return new CreateSnapshotResult(true, finalSnapshot, null);
        }
        catch (Exception ex)
        {
            // Best-effort API: failures are reported in the result rather than thrown.
            _logger.LogError(ex, "Failed to create snapshot for tenant {TenantId}", input.TenantId);
            return new CreateSnapshotResult(false, null, ex.Message);
        }
    }

    /// <summary>
    /// Gets a snapshot by ID, or null when it does not exist for the tenant.
    /// </summary>
    public async Task<LedgerSnapshot?> GetSnapshotAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default)
    {
        return await _snapshotRepository.GetByIdAsync(tenantId, snapshotId, ct);
    }

    /// <summary>
    /// Lists snapshots for a tenant with cursor-based pagination.
    /// </summary>
    public async Task<(IReadOnlyList<LedgerSnapshot> Snapshots, string? NextPageToken)> ListSnapshotsAsync(
        SnapshotListQuery query,
        CancellationToken ct = default)
    {
        return await _snapshotRepository.ListAsync(query, ct);
    }

    /// <summary>
    /// Deletes a snapshot; emits a timeline event only when a row was actually removed.
    /// </summary>
    /// <returns>True when the snapshot existed and was deleted.</returns>
    public async Task<bool> DeleteSnapshotAsync(
        string tenantId,
        Guid snapshotId,
        CancellationToken ct = default)
    {
        var deleted = await _snapshotRepository.DeleteAsync(tenantId, snapshotId, ct);
        if (deleted)
        {
            LedgerTimeline.EmitSnapshotDeleted(_logger, tenantId, snapshotId);
        }
        return deleted;
    }

    /// <summary>
    /// Queries historical findings at a specific point in time.
    /// </summary>
    public async Task<HistoricalQueryResponse<FindingHistoryItem>> QueryHistoricalFindingsAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.QueryFindingsAsync(request, ct);
    }

    /// <summary>
    /// Queries historical VEX statements at a specific point in time.
    /// </summary>
    public async Task<HistoricalQueryResponse<VexHistoryItem>> QueryHistoricalVexAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.QueryVexAsync(request, ct);
    }

    /// <summary>
    /// Queries historical advisories at a specific point in time.
    /// </summary>
    public async Task<HistoricalQueryResponse<AdvisoryHistoryItem>> QueryHistoricalAdvisoriesAsync(
        HistoricalQueryRequest request,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.QueryAdvisoriesAsync(request, ct);
    }

    /// <summary>
    /// Replays events within a specified range.
    /// </summary>
    public async Task<(IReadOnlyList<ReplayEvent> Events, ReplayMetadata Metadata)> ReplayEventsAsync(
        ReplayRequest request,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.ReplayEventsAsync(request, ct);
    }

    /// <summary>
    /// Computes the diff between two points in time.
    /// </summary>
    public async Task<DiffResponse> ComputeDiffAsync(
        DiffRequest request,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.ComputeDiffAsync(request, ct);
    }

    /// <summary>
    /// Gets the changelog for an entity, most recent entries first (ordering
    /// determined by the repository implementation).
    /// </summary>
    public async Task<IReadOnlyList<ChangeLogEntry>> GetChangelogAsync(
        string tenantId,
        EntityType entityType,
        string entityId,
        int limit = 100,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.GetChangelogAsync(tenantId, entityType, entityId, limit, ct);
    }

    /// <summary>
    /// Checks staleness of ledger data against the given threshold.
    /// </summary>
    public async Task<StalenessResult> CheckStalenessAsync(
        string tenantId,
        TimeSpan threshold,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.CheckStalenessAsync(tenantId, threshold, ct);
    }

    /// <summary>
    /// Gets the current query point (latest sequence and timestamp).
    /// </summary>
    public async Task<QueryPoint> GetCurrentPointAsync(
        string tenantId,
        CancellationToken ct = default)
    {
        return await _timeTravelRepository.GetCurrentPointAsync(tenantId, ct);
    }

    /// <summary>
    /// Expires snapshots whose expiry time has passed.
    /// </summary>
    /// <returns>The number of snapshots expired.</returns>
    public async Task<int> ExpireOldSnapshotsAsync(CancellationToken ct = default)
    {
        // NOTE(review): uses the wall clock directly; consider injecting TimeProvider
        // so this path is testable without real time.
        var cutoff = DateTimeOffset.UtcNow;
        var count = await _snapshotRepository.ExpireSnapshotsAsync(cutoff, ct);
        if (count > 0)
        {
            _logger.LogInformation("Expired {Count} snapshots", count);
        }
        return count;
    }

    /// <summary>
    /// Computes entity counts for a snapshot at <paramref name="atSequence"/>.
    /// Each historical query uses PageSize 1 because only TotalCount is consumed.
    /// </summary>
    private async Task<SnapshotStatistics> ComputeStatisticsAsync(
        string tenantId,
        long atSequence,
        IReadOnlyList<EntityType>? entityTypes,
        CancellationToken ct)
    {
        // NOTE(review): entityTypes is accepted but not yet applied as a filter —
        // TODO confirm whether per-entity-type snapshots should restrict these counts.
        var findingsResult = await _timeTravelRepository.QueryFindingsAsync(
            new HistoricalQueryRequest(
                tenantId,
                null,
                atSequence,
                null,
                EntityType.Finding,
                null,
                1),
            ct);
        var vexResult = await _timeTravelRepository.QueryVexAsync(
            new HistoricalQueryRequest(
                tenantId,
                null,
                atSequence,
                null,
                EntityType.Vex,
                null,
                1),
            ct);
        var advisoryResult = await _timeTravelRepository.QueryAdvisoriesAsync(
            new HistoricalQueryRequest(
                tenantId,
                null,
                atSequence,
                null,
                EntityType.Advisory,
                null,
                1),
            ct);

        // The ledger sequence number doubles as the event count, so no replay
        // round-trip is needed here. Counts are approximations; exact figures
        // would require dedicated count queries.
        return new SnapshotStatistics(
            FindingsCount: findingsResult.TotalCount,
            VexStatementsCount: vexResult.TotalCount,
            AdvisoriesCount: advisoryResult.TotalCount,
            SbomsCount: 0, // Would need separate SBOM tracking
            EventsCount: atSequence,
            SizeBytes: 0); // Would need to compute actual storage size
    }

    /// <summary>
    /// Computes the Merkle root over all event hashes up to <paramref name="atSequence"/>.
    /// </summary>
    private async Task<string> ComputeMerkleRootAsync(
        string tenantId,
        long atSequence,
        CancellationToken ct)
    {
        // NOTE(review): only the first page (up to 10000 events) is covered — TODO
        // confirm whether the replay must be paginated for larger ledgers.
        var (events, _) = await _timeTravelRepository.ReplayEventsAsync(
            new ReplayRequest(
                tenantId,
                ToSequence: atSequence,
                IncludePayload: false,
                PageSize: 10000),
            ct);

        if (events.Count == 0)
        {
            // Deterministic sentinel root for an empty ledger.
            return ComputeHash("empty");
        }

        var hashes = events.Select(e => e.EventHash).ToList();
        return ComputeMerkleRoot(hashes);
    }

    /// <summary>
    /// Recursively folds a list of hex hashes into a Merkle root. An odd
    /// trailing hash is promoted to the next level unchanged.
    /// </summary>
    private static string ComputeMerkleRoot(List<string> hashes)
    {
        if (hashes.Count == 0)
            return ComputeHash("empty");
        if (hashes.Count == 1)
            return hashes[0];

        var nextLevel = new List<string>();
        for (int i = 0; i < hashes.Count; i += 2)
        {
            if (i + 1 < hashes.Count)
            {
                // Pair hash: concatenate the two hex strings and re-hash.
                nextLevel.Add(ComputeHash(hashes[i] + hashes[i + 1]));
            }
            else
            {
                // Odd leaf carries over to the next level.
                nextLevel.Add(hashes[i]);
            }
        }
        return ComputeMerkleRoot(nextLevel);
    }

    /// <summary>
    /// SHA-256 of the UTF-8 bytes of <paramref name="input"/>, as lowercase hex.
    /// </summary>
    private static string ComputeHash(string input)
    {
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return Convert.ToHexStringLower(bytes);
    }
}

View File

@@ -0,0 +1,100 @@
-- 008_attestation_pointers.sql
-- LEDGER-ATTEST-73-001: Persist pointers from findings to verification reports and attestation envelopes
BEGIN;
-- ============================================
-- 1. Create attestation pointers table
-- ============================================
-- The primary key is declared inline so the migration stays idempotent: a
-- separate ALTER TABLE ... ADD CONSTRAINT would fail on re-run, because the
-- table (and its constraint) survive CREATE TABLE IF NOT EXISTS. This also
-- matches the inline-PK style used by migration 009.
CREATE TABLE IF NOT EXISTS ledger_attestation_pointers (
    tenant_id text NOT NULL,
    pointer_id uuid NOT NULL,
    finding_id text NOT NULL,
    attestation_type text NOT NULL,
    relationship text NOT NULL,
    attestation_ref jsonb NOT NULL,
    verification_result jsonb NULL,
    created_at timestamptz NOT NULL,
    created_by text NOT NULL,
    metadata jsonb NULL,
    ledger_event_id uuid NULL,
    CONSTRAINT pk_ledger_attestation_pointers PRIMARY KEY (tenant_id, pointer_id)
);
-- ============================================
-- 2. Create indexes for efficient queries
-- ============================================
-- Index for finding lookups (most common query pattern)
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_finding
ON ledger_attestation_pointers (tenant_id, finding_id, created_at DESC);
-- Index for digest-based lookups (idempotency checks)
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_digest
ON ledger_attestation_pointers (tenant_id, (attestation_ref->>'digest'));
-- Index for attestation type filtering
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_type
ON ledger_attestation_pointers (tenant_id, attestation_type, created_at DESC);
-- Index for verification status filtering (verified/unverified/failed)
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_verified
ON ledger_attestation_pointers (tenant_id, ((verification_result->>'verified')::boolean))
WHERE verification_result IS NOT NULL;
-- Index for signer identity searches
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_signer
ON ledger_attestation_pointers (tenant_id, (attestation_ref->'signer_info'->>'subject'))
WHERE attestation_ref->'signer_info' IS NOT NULL;
-- Index for predicate type searches
CREATE INDEX IF NOT EXISTS ix_ledger_attestation_pointers_predicate
ON ledger_attestation_pointers (tenant_id, (attestation_ref->>'predicate_type'))
WHERE attestation_ref->>'predicate_type' IS NOT NULL;
-- ============================================
-- 3. Enable Row-Level Security
-- ============================================
-- FORCE ensures even the table owner is subject to the tenant policy.
ALTER TABLE ledger_attestation_pointers ENABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_attestation_pointers FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS ledger_attestation_pointers_tenant_isolation ON ledger_attestation_pointers;
CREATE POLICY ledger_attestation_pointers_tenant_isolation
ON ledger_attestation_pointers
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 4. Add comments for documentation
-- ============================================
COMMENT ON TABLE ledger_attestation_pointers IS
'Links findings to verification reports and attestation envelopes for explainability (LEDGER-ATTEST-73-001)';
COMMENT ON COLUMN ledger_attestation_pointers.pointer_id IS
'Unique identifier for this attestation pointer';
COMMENT ON COLUMN ledger_attestation_pointers.finding_id IS
'Finding that this pointer references';
COMMENT ON COLUMN ledger_attestation_pointers.attestation_type IS
'Type of attestation: verification_report, dsse_envelope, slsa_provenance, vex_attestation, sbom_attestation, scan_attestation, policy_attestation, approval_attestation';
COMMENT ON COLUMN ledger_attestation_pointers.relationship IS
'Semantic relationship: verified_by, attested_by, signed_by, approved_by, derived_from';
COMMENT ON COLUMN ledger_attestation_pointers.attestation_ref IS
'JSON object containing digest, storage_uri, payload_type, predicate_type, subject_digests, signer_info, rekor_entry';
COMMENT ON COLUMN ledger_attestation_pointers.verification_result IS
'JSON object containing verified (bool), verified_at, verifier, verifier_version, policy_ref, checks, warnings, errors';
COMMENT ON COLUMN ledger_attestation_pointers.ledger_event_id IS
'Reference to the ledger event that recorded this pointer creation';
COMMIT;

View File

@@ -0,0 +1,71 @@
-- Migration: 009_snapshots
-- Description: Creates ledger_snapshots table for time-travel/snapshot functionality
-- Date: 2025-12-07
-- NOTE(review): unlike migration 008 this script is not wrapped in BEGIN/COMMIT;
-- confirm the migration runner supplies its own transaction.
-- Create ledger_snapshots table
CREATE TABLE IF NOT EXISTS ledger_snapshots (
tenant_id TEXT NOT NULL,
snapshot_id UUID NOT NULL,
label TEXT,
description TEXT,
status TEXT NOT NULL DEFAULT 'Creating',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ,
expires_at TIMESTAMPTZ,
-- Ledger position captured by the snapshot (anchor for time-travel queries).
sequence_number BIGINT NOT NULL,
snapshot_timestamp TIMESTAMPTZ NOT NULL,
-- Denormalized statistics, populated asynchronously after creation.
findings_count BIGINT NOT NULL DEFAULT 0,
vex_statements_count BIGINT NOT NULL DEFAULT 0,
advisories_count BIGINT NOT NULL DEFAULT 0,
sboms_count BIGINT NOT NULL DEFAULT 0,
events_count BIGINT NOT NULL DEFAULT 0,
size_bytes BIGINT NOT NULL DEFAULT 0,
-- Integrity/signing artifacts; populated only when signing was requested.
merkle_root TEXT,
dsse_digest TEXT,
metadata JSONB,
include_entity_types JSONB,
sign_requested BOOLEAN NOT NULL DEFAULT FALSE,
PRIMARY KEY (tenant_id, snapshot_id)
);
-- Index for listing snapshots by status
CREATE INDEX IF NOT EXISTS idx_ledger_snapshots_status
ON ledger_snapshots (tenant_id, status, created_at DESC);
-- Index for finding expired snapshots
CREATE INDEX IF NOT EXISTS idx_ledger_snapshots_expires
ON ledger_snapshots (expires_at)
WHERE expires_at IS NOT NULL AND status = 'Available';
-- Index for sequence lookups
CREATE INDEX IF NOT EXISTS idx_ledger_snapshots_sequence
ON ledger_snapshots (tenant_id, sequence_number);
-- Index for label search
CREATE INDEX IF NOT EXISTS idx_ledger_snapshots_label
ON ledger_snapshots (tenant_id, label)
WHERE label IS NOT NULL;
-- Enable RLS
-- NOTE(review): migration 008 also runs FORCE ROW LEVEL SECURITY on its table;
-- without FORCE the table owner bypasses this policy — confirm whether FORCE
-- should be added here for parity.
ALTER TABLE ledger_snapshots ENABLE ROW LEVEL SECURITY;
-- RLS policy for tenant isolation
-- NOTE(review): 008 resolves the tenant via findings_ledger_app.require_current_tenant(),
-- while this policy reads current_setting('app.tenant_id', true) directly —
-- confirm both mechanisms resolve the same tenant value.
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_policies
WHERE tablename = 'ledger_snapshots'
AND policyname = 'ledger_snapshots_tenant_isolation'
) THEN
CREATE POLICY ledger_snapshots_tenant_isolation ON ledger_snapshots
USING (tenant_id = current_setting('app.tenant_id', true))
WITH CHECK (tenant_id = current_setting('app.tenant_id', true));
END IF;
END $$;
-- Add comment
COMMENT ON TABLE ledger_snapshots IS 'Point-in-time snapshots of ledger state for time-travel queries';
COMMENT ON COLUMN ledger_snapshots.sequence_number IS 'Ledger sequence number at snapshot time';
COMMENT ON COLUMN ledger_snapshots.snapshot_timestamp IS 'Timestamp of ledger state captured';
COMMENT ON COLUMN ledger_snapshots.merkle_root IS 'Merkle root hash of all events up to sequence_number';
COMMENT ON COLUMN ledger_snapshots.dsse_digest IS 'DSSE envelope digest if signed';

View File

@@ -0,0 +1,271 @@
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal.Capabilities;
/// <summary>
/// Orchestrates capability scanning across Node.js/JavaScript source files.
/// Per-file pattern matching is delegated to <see cref="NodeCapabilityScanner"/>;
/// this type handles file discovery, filtering, and deterministic
/// (deduplicated, ordered) result assembly.
/// </summary>
internal static class NodeCapabilityScanBuilder
{
    // File extensions treated as scannable JavaScript/TypeScript sources.
    private static readonly string[] SourceExtensions = [".js", ".mjs", ".cjs", ".ts", ".tsx", ".jsx"];

    // Config-file name fragments. Hoisted to a static field so IsConfigFile does
    // not allocate a fresh array on every call (it runs once per candidate file).
    private static readonly string[] ConfigFilePatterns =
    [
        "webpack.config",
        "rollup.config",
        "vite.config",
        "babel.config",
        "jest.config",
        "eslint.config",
        "prettier.config",
        "tsconfig",
        "jsconfig",
        ".eslintrc",
        ".prettierrc",
        ".babelrc",
        "karma.conf",
        "protractor.conf",
        "gulpfile",
        "gruntfile",
        "postcss.config",
        "tailwind.config",
        "next.config",
        "nuxt.config",
        "svelte.config",
        "astro.config",
        "vitest.config"
    ];

    /// <summary>
    /// Recursively scans a Node.js project directory for capabilities.
    /// Returns an empty result when the directory does not exist.
    /// </summary>
    /// <param name="projectPath">Root directory to scan; must be non-empty.</param>
    /// <param name="cancellationToken">Cancellation token checked per file.</param>
    public static NodeCapabilityScanResult ScanProject(string projectPath, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(projectPath);
        if (!Directory.Exists(projectPath))
        {
            return NodeCapabilityScanResult.Empty;
        }

        var allEvidences = new List<NodeCapabilityEvidence>();
        foreach (var sourceFile in EnumerateSourceFiles(projectPath))
        {
            cancellationToken.ThrowIfCancellationRequested();
            ScanFileInto(sourceFile, projectPath, allEvidences);
        }
        return BuildResult(allEvidences);
    }

    /// <summary>
    /// Scans a Node.js project given its package.json location (or a directory).
    /// Scans conventional src/ and lib/ directories plus root-level sources;
    /// falls back to a full recursive scan when that layout yields nothing.
    /// </summary>
    public static NodeCapabilityScanResult ScanPackage(string packageJsonPath, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(packageJsonPath);
        // Accept either the package.json file itself or its containing directory.
        var projectDir = File.Exists(packageJsonPath)
            ? Path.GetDirectoryName(packageJsonPath) ?? packageJsonPath
            : packageJsonPath;
        if (!Directory.Exists(projectDir))
        {
            return NodeCapabilityScanResult.Empty;
        }

        var allEvidences = new List<NodeCapabilityEvidence>();

        // Scan src directory if it exists.
        var srcDir = Path.Combine(projectDir, "src");
        if (Directory.Exists(srcDir))
        {
            allEvidences.AddRange(ScanProject(srcDir, cancellationToken).Evidences);
        }

        // Scan lib directory if it exists.
        var libDir = Path.Combine(projectDir, "lib");
        if (Directory.Exists(libDir))
        {
            allEvidences.AddRange(ScanProject(libDir, cancellationToken).Evidences);
        }

        // Scan root-level source files (non-recursive), skipping config files.
        foreach (var ext in SourceExtensions)
        {
            foreach (var file in Directory.EnumerateFiles(projectDir, $"*{ext}", SearchOption.TopDirectoryOnly))
            {
                cancellationToken.ThrowIfCancellationRequested();
                if (IsConfigFile(Path.GetFileName(file)))
                {
                    continue;
                }
                ScanFileInto(file, projectDir, allEvidences);
            }
        }

        // If no structured directories produced evidence, scan the whole project.
        if (allEvidences.Count == 0)
        {
            return ScanProject(projectDir, cancellationToken);
        }
        return BuildResult(allEvidences);
    }

    /// <summary>
    /// Scans specific JavaScript/TypeScript source content in memory.
    /// Returns an empty result for null/blank content.
    /// </summary>
    public static NodeCapabilityScanResult ScanContent(string content, string filePath)
    {
        if (string.IsNullOrWhiteSpace(content))
        {
            return NodeCapabilityScanResult.Empty;
        }
        var evidences = NodeCapabilityScanner.ScanFile(content, filePath);
        return new NodeCapabilityScanResult(evidences.ToList());
    }

    /// <summary>
    /// Reads one file and appends its evidences to <paramref name="sink"/>.
    /// Unreadable files are skipped silently by design (best-effort scan).
    /// </summary>
    private static void ScanFileInto(string file, string rootForRelativePath, List<NodeCapabilityEvidence> sink)
    {
        try
        {
            var content = File.ReadAllText(file);
            var relativePath = Path.GetRelativePath(rootForRelativePath, file);
            sink.AddRange(NodeCapabilityScanner.ScanFile(content, relativePath));
        }
        catch (IOException)
        {
            // Skip inaccessible files
        }
        catch (UnauthorizedAccessException)
        {
            // Skip inaccessible files
        }
    }

    /// <summary>
    /// Deduplicates and orders evidences so output is deterministic across runs
    /// and platforms (ordinal path ordering, then line, then kind).
    /// </summary>
    private static NodeCapabilityScanResult BuildResult(List<NodeCapabilityEvidence> evidences)
    {
        var finalEvidences = evidences
            .DistinctBy(e => e.DeduplicationKey)
            .OrderBy(e => e.SourceFile, StringComparer.Ordinal)
            .ThenBy(e => e.SourceLine)
            .ThenBy(e => e.Kind)
            .ToList();
        return new NodeCapabilityScanResult(finalEvidences);
    }

    /// <summary>
    /// Enumerates candidate source files under <paramref name="rootPath"/>,
    /// excluding dependency trees, generated output, hidden paths, minified
    /// bundles, and config files. Test files are intentionally NOT skipped —
    /// they can carry real capability signals.
    /// </summary>
    private static IEnumerable<string> EnumerateSourceFiles(string rootPath)
    {
        var options = new EnumerationOptions
        {
            RecurseSubdirectories = true,
            IgnoreInaccessible = true,
            MaxRecursionDepth = 30
        };
        foreach (var ext in SourceExtensions)
        {
            foreach (var file in Directory.EnumerateFiles(rootPath, $"*{ext}", options))
            {
                // Dependency and generated-output trees are never scanned.
                if (ContainsSegment(file, "node_modules") ||
                    ContainsSegment(file, "dist") ||
                    ContainsSegment(file, "build") ||
                    ContainsSegment(file, "out") ||
                    ContainsSegment(file, "coverage"))
                {
                    continue;
                }

                // Skip hidden entries ('.'-prefixed path components), except under
                // .github/.vscode, which may contain real JS sources.
                if ((file.Contains($"{Path.DirectorySeparatorChar}.") ||
                     file.Contains($"{Path.AltDirectorySeparatorChar}.")) &&
                    !file.Contains(".github") && !file.Contains(".vscode"))
                {
                    continue;
                }

                var fileName = Path.GetFileName(file);

                // Minified bundles produce noisy, low-value matches.
                if (fileName.Contains(".min.") || fileName.EndsWith(".min.js", StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }

                if (IsConfigFile(fileName))
                {
                    continue;
                }

                yield return file;
            }
        }
    }

    /// <summary>
    /// True when the path contains the given directory name as a complete
    /// path segment (checked with both separator styles).
    /// </summary>
    private static bool ContainsSegment(string path, string segment)
        => path.Contains($"{Path.DirectorySeparatorChar}{segment}{Path.DirectorySeparatorChar}") ||
           path.Contains($"{Path.AltDirectorySeparatorChar}{segment}{Path.AltDirectorySeparatorChar}");

    /// <summary>
    /// True when the file name matches a known build/tooling config pattern.
    /// </summary>
    private static bool IsConfigFile(string fileName)
    {
        var lowerFileName = fileName.ToLowerInvariant();
        return ConfigFilePatterns.Any(p => lowerFileName.Contains(p));
    }
}

View File

@@ -0,0 +1,538 @@
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal.Capabilities;
/// <summary>
/// Scans Node.js/JavaScript source files for security-relevant capabilities.
/// Detects patterns for command execution, file I/O, network access,
/// serialization, dynamic code evaluation, native addons, and more.
/// </summary>
internal static class NodeCapabilityScanner
{
// ========================================
// EXEC - Command/Process Execution (Critical)
// ========================================
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] ExecPatterns =
[
// child_process module
(new Regex(@"require\s*\(\s*['""]child_process['""]", RegexOptions.Compiled), "require('child_process')", CapabilityRisk.Critical, 1.0f),
(new Regex(@"from\s+['""]child_process['""]", RegexOptions.Compiled), "import child_process", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*exec\s*\(", RegexOptions.Compiled), "child_process.exec", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*execSync\s*\(", RegexOptions.Compiled), "child_process.execSync", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*spawn\s*\(", RegexOptions.Compiled), "child_process.spawn", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*spawnSync\s*\(", RegexOptions.Compiled), "child_process.spawnSync", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*fork\s*\(", RegexOptions.Compiled), "child_process.fork", CapabilityRisk.High, 0.95f),
(new Regex(@"child_process\s*\.\s*execFile\s*\(", RegexOptions.Compiled), "child_process.execFile", CapabilityRisk.Critical, 1.0f),
(new Regex(@"child_process\s*\.\s*execFileSync\s*\(", RegexOptions.Compiled), "child_process.execFileSync", CapabilityRisk.Critical, 1.0f),
// Destructured imports
(new Regex(@"\{\s*(?:exec|execSync|spawn|spawnSync|fork|execFile)\s*\}", RegexOptions.Compiled), "destructured child_process", CapabilityRisk.Critical, 0.9f),
// Shell execution via execa, shelljs, etc.
(new Regex(@"require\s*\(\s*['""]execa['""]", RegexOptions.Compiled), "require('execa')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"require\s*\(\s*['""]shelljs['""]", RegexOptions.Compiled), "require('shelljs')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"shell\s*\.\s*exec\s*\(", RegexOptions.Compiled), "shelljs.exec", CapabilityRisk.Critical, 0.9f),
// process.binding for internal access
(new Regex(@"process\s*\.\s*binding\s*\(", RegexOptions.Compiled), "process.binding", CapabilityRisk.Critical, 0.95f),
];
// ========================================
// FILESYSTEM - File/Directory Operations
// ========================================
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] FilesystemPatterns =
[
// fs module
(new Regex(@"require\s*\(\s*['""]fs['""]", RegexOptions.Compiled), "require('fs')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]fs/promises['""]", RegexOptions.Compiled), "require('fs/promises')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"from\s+['""]fs['""]", RegexOptions.Compiled), "import fs", CapabilityRisk.Medium, 0.9f),
(new Regex(@"from\s+['""]fs/promises['""]", RegexOptions.Compiled), "import fs/promises", CapabilityRisk.Medium, 0.9f),
(new Regex(@"from\s+['""]node:fs['""]", RegexOptions.Compiled), "import node:fs", CapabilityRisk.Medium, 0.9f),
// Read operations
(new Regex(@"fs\s*\.\s*readFile(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.readFile", CapabilityRisk.Medium, 0.85f),
(new Regex(@"fs\s*\.\s*readdir(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.readdir", CapabilityRisk.Medium, 0.8f),
(new Regex(@"fs\s*\.\s*createReadStream\s*\(", RegexOptions.Compiled), "fs.createReadStream", CapabilityRisk.Medium, 0.85f),
// Write operations (higher risk)
(new Regex(@"fs\s*\.\s*writeFile(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.writeFile", CapabilityRisk.High, 0.9f),
(new Regex(@"fs\s*\.\s*appendFile(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.appendFile", CapabilityRisk.High, 0.85f),
(new Regex(@"fs\s*\.\s*createWriteStream\s*\(", RegexOptions.Compiled), "fs.createWriteStream", CapabilityRisk.High, 0.9f),
(new Regex(@"fs\s*\.\s*mkdir(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.mkdir", CapabilityRisk.Medium, 0.8f),
// Delete operations (high risk)
(new Regex(@"fs\s*\.\s*unlink(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.unlink", CapabilityRisk.High, 0.9f),
(new Regex(@"fs\s*\.\s*rmdir(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.rmdir", CapabilityRisk.High, 0.9f),
(new Regex(@"fs\s*\.\s*rm(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.rm", CapabilityRisk.High, 0.9f),
// Permission operations
(new Regex(@"fs\s*\.\s*chmod(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.chmod", CapabilityRisk.High, 0.9f),
(new Regex(@"fs\s*\.\s*chown(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.chown", CapabilityRisk.High, 0.9f),
// Symlink (can be used for path traversal)
(new Regex(@"fs\s*\.\s*symlink(?:Sync)?\s*\(", RegexOptions.Compiled), "fs.symlink", CapabilityRisk.High, 0.85f),
// fs-extra
(new Regex(@"require\s*\(\s*['""]fs-extra['""]", RegexOptions.Compiled), "require('fs-extra')", CapabilityRisk.Medium, 0.85f),
];
// ========================================
// NETWORK - Network I/O
// ========================================
/// <summary>
/// Regex heuristics for network I/O: core Node modules (net/http/https/dgram/tls),
/// socket and HTTP server/client calls, the Fetch API, popular third-party HTTP
/// clients, WebSocket, and DNS lookups. Confidence reflects how likely a textual
/// match is to denote real use of the capability.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] NetworkPatterns =
[
// Core modules
(new Regex(@"require\s*\(\s*['""]net['""]", RegexOptions.Compiled), "require('net')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]http['""]", RegexOptions.Compiled), "require('http')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]https['""]", RegexOptions.Compiled), "require('https')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]dgram['""]", RegexOptions.Compiled), "require('dgram')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]tls['""]", RegexOptions.Compiled), "require('tls')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"from\s+['""]node:(?:net|http|https|dgram|tls)['""]", RegexOptions.Compiled), "import node:network", CapabilityRisk.Medium, 0.9f),
// Socket operations
(new Regex(@"net\s*\.\s*createServer\s*\(", RegexOptions.Compiled), "net.createServer", CapabilityRisk.Medium, 0.9f),
(new Regex(@"net\s*\.\s*createConnection\s*\(", RegexOptions.Compiled), "net.createConnection", CapabilityRisk.Medium, 0.85f),
(new Regex(@"net\s*\.\s*connect\s*\(", RegexOptions.Compiled), "net.connect", CapabilityRisk.Medium, 0.85f),
// HTTP operations
(new Regex(@"http\s*\.\s*createServer\s*\(", RegexOptions.Compiled), "http.createServer", CapabilityRisk.Medium, 0.85f),
(new Regex(@"http\s*\.\s*request\s*\(", RegexOptions.Compiled), "http.request", CapabilityRisk.Medium, 0.8f),
(new Regex(@"http\s*\.\s*get\s*\(", RegexOptions.Compiled), "http.get", CapabilityRisk.Medium, 0.8f),
(new Regex(@"https\s*\.\s*request\s*\(", RegexOptions.Compiled), "https.request", CapabilityRisk.Medium, 0.8f),
// Fetch API
(new Regex(@"\bfetch\s*\(", RegexOptions.Compiled), "fetch", CapabilityRisk.Medium, 0.75f),
(new Regex(@"require\s*\(\s*['""]node-fetch['""]", RegexOptions.Compiled), "require('node-fetch')", CapabilityRisk.Medium, 0.85f),
// Axios, got, request
(new Regex(@"require\s*\(\s*['""]axios['""]", RegexOptions.Compiled), "require('axios')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]got['""]", RegexOptions.Compiled), "require('got')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]request['""]", RegexOptions.Compiled), "require('request')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]superagent['""]", RegexOptions.Compiled), "require('superagent')", CapabilityRisk.Medium, 0.85f),
// WebSocket
(new Regex(@"require\s*\(\s*['""]ws['""]", RegexOptions.Compiled), "require('ws')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"new\s+WebSocket\s*\(", RegexOptions.Compiled), "WebSocket", CapabilityRisk.Medium, 0.8f),
// DNS
(new Regex(@"require\s*\(\s*['""]dns['""]", RegexOptions.Compiled), "require('dns')", CapabilityRisk.Low, 0.8f),
];
// ========================================
// ENVIRONMENT - Environment Variables
// ========================================
/// <summary>
/// Regex heuristics for environment and process-context access: process.env
/// reads (dot and bracket form), dotenv loading, and working-directory /
/// argv inspection.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] EnvironmentPatterns =
[
(new Regex(@"process\s*\.\s*env\b", RegexOptions.Compiled), "process.env", CapabilityRisk.Medium, 0.85f),
(new Regex(@"process\s*\.\s*env\s*\[", RegexOptions.Compiled), "process.env[]", CapabilityRisk.Medium, 0.9f),
(new Regex(@"process\s*\.\s*env\s*\.\s*\w+", RegexOptions.Compiled), "process.env.*", CapabilityRisk.Medium, 0.85f),
// dotenv
(new Regex(@"require\s*\(\s*['""]dotenv['""]", RegexOptions.Compiled), "require('dotenv')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"dotenv\s*\.\s*config\s*\(", RegexOptions.Compiled), "dotenv.config", CapabilityRisk.Medium, 0.85f),
// process info
(new Regex(@"process\s*\.\s*cwd\s*\(\s*\)", RegexOptions.Compiled), "process.cwd", CapabilityRisk.Low, 0.75f),
(new Regex(@"process\s*\.\s*chdir\s*\(", RegexOptions.Compiled), "process.chdir", CapabilityRisk.Medium, 0.85f),
(new Regex(@"process\s*\.\s*argv\b", RegexOptions.Compiled), "process.argv", CapabilityRisk.Low, 0.7f),
];
// ========================================
// SERIALIZATION - Data Serialization
// ========================================
/// <summary>
/// Regex heuristics for (de)serialization. Known-dangerous deserializers
/// (node-serialize, which can execute embedded functions) are rated Critical;
/// yaml.load and v8.deserialize are High because they can instantiate
/// attacker-controlled structures.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] SerializationPatterns =
[
// JSON.parse with reviver (potential code execution)
(new Regex(@"JSON\s*\.\s*parse\s*\([^,)]+,\s*\w+", RegexOptions.Compiled), "JSON.parse with reviver", CapabilityRisk.Medium, 0.7f),
// Dangerous serializers - node-serialize is known vulnerable
(new Regex(@"require\s*\(\s*['""]node-serialize['""]", RegexOptions.Compiled), "require('node-serialize')", CapabilityRisk.Critical, 1.0f),
(new Regex(@"serialize\s*\.\s*unserialize\s*\(", RegexOptions.Compiled), "node-serialize.unserialize", CapabilityRisk.Critical, 1.0f),
// serialize-javascript
(new Regex(@"require\s*\(\s*['""]serialize-javascript['""]", RegexOptions.Compiled), "require('serialize-javascript')", CapabilityRisk.High, 0.85f),
// js-yaml (load is unsafe by default in older versions)
(new Regex(@"require\s*\(\s*['""]js-yaml['""]", RegexOptions.Compiled), "require('js-yaml')", CapabilityRisk.Medium, 0.8f),
(new Regex(@"yaml\s*\.\s*load\s*\(", RegexOptions.Compiled), "yaml.load", CapabilityRisk.High, 0.85f),
// Pickle-like serializers
(new Regex(@"require\s*\(\s*['""]v8['""]", RegexOptions.Compiled), "require('v8')", CapabilityRisk.High, 0.85f),
(new Regex(@"v8\s*\.\s*deserialize\s*\(", RegexOptions.Compiled), "v8.deserialize", CapabilityRisk.High, 0.9f),
(new Regex(@"v8\s*\.\s*serialize\s*\(", RegexOptions.Compiled), "v8.serialize", CapabilityRisk.Medium, 0.8f),
];
// ========================================
// CRYPTO - Cryptographic Operations
// ========================================
/// <summary>
/// Regex heuristics for cryptographic APIs. Legitimate crypto use is Low risk;
/// the only High entry flags weak digest algorithms (MD5/SHA-1) passed to
/// createHash.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] CryptoPatterns =
[
(new Regex(@"require\s*\(\s*['""]crypto['""]", RegexOptions.Compiled), "require('crypto')", CapabilityRisk.Low, 0.85f),
(new Regex(@"from\s+['""](?:node:)?crypto['""]", RegexOptions.Compiled), "import crypto", CapabilityRisk.Low, 0.85f),
// Specific crypto operations
(new Regex(@"crypto\s*\.\s*createHash\s*\(", RegexOptions.Compiled), "crypto.createHash", CapabilityRisk.Low, 0.85f),
(new Regex(@"crypto\s*\.\s*createCipher(?:iv)?\s*\(", RegexOptions.Compiled), "crypto.createCipher", CapabilityRisk.Low, 0.85f),
(new Regex(@"crypto\s*\.\s*createDecipher(?:iv)?\s*\(", RegexOptions.Compiled), "crypto.createDecipher", CapabilityRisk.Low, 0.85f),
(new Regex(@"crypto\s*\.\s*createSign\s*\(", RegexOptions.Compiled), "crypto.createSign", CapabilityRisk.Low, 0.85f),
(new Regex(@"crypto\s*\.\s*createVerify\s*\(", RegexOptions.Compiled), "crypto.createVerify", CapabilityRisk.Low, 0.85f),
(new Regex(@"crypto\s*\.\s*randomBytes\s*\(", RegexOptions.Compiled), "crypto.randomBytes", CapabilityRisk.Low, 0.8f),
(new Regex(@"crypto\s*\.\s*pbkdf2\s*\(", RegexOptions.Compiled), "crypto.pbkdf2", CapabilityRisk.Low, 0.85f),
// Third-party crypto
(new Regex(@"require\s*\(\s*['""]bcrypt['""]", RegexOptions.Compiled), "require('bcrypt')", CapabilityRisk.Low, 0.85f),
(new Regex(@"require\s*\(\s*['""]argon2['""]", RegexOptions.Compiled), "require('argon2')", CapabilityRisk.Low, 0.85f),
// Weak crypto
(new Regex(@"createHash\s*\(\s*['""](?:md5|sha1)['""]", RegexOptions.Compiled | RegexOptions.IgnoreCase), "Weak hash algorithm", CapabilityRisk.High, 0.9f),
];
// ========================================
// DATABASE - Database Access
// ========================================
/// <summary>
/// Regex heuristics for database access: SQL/NoSQL driver imports, raw query
/// execution, and SQL-injection-shaped string building (concatenation or
/// template interpolation into SQL text), which is rated Critical.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] DatabasePatterns =
[
// SQL databases
(new Regex(@"require\s*\(\s*['""]mysql2?['""]", RegexOptions.Compiled), "require('mysql')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]pg['""]", RegexOptions.Compiled), "require('pg')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]sqlite3['""]", RegexOptions.Compiled), "require('sqlite3')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]better-sqlite3['""]", RegexOptions.Compiled), "require('better-sqlite3')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]mssql['""]", RegexOptions.Compiled), "require('mssql')", CapabilityRisk.Medium, 0.9f),
// NoSQL databases
(new Regex(@"require\s*\(\s*['""]mongodb['""]", RegexOptions.Compiled), "require('mongodb')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]mongoose['""]", RegexOptions.Compiled), "require('mongoose')", CapabilityRisk.Medium, 0.9f),
(new Regex(@"require\s*\(\s*['""]redis['""]", RegexOptions.Compiled), "require('redis')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]ioredis['""]", RegexOptions.Compiled), "require('ioredis')", CapabilityRisk.Medium, 0.85f),
// Query execution
(new Regex(@"\.query\s*\(\s*[`'""](?:SELECT|INSERT|UPDATE|DELETE|DROP|CREATE|ALTER)", RegexOptions.Compiled | RegexOptions.IgnoreCase), "Raw SQL query", CapabilityRisk.High, 0.9f),
(new Regex(@"\.exec\s*\(\s*[`'""](?:SELECT|INSERT|UPDATE|DELETE)", RegexOptions.Compiled | RegexOptions.IgnoreCase), "Raw SQL exec", CapabilityRisk.High, 0.85f),
// SQL injection patterns - string concatenation
(new Regex(@"[`'""](?:SELECT|INSERT|UPDATE|DELETE)\s+.*[`'""]\s*\+", RegexOptions.Compiled | RegexOptions.IgnoreCase), "SQL string concatenation", CapabilityRisk.Critical, 0.9f),
(new Regex(@"\$\{.*\}.*(?:SELECT|INSERT|UPDATE|DELETE)", RegexOptions.Compiled | RegexOptions.IgnoreCase), "SQL template literal injection", CapabilityRisk.Critical, 0.85f),
// ORMs
(new Regex(@"require\s*\(\s*['""]sequelize['""]", RegexOptions.Compiled), "require('sequelize')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]typeorm['""]", RegexOptions.Compiled), "require('typeorm')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]prisma['""]", RegexOptions.Compiled), "require('prisma')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"require\s*\(\s*['""]knex['""]", RegexOptions.Compiled), "require('knex')", CapabilityRisk.Medium, 0.85f),
];
// ========================================
// DYNAMIC CODE - Code Evaluation (Critical)
// ========================================
/// <summary>
/// Regex heuristics for dynamic code evaluation: eval, the Function
/// constructor, the vm/vm2 modules, string-form setTimeout/setInterval, and
/// code-executing template engines. Almost everything here is Critical.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] DynamicCodePatterns =
[
// eval - most dangerous
(new Regex(@"\beval\s*\(", RegexOptions.Compiled), "eval", CapabilityRisk.Critical, 1.0f),
// Function constructor
(new Regex(@"new\s+Function\s*\(", RegexOptions.Compiled), "new Function", CapabilityRisk.Critical, 1.0f),
(new Regex(@"Function\s*\(\s*[^)]+\)\s*\(", RegexOptions.Compiled), "Function()()", CapabilityRisk.Critical, 0.95f),
// vm module
(new Regex(@"require\s*\(\s*['""]vm['""]", RegexOptions.Compiled), "require('vm')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"from\s+['""](?:node:)?vm['""]", RegexOptions.Compiled), "import vm", CapabilityRisk.Critical, 0.95f),
(new Regex(@"vm\s*\.\s*runInContext\s*\(", RegexOptions.Compiled), "vm.runInContext", CapabilityRisk.Critical, 1.0f),
(new Regex(@"vm\s*\.\s*runInNewContext\s*\(", RegexOptions.Compiled), "vm.runInNewContext", CapabilityRisk.Critical, 1.0f),
(new Regex(@"vm\s*\.\s*runInThisContext\s*\(", RegexOptions.Compiled), "vm.runInThisContext", CapabilityRisk.Critical, 1.0f),
(new Regex(@"vm\s*\.\s*Script\s*\(", RegexOptions.Compiled), "vm.Script", CapabilityRisk.Critical, 0.95f),
(new Regex(@"new\s+vm\s*\.\s*Script\s*\(", RegexOptions.Compiled), "new vm.Script", CapabilityRisk.Critical, 0.95f),
// setTimeout/setInterval with strings (eval-like)
(new Regex(@"setTimeout\s*\(\s*['""`]", RegexOptions.Compiled), "setTimeout with string", CapabilityRisk.Critical, 0.9f),
(new Regex(@"setInterval\s*\(\s*['""`]", RegexOptions.Compiled), "setInterval with string", CapabilityRisk.Critical, 0.9f),
// Template engines (can execute code)
(new Regex(@"require\s*\(\s*['""]ejs['""]", RegexOptions.Compiled), "require('ejs')", CapabilityRisk.High, 0.8f),
(new Regex(@"require\s*\(\s*['""]pug['""]", RegexOptions.Compiled), "require('pug')", CapabilityRisk.Medium, 0.75f),
(new Regex(@"require\s*\(\s*['""]handlebars['""]", RegexOptions.Compiled), "require('handlebars')", CapabilityRisk.Medium, 0.7f),
// vm2 (sandbox escape vulnerabilities)
(new Regex(@"require\s*\(\s*['""]vm2['""]", RegexOptions.Compiled), "require('vm2')", CapabilityRisk.High, 0.9f),
];
// ========================================
// REFLECTION - Code Introspection
// ========================================
/// <summary>
/// Regex heuristics for reflection and prototype manipulation. Prototype
/// writes (__proto__, setPrototypeOf) and constructor access are High risk
/// because they enable prototype-pollution style attacks.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] ReflectionPatterns =
[
// Reflect API
(new Regex(@"Reflect\s*\.\s*(?:get|set|has|defineProperty|deleteProperty|apply|construct)\s*\(", RegexOptions.Compiled), "Reflect.*", CapabilityRisk.Medium, 0.8f),
// Proxy
(new Regex(@"new\s+Proxy\s*\(", RegexOptions.Compiled), "new Proxy", CapabilityRisk.Medium, 0.8f),
// Property access via bracket notation with variables
(new Regex(@"\[\s*\w+\s*\]\s*\(", RegexOptions.Compiled), "Dynamic property call", CapabilityRisk.Medium, 0.65f),
// Object introspection
(new Regex(@"Object\s*\.\s*getOwnPropertyDescriptor\s*\(", RegexOptions.Compiled), "Object.getOwnPropertyDescriptor", CapabilityRisk.Low, 0.7f),
(new Regex(@"Object\s*\.\s*getPrototypeOf\s*\(", RegexOptions.Compiled), "Object.getPrototypeOf", CapabilityRisk.Low, 0.7f),
(new Regex(@"Object\s*\.\s*setPrototypeOf\s*\(", RegexOptions.Compiled), "Object.setPrototypeOf", CapabilityRisk.High, 0.85f),
(new Regex(@"__proto__", RegexOptions.Compiled), "__proto__", CapabilityRisk.High, 0.9f),
// constructor access
(new Regex(@"\.constructor\s*\(", RegexOptions.Compiled), ".constructor()", CapabilityRisk.High, 0.85f),
(new Regex(@"\[['""]\s*constructor\s*['""]", RegexOptions.Compiled), "['constructor']", CapabilityRisk.High, 0.85f),
];
// ========================================
// NATIVE CODE - Native Addons
// ========================================
/// <summary>
/// Regex heuristics for native-code boundaries: .node addon loading,
/// process.dlopen, FFI packages, and WebAssembly compilation/instantiation —
/// all paths that escape the JavaScript sandbox.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] NativeCodePatterns =
[
// Native addon loading
(new Regex(@"require\s*\([^)]*\.node['""]?\s*\)", RegexOptions.Compiled), "require('.node')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"process\s*\.\s*dlopen\s*\(", RegexOptions.Compiled), "process.dlopen", CapabilityRisk.Critical, 1.0f),
// N-API / node-addon-api
(new Regex(@"require\s*\(\s*['""]node-addon-api['""]", RegexOptions.Compiled), "require('node-addon-api')", CapabilityRisk.High, 0.9f),
(new Regex(@"require\s*\(\s*['""]bindings['""]", RegexOptions.Compiled), "require('bindings')", CapabilityRisk.High, 0.9f),
// FFI
(new Regex(@"require\s*\(\s*['""]ffi-napi['""]", RegexOptions.Compiled), "require('ffi-napi')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"require\s*\(\s*['""]node-ffi['""]", RegexOptions.Compiled), "require('node-ffi')", CapabilityRisk.Critical, 0.95f),
(new Regex(@"require\s*\(\s*['""]ref-napi['""]", RegexOptions.Compiled), "require('ref-napi')", CapabilityRisk.High, 0.9f),
// WebAssembly
(new Regex(@"WebAssembly\s*\.\s*instantiate\s*\(", RegexOptions.Compiled), "WebAssembly.instantiate", CapabilityRisk.High, 0.9f),
(new Regex(@"WebAssembly\s*\.\s*compile\s*\(", RegexOptions.Compiled), "WebAssembly.compile", CapabilityRisk.High, 0.9f),
(new Regex(@"new\s+WebAssembly\s*\.\s*Module\s*\(", RegexOptions.Compiled), "new WebAssembly.Module", CapabilityRisk.High, 0.9f),
(new Regex(@"new\s+WebAssembly\s*\.\s*Instance\s*\(", RegexOptions.Compiled), "new WebAssembly.Instance", CapabilityRisk.High, 0.9f),
];
// ========================================
// OTHER - Worker threads, cluster, etc.
// ========================================
/// <summary>
/// Catch-all heuristics: worker threads, cluster forking, process
/// termination/signalling, dynamic module loading (including
/// require(variable), which defeats static analysis), and the inspector.
/// </summary>
private static readonly (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] OtherPatterns =
[
// Worker threads
(new Regex(@"require\s*\(\s*['""]worker_threads['""]", RegexOptions.Compiled), "require('worker_threads')", CapabilityRisk.Medium, 0.85f),
(new Regex(@"from\s+['""](?:node:)?worker_threads['""]", RegexOptions.Compiled), "import worker_threads", CapabilityRisk.Medium, 0.85f),
(new Regex(@"new\s+Worker\s*\(", RegexOptions.Compiled), "new Worker", CapabilityRisk.Medium, 0.8f),
// Cluster
(new Regex(@"require\s*\(\s*['""]cluster['""]", RegexOptions.Compiled), "require('cluster')", CapabilityRisk.Medium, 0.8f),
(new Regex(@"cluster\s*\.\s*fork\s*\(", RegexOptions.Compiled), "cluster.fork", CapabilityRisk.Medium, 0.85f),
// Process manipulation
(new Regex(@"process\s*\.\s*exit\s*\(", RegexOptions.Compiled), "process.exit", CapabilityRisk.Medium, 0.8f),
(new Regex(@"process\s*\.\s*kill\s*\(", RegexOptions.Compiled), "process.kill", CapabilityRisk.High, 0.9f),
(new Regex(@"process\s*\.\s*abort\s*\(", RegexOptions.Compiled), "process.abort", CapabilityRisk.High, 0.9f),
// Module loading
(new Regex(@"require\s*\.\s*resolve\s*\(", RegexOptions.Compiled), "require.resolve", CapabilityRisk.Low, 0.7f),
(new Regex(@"import\s*\(", RegexOptions.Compiled), "dynamic import()", CapabilityRisk.Medium, 0.75f),
(new Regex(@"require\s*\(\s*\w+\s*\)", RegexOptions.Compiled), "require(variable)", CapabilityRisk.High, 0.85f),
// Inspector/debugger
(new Regex(@"require\s*\(\s*['""]inspector['""]", RegexOptions.Compiled), "require('inspector')", CapabilityRisk.High, 0.9f),
(new Regex(@"\bdebugger\b", RegexOptions.Compiled), "debugger statement", CapabilityRisk.Medium, 0.75f),
];
/// <summary>
/// Scans a Node.js source file for capability usages.
/// Comments are stripped first so commented-out calls are not reported
/// (StripComments preserves newlines, so reported line numbers stay accurate),
/// then every line is matched against each pattern category in a fixed order.
/// </summary>
/// <param name="content">Raw JavaScript/TypeScript source text; null/blank yields nothing.</param>
/// <param name="filePath">Path recorded verbatim on each evidence item.</param>
/// <returns>Lazy sequence of evidence items, ordered by line then by category.</returns>
public static IEnumerable<NodeCapabilityEvidence> ScanFile(string content, string filePath)
{
    if (string.IsNullOrWhiteSpace(content))
    {
        yield break;
    }

    // Strip comments for more accurate detection.
    var cleanedContent = StripComments(content);
    var lines = cleanedContent.Split('\n');

    // Table of (patterns, kind) pairs replaces eleven copy-pasted foreach
    // blocks; the order fixes the per-line emission order and must match the
    // original Exec → ... → Other sequence.
    var categories = new ((Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] Patterns, CapabilityKind Kind)[]
    {
        (ExecPatterns, CapabilityKind.Exec),
        (FilesystemPatterns, CapabilityKind.Filesystem),
        (NetworkPatterns, CapabilityKind.Network),
        (EnvironmentPatterns, CapabilityKind.Environment),
        (SerializationPatterns, CapabilityKind.Serialization),
        (CryptoPatterns, CapabilityKind.Crypto),
        (DatabasePatterns, CapabilityKind.Database),
        (DynamicCodePatterns, CapabilityKind.DynamicCode),
        (ReflectionPatterns, CapabilityKind.Reflection),
        (NativeCodePatterns, CapabilityKind.NativeCode),
        (OtherPatterns, CapabilityKind.Other),
    };

    for (var lineNumber = 0; lineNumber < lines.Length; lineNumber++)
    {
        var line = lines[lineNumber];
        var lineNum = lineNumber + 1; // Evidence line numbers are 1-based.

        foreach (var (patterns, kind) in categories)
        {
            foreach (var evidence in ScanPatterns(line, lineNum, filePath, patterns, kind))
            {
                yield return evidence;
            }
        }
    }
}
/// <summary>
/// Yields one evidence item for every pattern in <paramref name="patterns"/>
/// that matches <paramref name="line"/>; the snippet recorded is the trimmed line.
/// </summary>
private static IEnumerable<NodeCapabilityEvidence> ScanPatterns(
    string line,
    int lineNumber,
    string filePath,
    (Regex Pattern, string Name, CapabilityRisk Risk, float Confidence)[] patterns,
    CapabilityKind kind)
{
    for (var index = 0; index < patterns.Length; index++)
    {
        var entry = patterns[index];
        if (!entry.Pattern.IsMatch(line))
        {
            continue;
        }

        yield return new NodeCapabilityEvidence(
            kind: kind,
            sourceFile: filePath,
            sourceLine: lineNumber,
            pattern: entry.Name,
            snippet: line.Trim(),
            confidence: entry.Confidence,
            risk: entry.Risk);
    }
}
/// <summary>
/// Strips single-line (//) and multi-line (/* */) comments from JavaScript
/// source while preserving string literals, template literals, and line
/// structure (newlines inside removed comments are kept so line numbers stay
/// accurate). Regex literals are not recognised, so "//" inside a regex is
/// treated as a comment start — an accepted limitation of this heuristic.
/// </summary>
/// <param name="content">JavaScript/TypeScript source text.</param>
/// <returns>The source with comment text removed.</returns>
private static string StripComments(string content)
{
    var sb = new StringBuilder(content.Length);
    var i = 0;
    var inString = false;
    var inTemplate = false;
    var stringChar = '"';

    while (i < content.Length)
    {
        // Copy escape sequences in strings/templates as a pair so an escaped
        // quote or backtick does not toggle the string state.
        if ((inString || inTemplate) && content[i] == '\\' && i + 1 < content.Length)
        {
            sb.Append(content[i]);
            sb.Append(content[i + 1]);
            i += 2;
            continue;
        }

        // Backticks toggle template-literal state (ignored inside '...'/"...").
        if (!inString && content[i] == '`')
        {
            inTemplate = !inTemplate;
            sb.Append(content[i]);
            i++;
            continue;
        }

        // Quote characters open/close string literals; inside a template
        // literal they are plain text.
        if (!inTemplate && (content[i] == '"' || content[i] == '\''))
        {
            if (!inString)
            {
                inString = true;
                stringChar = content[i];
            }
            else if (content[i] == stringChar)
            {
                inString = false;
            }

            sb.Append(content[i]);
            i++;
            continue;
        }

        // Comments are only recognised outside string/template literals.
        if (!inString && !inTemplate)
        {
            // Single-line comment: drop everything up to the newline, which
            // is kept to preserve line numbering.
            if (i + 1 < content.Length && content[i] == '/' && content[i + 1] == '/')
            {
                while (i < content.Length && content[i] != '\n')
                {
                    i++;
                }

                if (i < content.Length)
                {
                    sb.Append('\n');
                    i++;
                }

                continue;
            }

            // Multi-line comment: drop the body but keep its newlines.
            // Bug fix: an unterminated /* comment now consumes the rest of
            // the input instead of leaking its final character into the
            // output (the old loop stopped one character early).
            if (i + 1 < content.Length && content[i] == '/' && content[i + 1] == '*')
            {
                i += 2;
                while (i < content.Length)
                {
                    if (i + 1 < content.Length && content[i] == '*' && content[i + 1] == '/')
                    {
                        i += 2; // Skip the closing */
                        break;
                    }

                    if (content[i] == '\n')
                    {
                        sb.Append('\n'); // Preserve newlines for line number accuracy
                    }

                    i++;
                }

                continue;
            }
        }

        sb.Append(content[i]);
        i++;
    }

    return sb.ToString();
}
}

View File

@@ -0,0 +1,376 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
/// <summary>
/// Unit tests for <c>ComposerLockData</c> parsing of composer.lock files.
/// Each test writes a lock file into a throw-away temp directory and loads it
/// through a fresh analyzer context rooted at that directory.
/// </summary>
public sealed class ComposerLockReaderTests : IDisposable
{
// Per-test temp directory that acts as the analyzer root.
private readonly string _testDir;
public ComposerLockReaderTests()
{
// Unique name per test instance so parallel test runs cannot collide.
_testDir = Path.Combine(Path.GetTempPath(), $"php-lock-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_testDir);
}
// Best-effort cleanup; deletion failures (e.g. file locks) must not fail the test.
public void Dispose()
{
try
{
if (Directory.Exists(_testDir))
{
Directory.Delete(_testDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
[Fact]
public async Task LoadAsync_NoLockFile_ReturnsEmpty()
{
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.True(result.IsEmpty);
Assert.Empty(result.Packages);
Assert.Empty(result.DevPackages);
}
[Fact]
public async Task LoadAsync_ValidLockFile_ParsesPackages()
{
var lockContent = @"{
""content-hash"": ""abc123def456"",
""plugin-api-version"": ""2.6.0"",
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.2.3"",
""type"": ""library""
}
],
""packages-dev"": []
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.False(result.IsEmpty);
Assert.Single(result.Packages);
Assert.Equal("vendor/package", result.Packages[0].Name);
Assert.Equal("1.2.3", result.Packages[0].Version);
Assert.Equal("library", result.Packages[0].Type);
Assert.False(result.Packages[0].IsDev);
}
[Fact]
public async Task LoadAsync_ParsesDevPackages()
{
var lockContent = @"{
""packages"": [],
""packages-dev"": [
{
""name"": ""phpunit/phpunit"",
""version"": ""10.0.0"",
""type"": ""library""
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.DevPackages);
Assert.Equal("phpunit/phpunit", result.DevPackages[0].Name);
Assert.True(result.DevPackages[0].IsDev);
}
[Fact]
public async Task LoadAsync_ParsesContentHashAndPluginApi()
{
var lockContent = @"{
""content-hash"": ""a1b2c3d4e5f6"",
""plugin-api-version"": ""2.3.0"",
""packages"": []
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Equal("a1b2c3d4e5f6", result.ContentHash);
Assert.Equal("2.3.0", result.PluginApiVersion);
}
[Fact]
public async Task LoadAsync_ParsesSourceInfo()
{
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""source"": {
""type"": ""git"",
""url"": ""https://github.com/vendor/package.git"",
""reference"": ""abc123def456789""
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal("git", result.Packages[0].SourceType);
Assert.Equal("abc123def456789", result.Packages[0].SourceReference);
}
[Fact]
public async Task LoadAsync_ParsesDistInfo()
{
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""dist"": {
""type"": ""zip"",
""url"": ""https://packagist.org/vendor/package/1.0.0"",
""shasum"": ""sha256hashhere""
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal("sha256hashhere", result.Packages[0].DistSha);
Assert.Equal("https://packagist.org/vendor/package/1.0.0", result.Packages[0].DistUrl);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadPsr4()
{
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""autoload"": {
""psr-4"": {
""Vendor\\Package\\"": ""src/""
}
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.NotEmpty(result.Packages[0].Autoload.Psr4);
// PSR-4 entries are flattened into "prefix->path" strings by the reader.
Assert.Contains("Vendor\\Package\\->src/", result.Packages[0].Autoload.Psr4);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadClassmap()
{
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""autoload"": {
""classmap"": [
""src/"",
""lib/""
]
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal(2, result.Packages[0].Autoload.Classmap.Count);
Assert.Contains("src/", result.Packages[0].Autoload.Classmap);
Assert.Contains("lib/", result.Packages[0].Autoload.Classmap);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadFiles()
{
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""autoload"": {
""files"": [
""src/helpers.php"",
""src/functions.php""
]
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal(2, result.Packages[0].Autoload.Files.Count);
Assert.Contains("src/helpers.php", result.Packages[0].Autoload.Files);
}
[Fact]
public async Task LoadAsync_MultiplePackages_ParsesAll()
{
var lockContent = @"{
""packages"": [
{ ""name"": ""vendor/first"", ""version"": ""1.0.0"" },
{ ""name"": ""vendor/second"", ""version"": ""2.0.0"" },
{ ""name"": ""vendor/third"", ""version"": ""3.0.0"" }
],
""packages-dev"": [
{ ""name"": ""dev/tool"", ""version"": ""0.1.0"" }
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Equal(3, result.Packages.Count);
Assert.Single(result.DevPackages);
}
[Fact]
public async Task LoadAsync_ComputesSha256()
{
var lockContent = @"{ ""packages"": [] }";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.NotNull(result.LockSha256);
Assert.Equal(64, result.LockSha256.Length); // SHA256 hex string length
// Hash must be lowercase hex for deterministic output.
Assert.True(result.LockSha256.All(c => char.IsAsciiHexDigitLower(c)));
}
[Fact]
public async Task LoadAsync_SetsLockPath()
{
var lockContent = @"{ ""packages"": [] }";
var lockPath = Path.Combine(_testDir, "composer.lock");
await File.WriteAllTextAsync(lockPath, lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Equal(lockPath, result.LockPath);
}
[Fact]
public async Task LoadAsync_MissingRequiredFields_SkipsPackage()
{
// Entries missing either "name" or "version" must be dropped, not crash the parse.
var lockContent = @"{
""packages"": [
{ ""name"": ""valid/package"", ""version"": ""1.0.0"" },
{ ""name"": ""missing-version"" },
{ ""version"": ""1.0.0"" }
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal("valid/package", result.Packages[0].Name);
}
[Fact]
public void Empty_ReturnsEmptyInstance()
{
var empty = ComposerLockData.Empty;
Assert.True(empty.IsEmpty);
Assert.Empty(empty.Packages);
Assert.Empty(empty.DevPackages);
Assert.Equal(string.Empty, empty.LockPath);
Assert.Null(empty.ContentHash);
Assert.Null(empty.PluginApiVersion);
Assert.Null(empty.LockSha256);
}
[Fact]
public async Task LoadAsync_Psr4ArrayPaths_ParsesMultiplePaths()
{
// A PSR-4 prefix may map to an array of paths; each path yields one entry.
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""autoload"": {
""psr-4"": {
""Vendor\\Package\\"": [""src/"", ""lib/""]
}
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Equal(2, result.Packages[0].Autoload.Psr4.Count);
}
[Fact]
public async Task LoadAsync_NormalizesBackslashesInPaths()
{
// Windows-style separators in autoload paths are normalized to forward slashes.
var lockContent = @"{
""packages"": [
{
""name"": ""vendor/package"",
""version"": ""1.0.0"",
""autoload"": {
""files"": [""src\\helpers.php""]
}
}
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.lock"), lockContent);
var context = CreateContext(_testDir);
var result = await ComposerLockData.LoadAsync(context, CancellationToken.None);
Assert.Single(result.Packages);
Assert.Contains("src/helpers.php", result.Packages[0].Autoload.Files);
}
// NOTE(review): assumes LanguageAnalyzerContext exposes a single-argument
// root-path constructor — confirm against the analyzer API.
private static LanguageAnalyzerContext CreateContext(string rootPath)
{
return new LanguageAnalyzerContext(rootPath);
}
}

View File

@@ -0,0 +1,672 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
public sealed class PhpCapabilityScannerTests
{
#region Exec Capabilities
[Theory]
[InlineData("exec('ls -la');", "exec")]
[InlineData("shell_exec('whoami');", "shell_exec")]
[InlineData("system('cat /etc/passwd');", "system")]
[InlineData("passthru('top');", "passthru")]
[InlineData("popen('/bin/sh', 'r');", "popen")]
[InlineData("proc_open('ls', $descriptors, $pipes);", "proc_open")]
[InlineData("pcntl_exec('/bin/bash');", "pcntl_exec")]
public void ScanContent_ExecFunction_DetectsCriticalRisk(string line, string expectedFunction)
{
    // Arrange: wrap the statement in a minimal PHP script.
    var phpSource = $"<?php\n{line}";

    // Act
    var evidences = PhpCapabilityScanner.ScanContent(phpSource, "test.php");

    // Assert: the exec-family call is reported, and every exec finding is critical.
    Assert.NotEmpty(evidences);
    Assert.Contains(evidences, evidence => evidence.Kind == PhpCapabilityKind.Exec && evidence.FunctionOrPattern == expectedFunction);
    Assert.All(evidences.Where(evidence => evidence.Kind == PhpCapabilityKind.Exec), evidence => Assert.Equal(PhpCapabilityRisk.Critical, evidence.Risk));
}
[Fact]
public void ScanContent_BacktickOperator_DetectsCriticalRisk()
{
    // The PHP backtick operator is shorthand for shell_exec and must be critical.
    var phpSource = "<?php\n$output = `ls -la`;";

    var evidences = PhpCapabilityScanner.ScanContent(phpSource, "test.php");

    Assert.NotEmpty(evidences);
    Assert.Contains(evidences, evidence => evidence.Kind == PhpCapabilityKind.Exec && evidence.FunctionOrPattern == "backtick_operator");
    Assert.Contains(evidences, evidence => evidence.Risk == PhpCapabilityRisk.Critical);
}
[Fact]
public void ScanContent_ExecInComment_DoesNotDetect()
{
    // All three PHP comment styles (//, /* */, #) must be ignored by the scanner.
    var phpSource = @"<?php
// exec('ls -la');
/* shell_exec('whoami'); */
# system('cat');
";

    var evidences = PhpCapabilityScanner.ScanContent(phpSource, "test.php");

    Assert.Empty(evidences);
}
#endregion
#region Filesystem Capabilities
[Theory]
[InlineData("fopen('file.txt', 'r');", "fopen", PhpCapabilityRisk.Medium)]
[InlineData("fwrite($fp, $data);", "fwrite", PhpCapabilityRisk.Medium)]
[InlineData("fread($fp, 1024);", "fread", PhpCapabilityRisk.Low)]
[InlineData("file_get_contents('data.txt');", "file_get_contents", PhpCapabilityRisk.Medium)]
[InlineData("file_put_contents('out.txt', $data);", "file_put_contents", PhpCapabilityRisk.Medium)]
public void ScanContent_FileReadWrite_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
    // Arrange
    var phpSource = $"<?php\n{line}";

    // Act
    var evidences = PhpCapabilityScanner.ScanContent(phpSource, "test.php");

    // Assert: the filesystem call is found with the risk level assigned per function.
    Assert.NotEmpty(evidences);
    var match = evidences.FirstOrDefault(evidence => evidence.Kind == PhpCapabilityKind.Filesystem && evidence.FunctionOrPattern == expectedFunction);
    Assert.NotNull(match);
    Assert.Equal(expectedRisk, match.Risk);
}
[Theory]
[InlineData("unlink('file.txt');", "unlink", PhpCapabilityRisk.High)]
[InlineData("rmdir('/tmp/dir');", "rmdir", PhpCapabilityRisk.High)]
[InlineData("chmod('script.sh', 0755);", "chmod", PhpCapabilityRisk.High)]
[InlineData("chown('file.txt', 'root');", "chown", PhpCapabilityRisk.High)]
[InlineData("symlink('/etc/passwd', 'link');", "symlink", PhpCapabilityRisk.High)]
public void ScanContent_DangerousFileOps_DetectsHighRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
    // Arrange
    var phpSource = $"<?php\n{line}";

    // Act
    var evidences = PhpCapabilityScanner.ScanContent(phpSource, "test.php");

    // Assert: destructive/permission-changing filesystem calls are rated High.
    Assert.NotEmpty(evidences);
    var match = evidences.FirstOrDefault(evidence => evidence.Kind == PhpCapabilityKind.Filesystem && evidence.FunctionOrPattern == expectedFunction);
    Assert.NotNull(match);
    Assert.Equal(expectedRisk, match.Risk);
}
[Fact]
public void ScanContent_DirectoryFunctions_DetectsLowRisk()
{
var content = @"<?php
$files = scandir('/var/www');
$matches = glob('*.php');
$dir = opendir('/home');
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.All(result.Where(e => e.Kind == PhpCapabilityKind.Filesystem), e => Assert.Equal(PhpCapabilityRisk.Low, e.Risk));
}
#endregion
#region Network Capabilities
// cURL client calls are Medium; raw socket creation/listening is High because
// it enables arbitrary inbound/outbound connectivity.
[Theory]
[InlineData("curl_init('http://example.com');", "curl_init")]
[InlineData("curl_exec($ch);", "curl_exec")]
[InlineData("curl_multi_exec($mh, $active);", "curl_multi_exec")]
public void ScanContent_CurlFunctions_DetectsMediumRisk(string line, string expectedFunction)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Network && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Medium, evidence.Risk);
}
[Theory]
[InlineData("fsockopen('localhost', 80);", "fsockopen")]
[InlineData("socket_create(AF_INET, SOCK_STREAM, SOL_TCP);", "socket_create")]
[InlineData("socket_connect($socket, '127.0.0.1', 8080);", "socket_connect")]
[InlineData("stream_socket_client('tcp://localhost:80');", "stream_socket_client")]
[InlineData("stream_socket_server('tcp://0.0.0.0:8000');", "stream_socket_server")]
public void ScanContent_SocketFunctions_DetectsHighRisk(string line, string expectedFunction)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Network && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.High, evidence.Risk);
}
[Fact]
// file_get_contents with a URL argument crosses from Filesystem into Network;
// the scanner flags it under the dedicated "file_get_contents_url" pattern.
public void ScanContent_FileGetContentsWithUrl_DetectsNetworkCapability()
{
var content = "<?php\n$data = file_get_contents('http://example.com/api');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Network && e.FunctionOrPattern == "file_get_contents_url");
}
#endregion
#region Environment Capabilities
// Reading environment state is Medium; mutating it (putenv/apache_setenv) is
// High since it can alter process behaviour (PATH hijacking etc.).
[Theory]
[InlineData("getenv('HOME');", "getenv", PhpCapabilityRisk.Medium)]
[InlineData("putenv('PATH=/usr/bin');", "putenv", PhpCapabilityRisk.High)]
[InlineData("apache_getenv('DOCUMENT_ROOT');", "apache_getenv", PhpCapabilityRisk.Medium)]
[InlineData("apache_setenv('MY_VAR', 'value');", "apache_setenv", PhpCapabilityRisk.High)]
public void ScanContent_EnvFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Environment && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
[Fact]
public void ScanContent_EnvSuperglobal_DetectsMediumRisk()
{
var content = "<?php\n$path = $_ENV['PATH'];";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Environment && e.FunctionOrPattern == "$_ENV");
}
[Fact]
// $_SERVER is read on virtually every request, so it is only Low risk.
public void ScanContent_ServerSuperglobal_DetectsLowRisk()
{
var content = "<?php\n$host = $_SERVER['HTTP_HOST'];";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Environment && e.FunctionOrPattern == "$_SERVER");
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Low, evidence.Risk);
}
#endregion
#region Serialization Capabilities
[Fact]
// unserialize() on attacker-controlled data enables PHP object injection,
// hence the Critical rating.
public void ScanContent_Unserialize_DetectsCriticalRisk()
{
var content = "<?php\n$obj = unserialize($data);";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Serialization && e.FunctionOrPattern == "unserialize");
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Critical, evidence.Risk);
}
[Theory]
[InlineData("serialize($object);", "serialize", PhpCapabilityRisk.Low)]
[InlineData("json_encode($data);", "json_encode", PhpCapabilityRisk.Low)]
[InlineData("json_decode($json);", "json_decode", PhpCapabilityRisk.Low)]
[InlineData("igbinary_unserialize($data);", "igbinary_unserialize", PhpCapabilityRisk.High)]
public void ScanContent_SerializationFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Serialization && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
[Theory]
[InlineData("public function __wakeup()")]
[InlineData("private function __sleep()")]
[InlineData("public function __serialize()")]
[InlineData("public function __unserialize($data)")]
// Magic serialization hooks are the gadget entry points of object-injection
// chains, so merely declaring them counts as Serialization evidence.
public void ScanContent_SerializationMagicMethods_Detects(string line)
{
var content = $"<?php\nclass Test {{\n {line} {{ }}\n}}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Serialization);
}
#endregion
#region Crypto Capabilities
[Fact]
// Each distinct openssl_* call on its own line should yield its own evidence
// record, so three calls must produce at least three Crypto findings.
public void ScanContent_OpenSslFunctions_DetectsMediumRisk()
{
var content = @"<?php
openssl_encrypt($data, 'AES-256-CBC', $key);
openssl_decrypt($encrypted, 'AES-256-CBC', $key);
openssl_sign($data, $signature, $privateKey);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.True(result.Count(e => e.Kind == PhpCapabilityKind.Crypto) >= 3);
}
[Fact]
// Modern libsodium primitives are safe-by-default, so they are reported for
// inventory purposes only, at Low risk.
public void ScanContent_SodiumFunctions_DetectsLowRisk()
{
var content = @"<?php
sodium_crypto_secretbox($message, $nonce, $key);
sodium_crypto_box($message, $nonce, $keyPair);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
// The evidence type exposes the matched name via FunctionOrPattern (the
// property used everywhere else in this file) — the previous `e.Pattern`
// member does not exist. Materialize the filtered set and require it to be
// non-empty so Assert.All cannot pass vacuously on an empty sequence; use an
// ordinal comparison because function names are machine identifiers.
var sodiumEvidence = result
.Where(e => e.Kind == PhpCapabilityKind.Crypto && e.FunctionOrPattern.StartsWith("sodium", StringComparison.Ordinal))
.ToList();
Assert.NotEmpty(sodiumEvidence);
Assert.All(sodiumEvidence, e => Assert.Equal(PhpCapabilityRisk.Low, e.Risk));
}
[Theory]
// Weak digests (md5) and the abandoned mcrypt extension are rated higher than
// modern hashing APIs.
[InlineData("md5($password);", "md5", PhpCapabilityRisk.Medium)]
[InlineData("sha1($data);", "sha1", PhpCapabilityRisk.Low)]
[InlineData("hash('sha256', $data);", "hash", PhpCapabilityRisk.Low)]
[InlineData("password_hash($password, PASSWORD_DEFAULT);", "password_hash", PhpCapabilityRisk.Low)]
[InlineData("mcrypt_encrypt(MCRYPT_RIJNDAEL_256, $key, $data, MCRYPT_MODE_CBC);", "mcrypt_encrypt", PhpCapabilityRisk.High)]
public void ScanContent_HashFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Crypto && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
#endregion
#region Database Capabilities
[Fact]
// mysqli_connect/query/close: at least the connect and query calls should be
// recorded as Database evidence.
public void ScanContent_MysqliFunctions_DetectsDatabase()
{
var content = @"<?php
$conn = mysqli_connect('localhost', 'user', 'pass', 'db');
mysqli_query($conn, 'SELECT * FROM users');
mysqli_close($conn);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.True(result.Count(e => e.Kind == PhpCapabilityKind.Database) >= 2);
}
[Fact]
// Instantiating PDO is detected by class name, not by a function call.
public void ScanContent_PdoUsage_DetectsDatabase()
{
var content = @"<?php
$pdo = new PDO('mysql:host=localhost;dbname=test', 'user', 'pass');
$stmt = $pdo->prepare('SELECT * FROM users WHERE id = ?');
$stmt->execute([$id]);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Database && e.FunctionOrPattern == "PDO");
}
[Fact]
public void ScanContent_PostgresFunctions_DetectsDatabase()
{
var content = @"<?php
$conn = pg_connect('host=localhost dbname=test');
pg_query($conn, 'SELECT * FROM users');
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.True(result.Count(e => e.Kind == PhpCapabilityKind.Database) >= 2);
}
[Fact]
// A double-quoted SQL string interpolating a PHP variable is the classic SQL
// injection shape; it is reported under the "raw_sql_query" pattern.
public void ScanContent_RawSqlQuery_DetectsHighRisk()
{
var content = "<?php\n$query = \"SELECT * FROM users WHERE id = $id\";";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Database && e.FunctionOrPattern == "raw_sql_query");
}
#endregion
#region Upload Capabilities
[Fact]
// Touching $_FILES means the script accepts uploads — High risk by default.
public void ScanContent_FilesSuperglobal_DetectsHighRisk()
{
var content = "<?php\n$file = $_FILES['upload'];";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Upload && e.FunctionOrPattern == "$_FILES");
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.High, evidence.Risk);
}
[Fact]
// move_uploaded_file persists the uploaded payload to disk.
public void ScanContent_MoveUploadedFile_DetectsHighRisk()
{
var content = "<?php\nmove_uploaded_file($_FILES['file']['tmp_name'], '/uploads/file.txt');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Upload && e.FunctionOrPattern == "move_uploaded_file");
}
#endregion
#region Stream Wrapper Capabilities
// Wrapper risk reflects exploitability: php://filter, phar:// and expect://
// are common LFI-to-RCE pivots (Critical); php://input, data:// and zip://
// are High; purely in-memory wrappers are Low.
[Theory]
[InlineData("php://input", PhpCapabilityRisk.High)]
[InlineData("php://filter", PhpCapabilityRisk.Critical)]
[InlineData("php://memory", PhpCapabilityRisk.Low)]
[InlineData("data://", PhpCapabilityRisk.High)]
[InlineData("phar://", PhpCapabilityRisk.Critical)]
[InlineData("zip://", PhpCapabilityRisk.High)]
[InlineData("expect://", PhpCapabilityRisk.Critical)]
public void ScanContent_StreamWrappers_DetectsAppropriateRisk(string wrapper, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n$data = file_get_contents('{wrapper}data');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.StreamWrapper && e.FunctionOrPattern == wrapper);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
[Fact]
// Registering a custom wrapper changes how all stream URLs resolve.
public void ScanContent_StreamWrapperRegister_DetectsHighRisk()
{
var content = "<?php\nstream_wrapper_register('myproto', 'MyProtocolHandler');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.StreamWrapper && e.FunctionOrPattern == "stream_wrapper_register");
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.High, evidence.Risk);
}
#endregion
#region Dynamic Code Capabilities
// eval/create_function/assert execute arbitrary strings (Critical); indirect
// invocation via callbacks or /e regex modifiers is High.
[Theory]
[InlineData("eval($code);", "eval")]
[InlineData("create_function('$a', 'return $a * 2;');", "create_function")]
[InlineData("assert($condition);", "assert")]
public void ScanContent_DynamicCodeExecution_DetectsCriticalRisk(string line, string expectedFunction)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.DynamicCode && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Critical, evidence.Risk);
}
[Theory]
[InlineData("call_user_func($callback, $arg);", "call_user_func")]
[InlineData("call_user_func_array($callback, $args);", "call_user_func_array")]
[InlineData("preg_replace('/pattern/e', 'code', $subject);", "preg_replace")]
public void ScanContent_DynamicCodeHigh_DetectsHighRisk(string line, string expectedFunction)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.DynamicCode && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.High, evidence.Risk);
}
[Fact]
// Calling through a variable ($func('ls')) is reported under the synthetic
// "variable_function" pattern since the target cannot be resolved statically.
public void ScanContent_VariableFunction_DetectsHighRisk()
{
var content = "<?php\n$func = 'system';\n$func('ls');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.DynamicCode && e.FunctionOrPattern == "variable_function");
}
#endregion
#region Reflection Capabilities
[Theory]
[InlineData("new ReflectionClass('MyClass');", "ReflectionClass")]
[InlineData("new ReflectionMethod($obj, 'method');", "ReflectionMethod")]
[InlineData("new ReflectionFunction('func');", "ReflectionFunction")]
[InlineData("new ReflectionProperty($obj, 'prop');", "ReflectionProperty")]
// Reflection types are matched by class name and rated Medium.
public void ScanContent_ReflectionClasses_DetectsMediumRisk(string line, string expectedClass)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Reflection && e.FunctionOrPattern == expectedClass);
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Medium, evidence.Risk);
}
[Theory]
[InlineData("get_defined_functions();", "get_defined_functions")]
[InlineData("get_defined_vars();", "get_defined_vars")]
[InlineData("get_loaded_extensions();", "get_loaded_extensions")]
// Runtime introspection helpers are also bucketed under Reflection.
public void ScanContent_IntrospectionFunctions_Detects(string line, string expectedFunction)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Reflection && e.FunctionOrPattern == expectedFunction);
}
#endregion
#region Output Control Capabilities
// header/setcookie influence the HTTP response (Medium); output buffering is
// benign (Low).
[Theory]
[InlineData("header('Location: /redirect');", "header", PhpCapabilityRisk.Medium)]
[InlineData("setcookie('session', $value);", "setcookie", PhpCapabilityRisk.Medium)]
[InlineData("ob_start();", "ob_start", PhpCapabilityRisk.Low)]
public void ScanContent_OutputFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.OutputControl && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
#endregion
#region Session Capabilities
[Fact]
public void ScanContent_SessionSuperglobal_DetectsMediumRisk()
{
var content = "<?php\n$_SESSION['user'] = $userId;";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Session && e.FunctionOrPattern == "$_SESSION");
Assert.NotNull(evidence);
Assert.Equal(PhpCapabilityRisk.Medium, evidence.Risk);
}
// session_set_save_handler is High because a custom handler often feeds
// serialized session payloads into unserialize-style gadget chains.
[Theory]
[InlineData("session_start();", "session_start", PhpCapabilityRisk.Medium)]
[InlineData("session_destroy();", "session_destroy", PhpCapabilityRisk.Low)]
[InlineData("session_set_save_handler($handler);", "session_set_save_handler", PhpCapabilityRisk.High)]
public void ScanContent_SessionFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.Session && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
#endregion
#region Error Handling Capabilities
// ini_set and phpinfo leak or mutate runtime configuration (High); reading a
// single ini value is Low; handler registration is Medium.
[Theory]
[InlineData("ini_set('display_errors', 1);", "ini_set", PhpCapabilityRisk.High)]
[InlineData("ini_get('memory_limit');", "ini_get", PhpCapabilityRisk.Low)]
[InlineData("phpinfo();", "phpinfo", PhpCapabilityRisk.High)]
[InlineData("error_reporting(E_ALL);", "error_reporting", PhpCapabilityRisk.Medium)]
[InlineData("set_error_handler($handler);", "set_error_handler", PhpCapabilityRisk.Medium)]
public void ScanContent_ErrorFunctions_DetectsAppropriateRisk(string line, string expectedFunction, PhpCapabilityRisk expectedRisk)
{
var content = $"<?php\n{line}";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.FirstOrDefault(e => e.Kind == PhpCapabilityKind.ErrorHandling && e.FunctionOrPattern == expectedFunction);
Assert.NotNull(evidence);
Assert.Equal(expectedRisk, evidence.Risk);
}
#endregion
#region Edge Cases and Integration
[Fact]
// Degenerate inputs must yield an empty result rather than throwing.
public void ScanContent_EmptyContent_ReturnsEmpty()
{
var result = PhpCapabilityScanner.ScanContent("", "test.php");
Assert.Empty(result);
}
[Fact]
// Null content is tolerated (null-forgiving on purpose) and treated as empty.
public void ScanContent_NullContent_ReturnsEmpty()
{
var result = PhpCapabilityScanner.ScanContent(null!, "test.php");
Assert.Empty(result);
}
[Fact]
// Whitespace-only input contains no PHP tokens, so nothing can be reported.
public void ScanContent_WhitespaceContent_ReturnsEmpty()
{
var whitespaceOnly = " \n\t ";

var evidence = PhpCapabilityScanner.ScanContent(whitespaceOnly, "test.php");

Assert.Empty(evidence);
}
[Fact]
// Integration-style check: a script mixing five capability kinds must report
// at least one finding per kind in a single pass.
public void ScanContent_MultipleCapabilities_DetectsAll()
{
var content = @"<?php
exec('ls');
$data = file_get_contents('data.txt');
$conn = mysqli_connect('localhost', 'user', 'pass');
$_SESSION['user'] = $user;
unserialize($input);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.True(result.Count >= 5);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Exec);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Filesystem);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Database);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Session);
Assert.Contains(result, e => e.Kind == PhpCapabilityKind.Serialization);
}
[Fact]
// Block comments spanning several lines must suppress detection of the
// dangerous calls inside them; the plain echo after produces no evidence.
public void ScanContent_MultiLineComment_SkipsCommentedCode()
{
var content = @"<?php
/*
exec('ls');
unserialize($data);
*/
echo 'Hello';
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.Empty(result);
}
[Fact]
// PHP function names are case-insensitive; the scanner must match them in any
// casing but normalize the reported pattern name to lowercase.
public void ScanContent_CaseInsensitive_DetectsFunctions()
{
var content = @"<?php
EXEC('ls');
Shell_Exec('whoami');
UNSERIALIZE($data);
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
Assert.Contains(result, e => e.FunctionOrPattern == "exec");
Assert.Contains(result, e => e.FunctionOrPattern == "shell_exec");
Assert.Contains(result, e => e.FunctionOrPattern == "unserialize");
}
[Fact]
// SourceLine is 1-based and counts from the "<?php" line; the literal below
// is laid out so exec lands on line 4 and shell_exec on line 6.
public void ScanContent_CorrectLineNumbers_ReportsAccurately()
{
var content = @"<?php
// Line 2
// Line 3
exec('ls'); // Line 4
// Line 5
shell_exec('pwd'); // Line 6
";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var execEvidence = result.FirstOrDefault(e => e.FunctionOrPattern == "exec");
var shellExecEvidence = result.FirstOrDefault(e => e.FunctionOrPattern == "shell_exec");
Assert.NotNull(execEvidence);
Assert.NotNull(shellExecEvidence);
Assert.Equal(4, execEvidence.SourceLine);
Assert.Equal(6, shellExecEvidence.SourceLine);
}
[Fact]
// Evidence snippets are capped so a pathological one-liner cannot bloat
// reports; the cap observed here is 150 chars plus a "..." suffix.
public void ScanContent_SnippetTruncation_TruncatesLongLines()
{
var longLine = new string('x', 200);
var content = $"<?php\nexec('{longLine}');";
var result = PhpCapabilityScanner.ScanContent(content, "test.php");
Assert.NotEmpty(result);
var evidence = result.First();
Assert.NotNull(evidence.Snippet);
Assert.True(evidence.Snippet.Length <= 153); // 150 + "..."
}
[Fact]
// Every evidence record must echo back the exact path handed to the scanner.
public void ScanContent_SourceFilePreserved_InEvidence()
{
const string sourcePath = "src/controllers/AdminController.php";

var result = PhpCapabilityScanner.ScanContent("<?php\nexec('ls');", sourcePath);

Assert.NotEmpty(result);
Assert.All(result, e => Assert.Equal(sourcePath, e.SourceFile));
}
}

View File

@@ -0,0 +1,471 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
public sealed class PhpComposerManifestReaderTests : IDisposable
{
private readonly string _testDir;
public PhpComposerManifestReaderTests()
{
_testDir = Path.Combine(Path.GetTempPath(), $"manifest-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_testDir);
}
public void Dispose()
{
try
{
if (Directory.Exists(_testDir))
{
Directory.Delete(_testDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
#region PhpComposerManifestReader Tests
[Fact]
public async Task LoadAsync_NullPath_ReturnsNull()
{
var result = await PhpComposerManifestReader.LoadAsync(null!, CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task LoadAsync_EmptyPath_ReturnsNull()
{
var result = await PhpComposerManifestReader.LoadAsync("", CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task LoadAsync_NonExistentDirectory_ReturnsNull()
{
var result = await PhpComposerManifestReader.LoadAsync("/nonexistent/path", CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task LoadAsync_NoComposerJson_ReturnsNull()
{
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task LoadAsync_InvalidJson_ReturnsNull()
{
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), "{ invalid json }");
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task LoadAsync_ValidManifest_ParsesBasicFields()
{
var manifest = @"{
""name"": ""vendor/package"",
""description"": ""A test package"",
""type"": ""library"",
""version"": ""1.2.3"",
""license"": ""MIT""
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal("vendor/package", result.Name);
Assert.Equal("A test package", result.Description);
Assert.Equal("library", result.Type);
Assert.Equal("1.2.3", result.Version);
Assert.Equal("MIT", result.License);
}
[Fact]
public async Task LoadAsync_ParsesLicenseArray()
{
var manifest = @"{
""name"": ""vendor/package"",
""license"": [""MIT"", ""Apache-2.0""]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal("MIT OR Apache-2.0", result.License);
}
[Fact]
public async Task LoadAsync_ParsesAuthors()
{
var manifest = @"{
""name"": ""vendor/package"",
""authors"": [
{ ""name"": ""John Doe"", ""email"": ""john@example.com"" },
{ ""name"": ""Jane Smith"" }
]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.Authors.Count);
Assert.Contains("John Doe <john@example.com>", result.Authors);
Assert.Contains("Jane Smith", result.Authors);
}
[Fact]
public async Task LoadAsync_ParsesRequireDependencies()
{
var manifest = @"{
""name"": ""vendor/package"",
""require"": {
""php"": "">=8.1"",
""ext-json"": ""*"",
""monolog/monolog"": ""^3.0""
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(3, result.Require.Count);
Assert.Equal(">=8.1", result.Require["php"]);
Assert.Equal("*", result.Require["ext-json"]);
Assert.Equal("^3.0", result.Require["monolog/monolog"]);
}
[Fact]
public async Task LoadAsync_ParsesRequireDevDependencies()
{
var manifest = @"{
""name"": ""vendor/package"",
""require-dev"": {
""phpunit/phpunit"": ""^10.0"",
""phpstan/phpstan"": ""^1.0""
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.RequireDev.Count);
Assert.Equal("^10.0", result.RequireDev["phpunit/phpunit"]);
Assert.Equal("^1.0", result.RequireDev["phpstan/phpstan"]);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadPsr4()
{
var manifest = @"{
""name"": ""vendor/package"",
""autoload"": {
""psr-4"": {
""Vendor\\Package\\"": ""src/""
}
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.NotEmpty(result.Autoload.Psr4);
Assert.Contains("Vendor\\Package\\->src/", result.Autoload.Psr4);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadClassmap()
{
var manifest = @"{
""name"": ""vendor/package"",
""autoload"": {
""classmap"": [""lib/"", ""src/Legacy/""]
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.Autoload.Classmap.Count);
Assert.Contains("lib/", result.Autoload.Classmap);
Assert.Contains("src/Legacy/", result.Autoload.Classmap);
}
[Fact]
public async Task LoadAsync_ParsesAutoloadFiles()
{
var manifest = @"{
""name"": ""vendor/package"",
""autoload"": {
""files"": [""src/helpers.php"", ""src/functions.php""]
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.Autoload.Files.Count);
Assert.Contains("src/helpers.php", result.Autoload.Files);
Assert.Contains("src/functions.php", result.Autoload.Files);
}
[Fact]
public async Task LoadAsync_ParsesScripts()
{
var manifest = @"{
""name"": ""vendor/package"",
""scripts"": {
""test"": ""phpunit"",
""lint"": ""phpstan analyse""
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.Scripts.Count);
Assert.Equal("phpunit", result.Scripts["test"]);
Assert.Equal("phpstan analyse", result.Scripts["lint"]);
}
[Fact]
public async Task LoadAsync_ParsesScriptsArray()
{
var manifest = @"{
""name"": ""vendor/package"",
""scripts"": {
""check"": [""phpstan analyse"", ""phpunit""]
}
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Single(result.Scripts);
Assert.Contains("phpstan analyse", result.Scripts["check"]);
Assert.Contains("phpunit", result.Scripts["check"]);
}
[Fact]
public async Task LoadAsync_ParsesBin()
{
var manifest = @"{
""name"": ""vendor/package"",
""bin"": [""bin/console"", ""bin/migrate""]
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(2, result.Bin.Count);
Assert.Equal("bin/console", result.Bin["console"]);
Assert.Equal("bin/migrate", result.Bin["migrate"]);
}
[Fact]
public async Task LoadAsync_ParsesMinimumStability()
{
var manifest = @"{
""name"": ""vendor/package"",
""minimum-stability"": ""dev""
}";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal("dev", result.MinimumStability);
}
[Fact]
public async Task LoadAsync_ComputesSha256()
{
var manifest = @"{ ""name"": ""vendor/package"" }";
await File.WriteAllTextAsync(Path.Combine(_testDir, "composer.json"), manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.NotNull(result.Sha256);
Assert.Equal(64, result.Sha256.Length);
Assert.True(result.Sha256.All(c => char.IsAsciiHexDigitLower(c)));
}
[Fact]
public async Task LoadAsync_SetsManifestPath()
{
var manifest = @"{ ""name"": ""vendor/package"" }";
var manifestPath = Path.Combine(_testDir, "composer.json");
await File.WriteAllTextAsync(manifestPath, manifest);
var result = await PhpComposerManifestReader.LoadAsync(_testDir, CancellationToken.None);
Assert.NotNull(result);
Assert.Equal(manifestPath, result.ManifestPath);
}
#endregion
#region PhpComposerManifest Tests
[Fact]
public void RequiredPhpVersion_ReturnsPhpConstraint()
{
var manifest = new PhpComposerManifest(
"/test/composer.json",
"vendor/package",
null, null, null, null,
Array.Empty<string>(),
new Dictionary<string, string> { { "php", ">=8.1" } },
new Dictionary<string, string>(),
ComposerAutoloadData.Empty,
ComposerAutoloadData.Empty,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
null, null);
Assert.Equal(">=8.1", manifest.RequiredPhpVersion);
}
[Fact]
public void RequiredPhpVersion_ReturnsNullWhenNotSpecified()
{
var manifest = new PhpComposerManifest(
"/test/composer.json",
"vendor/package",
null, null, null, null,
Array.Empty<string>(),
new Dictionary<string, string>(),
new Dictionary<string, string>(),
ComposerAutoloadData.Empty,
ComposerAutoloadData.Empty,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
null, null);
Assert.Null(manifest.RequiredPhpVersion);
}
[Fact]
public void RequiredExtensions_ReturnsExtensionsList()
{
var manifest = new PhpComposerManifest(
"/test/composer.json",
"vendor/package",
null, null, null, null,
Array.Empty<string>(),
new Dictionary<string, string>
{
{ "ext-json", "*" },
{ "ext-mbstring", "*" },
{ "ext-curl", "*" },
{ "monolog/monolog", "^3.0" }
},
new Dictionary<string, string>(),
ComposerAutoloadData.Empty,
ComposerAutoloadData.Empty,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
null, null);
var extensions = manifest.RequiredExtensions.ToList();
Assert.Equal(3, extensions.Count);
Assert.Contains("json", extensions);
Assert.Contains("mbstring", extensions);
Assert.Contains("curl", extensions);
}
[Fact]
public void CreateMetadata_IncludesAllFields()
{
var manifest = new PhpComposerManifest(
"/test/composer.json",
"vendor/package",
"Test package",
"library",
"1.0.0",
"MIT",
new[] { "Author" },
new Dictionary<string, string>
{
{ "php", ">=8.1" },
{ "ext-json", "*" },
{ "monolog/monolog", "^3.0" }
},
new Dictionary<string, string> { { "phpunit/phpunit", "^10.0" } },
ComposerAutoloadData.Empty,
ComposerAutoloadData.Empty,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
null,
"abc123def456");
var metadata = manifest.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
Assert.Equal("vendor/package", metadata["composer.manifest.name"]);
Assert.Equal("library", metadata["composer.manifest.type"]);
Assert.Equal("MIT", metadata["composer.manifest.license"]);
Assert.Equal(">=8.1", metadata["composer.manifest.php_version"]);
Assert.Equal("json", metadata["composer.manifest.extensions"]);
Assert.Equal("3", metadata["composer.manifest.require_count"]);
Assert.Equal("1", metadata["composer.manifest.require_dev_count"]);
Assert.Equal("abc123def456", metadata["composer.manifest.sha256"]);
}
[Fact]
public void Empty_HasNullValues()
{
    var sut = PhpComposerManifest.Empty;

    // The sentinel instance carries an empty path and no manifest data at all.
    Assert.Equal(string.Empty, sut.ManifestPath);
    Assert.Null(sut.Name);
    Assert.Null(sut.Description);
    Assert.Null(sut.Type);
    Assert.Null(sut.Version);
    Assert.Null(sut.License);
    Assert.Empty(sut.Authors);
    Assert.Empty(sut.Require);
    Assert.Empty(sut.RequireDev);
    Assert.Null(sut.MinimumStability);
    Assert.Null(sut.Sha256);
}
#endregion
#region ComposerAutoloadData Tests
[Fact]
public void ComposerAutoloadData_Empty_HasEmptyCollections()
{
    var sut = ComposerAutoloadData.Empty;

    // The Empty sentinel must not expose null collections, only empty ones.
    Assert.Empty(sut.Psr4);
    Assert.Empty(sut.Classmap);
    Assert.Empty(sut.Files);
}
#endregion
}

View File

@@ -0,0 +1,417 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
/// <summary>
/// Unit tests for the PHP extension scanner model types: <c>PhpExtension</c>,
/// its source/category enums, and the php.ini-derived environment settings
/// records (security, upload, session, error reporting, resource limits).
/// </summary>
public sealed class PhpExtensionScannerTests
{
    #region PhpExtension Tests
    [Fact]
    public void PhpExtension_RecordProperties_SetCorrectly()
    {
        // Positional constructor arguments should map 1:1 onto the named properties.
        var extension = new PhpExtension(
            "pdo_mysql",
            "8.2.0",
            "/usr/lib/php/extensions/pdo_mysql.so",
            PhpExtensionSource.PhpIni,
            false,
            PhpExtensionCategory.Database);
        Assert.Equal("pdo_mysql", extension.Name);
        Assert.Equal("8.2.0", extension.Version);
        Assert.Equal("/usr/lib/php/extensions/pdo_mysql.so", extension.LibraryPath);
        Assert.Equal(PhpExtensionSource.PhpIni, extension.Source);
        Assert.False(extension.IsBundled);
        Assert.Equal(PhpExtensionCategory.Database, extension.Category);
    }
    [Fact]
    public void PhpExtension_CreateMetadata_IncludesAllFields()
    {
        // All fields should be exported under the "extension.*" metadata prefix,
        // with enum values rendered lowercase ("confd", "crypto").
        var extension = new PhpExtension(
            "openssl",
            "3.0.0",
            "/usr/lib/php/openssl.so",
            PhpExtensionSource.ConfD,
            false,
            PhpExtensionCategory.Crypto);
        var metadata = extension.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("openssl", metadata["extension.name"]);
        Assert.Equal("3.0.0", metadata["extension.version"]);
        Assert.Equal("/usr/lib/php/openssl.so", metadata["extension.library"]);
        Assert.Equal("confd", metadata["extension.source"]);
        Assert.Equal("false", metadata["extension.bundled"]);
        Assert.Equal("crypto", metadata["extension.category"]);
    }
    [Fact]
    public void PhpExtension_BundledExtension_MarkedCorrectly()
    {
        // Bundled extensions carry no version/library path but must flag IsBundled.
        var extension = new PhpExtension(
            "json",
            null,
            null,
            PhpExtensionSource.Bundled,
            true,
            PhpExtensionCategory.Core);
        Assert.True(extension.IsBundled);
        Assert.Equal(PhpExtensionSource.Bundled, extension.Source);
    }
    #endregion
    #region PhpExtensionSource Tests
    [Fact]
    public void PhpExtensionSource_HasExpectedValues()
    {
        // Pins the numeric enum values to guard against accidental reordering
        // of members (which would change any value persisted as an int).
        Assert.Equal(0, (int)PhpExtensionSource.PhpIni);
        Assert.Equal(1, (int)PhpExtensionSource.ConfD);
        Assert.Equal(2, (int)PhpExtensionSource.Bundled);
        Assert.Equal(3, (int)PhpExtensionSource.Container);
        Assert.Equal(4, (int)PhpExtensionSource.UsageDetected);
    }
    #endregion
    #region PhpExtensionCategory Tests
    [Fact]
    public void PhpExtensionCategory_HasExpectedValues()
    {
        // Same rationale as PhpExtensionSource: pin the enum ordering.
        Assert.Equal(0, (int)PhpExtensionCategory.Core);
        Assert.Equal(1, (int)PhpExtensionCategory.Database);
        Assert.Equal(2, (int)PhpExtensionCategory.Crypto);
        Assert.Equal(3, (int)PhpExtensionCategory.Image);
        Assert.Equal(4, (int)PhpExtensionCategory.Compression);
        Assert.Equal(5, (int)PhpExtensionCategory.Xml);
        Assert.Equal(6, (int)PhpExtensionCategory.Cache);
        Assert.Equal(7, (int)PhpExtensionCategory.Debug);
        Assert.Equal(8, (int)PhpExtensionCategory.Network);
        Assert.Equal(9, (int)PhpExtensionCategory.Text);
        Assert.Equal(10, (int)PhpExtensionCategory.Other);
    }
    #endregion
    #region PhpEnvironmentSettings Tests
    [Fact]
    public void PhpEnvironmentSettings_Empty_HasDefaults()
    {
        // Empty exposes no extensions but non-null default sub-settings.
        var settings = PhpEnvironmentSettings.Empty;
        Assert.Empty(settings.Extensions);
        Assert.NotNull(settings.Security);
        Assert.NotNull(settings.Upload);
        Assert.NotNull(settings.Session);
        Assert.NotNull(settings.Error);
        Assert.NotNull(settings.Limits);
        Assert.Empty(settings.WebServerSettings);
    }
    [Fact]
    public void PhpEnvironmentSettings_HasSettings_TrueWithExtensions()
    {
        // A single extension is enough to flip HasSettings even when every
        // other section is at its default.
        var extensions = new[] { new PhpExtension("pdo", null, null, PhpExtensionSource.PhpIni, false, PhpExtensionCategory.Database) };
        var settings = new PhpEnvironmentSettings(
            extensions,
            PhpSecuritySettings.Default,
            PhpUploadSettings.Default,
            PhpSessionSettings.Default,
            PhpErrorSettings.Default,
            PhpResourceLimits.Default,
            new Dictionary<string, string>());
        Assert.True(settings.HasSettings);
    }
    [Fact]
    public void PhpEnvironmentSettings_CreateMetadata_IncludesExtensionCount()
    {
        // Metadata carries the total plus a per-category breakdown
        // (env.extensions_<category> keys).
        var extensions = new[]
        {
            new PhpExtension("pdo", null, null, PhpExtensionSource.PhpIni, false, PhpExtensionCategory.Database),
            new PhpExtension("openssl", null, null, PhpExtensionSource.PhpIni, false, PhpExtensionCategory.Crypto),
            new PhpExtension("gd", null, null, PhpExtensionSource.PhpIni, false, PhpExtensionCategory.Image)
        };
        var settings = new PhpEnvironmentSettings(
            extensions,
            PhpSecuritySettings.Default,
            PhpUploadSettings.Default,
            PhpSessionSettings.Default,
            PhpErrorSettings.Default,
            PhpResourceLimits.Default,
            new Dictionary<string, string>());
        var metadata = settings.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("3", metadata["env.extension_count"]);
        Assert.Equal("1", metadata["env.extensions_database"]);
        Assert.Equal("1", metadata["env.extensions_crypto"]);
        Assert.Equal("1", metadata["env.extensions_image"]);
    }
    #endregion
    #region PhpSecuritySettings Tests
    [Fact]
    public void PhpSecuritySettings_Default_HasExpectedValues()
    {
        // Defaults mirror a stock php.ini: url fopen on, url include off,
        // expose_php on, no open_basedir restriction.
        var security = PhpSecuritySettings.Default;
        Assert.Empty(security.DisabledFunctions);
        Assert.Empty(security.DisabledClasses);
        Assert.False(security.OpenBasedir);
        Assert.Null(security.OpenBasedirValue);
        Assert.True(security.AllowUrlFopen);
        Assert.False(security.AllowUrlInclude);
        Assert.True(security.ExposePhp);
        Assert.False(security.RegisterGlobals);
    }
    [Fact]
    public void PhpSecuritySettings_CreateMetadata_IncludesDisabledFunctions()
    {
        // Both the counts and the joined function/class lists must be exported.
        var security = new PhpSecuritySettings(
            new[] { "exec", "shell_exec", "system", "passthru" },
            new[] { "Directory" },
            true,
            "/var/www",
            false,
            false,
            false,
            false);
        var metadata = security.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("4", metadata["security.disabled_functions_count"]);
        Assert.Contains("exec", metadata["security.disabled_functions"]);
        Assert.Contains("shell_exec", metadata["security.disabled_functions"]);
        Assert.Equal("1", metadata["security.disabled_classes_count"]);
        Assert.Equal("true", metadata["security.open_basedir"]);
        Assert.Equal("false", metadata["security.allow_url_fopen"]);
        Assert.Equal("false", metadata["security.allow_url_include"]);
        Assert.Equal("false", metadata["security.expose_php"]);
    }
    [Fact]
    public void PhpSecuritySettings_DangerousConfiguration_Detectable()
    {
        // Sanity check that a risky configuration (remote file inclusion
        // enabled, no basedir jail) is faithfully represented by the record.
        var security = new PhpSecuritySettings(
            Array.Empty<string>(),
            Array.Empty<string>(),
            false,
            null,
            true,
            true, // allow_url_include is dangerous!
            true,
            false);
        Assert.True(security.AllowUrlInclude);
        Assert.True(security.AllowUrlFopen);
        Assert.False(security.OpenBasedir);
    }
    #endregion
    #region PhpUploadSettings Tests
    [Fact]
    public void PhpUploadSettings_Default_HasExpectedValues()
    {
        // Matches stock php.ini upload defaults (2M file / 8M post / 20 files).
        var upload = PhpUploadSettings.Default;
        Assert.True(upload.FileUploads);
        Assert.Equal("2M", upload.MaxFileSize);
        Assert.Equal("8M", upload.MaxPostSize);
        Assert.Equal(20, upload.MaxFileUploads);
        Assert.Null(upload.UploadTmpDir);
    }
    [Fact]
    public void PhpUploadSettings_CreateMetadata_IncludesAllFields()
    {
        var upload = new PhpUploadSettings(
            true,
            "64M",
            "128M",
            50,
            "/tmp/uploads");
        var metadata = upload.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("true", metadata["upload.enabled"]);
        Assert.Equal("64M", metadata["upload.max_file_size"]);
        Assert.Equal("128M", metadata["upload.max_post_size"]);
        Assert.Equal("50", metadata["upload.max_files"]);
    }
    [Fact]
    public void PhpUploadSettings_DisabledUploads()
    {
        // Null size strings are allowed when uploads are switched off.
        var upload = new PhpUploadSettings(false, null, null, 0, null);
        Assert.False(upload.FileUploads);
        Assert.Equal(0, upload.MaxFileUploads);
    }
    #endregion
    #region PhpSessionSettings Tests
    [Fact]
    public void PhpSessionSettings_Default_HasExpectedValues()
    {
        // Default session handler is file-based with no cookie hardening.
        var session = PhpSessionSettings.Default;
        Assert.Equal("files", session.SaveHandler);
        Assert.Null(session.SavePath);
        Assert.False(session.CookieHttponly);
        Assert.False(session.CookieSecure);
        Assert.Null(session.CookieSamesite);
    }
    [Fact]
    public void PhpSessionSettings_CreateMetadata_IncludesAllFields()
    {
        var session = new PhpSessionSettings(
            "redis",
            "tcp://localhost:6379",
            true,
            true,
            "Strict");
        var metadata = session.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("redis", metadata["session.save_handler"]);
        Assert.Equal("true", metadata["session.cookie_httponly"]);
        Assert.Equal("true", metadata["session.cookie_secure"]);
        Assert.Equal("Strict", metadata["session.cookie_samesite"]);
    }
    [Fact]
    public void PhpSessionSettings_SecureConfiguration()
    {
        // A hardened cookie configuration round-trips through the record.
        var session = new PhpSessionSettings(
            "files",
            "/var/lib/php/sessions",
            true,
            true,
            "Lax");
        Assert.True(session.CookieHttponly);
        Assert.True(session.CookieSecure);
        Assert.Equal("Lax", session.CookieSamesite);
    }
    #endregion
    #region PhpErrorSettings Tests
    [Fact]
    public void PhpErrorSettings_Default_HasExpectedValues()
    {
        // Defaults are production-safe: nothing displayed, everything logged.
        var error = PhpErrorSettings.Default;
        Assert.False(error.DisplayErrors);
        Assert.False(error.DisplayStartupErrors);
        Assert.True(error.LogErrors);
        Assert.Equal("E_ALL", error.ErrorReporting);
    }
    [Fact]
    public void PhpErrorSettings_CreateMetadata_IncludesAllFields()
    {
        var error = new PhpErrorSettings(
            true,
            true,
            false,
            "E_ALL & ~E_NOTICE");
        var metadata = error.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("true", metadata["error.display_errors"]);
        Assert.Equal("true", metadata["error.display_startup_errors"]);
        Assert.Equal("false", metadata["error.log_errors"]);
        Assert.Equal("E_ALL & ~E_NOTICE", metadata["error.error_reporting"]);
    }
    [Fact]
    public void PhpErrorSettings_ProductionConfiguration()
    {
        // Typical production profile: display off, logging on.
        var error = new PhpErrorSettings(false, false, true, "E_ALL");
        Assert.False(error.DisplayErrors);
        Assert.False(error.DisplayStartupErrors);
        Assert.True(error.LogErrors);
    }
    [Fact]
    public void PhpErrorSettings_DevelopmentConfiguration()
    {
        // Typical development profile: everything displayed and logged.
        var error = new PhpErrorSettings(true, true, true, "E_ALL");
        Assert.True(error.DisplayErrors);
        Assert.True(error.DisplayStartupErrors);
        Assert.True(error.LogErrors);
    }
    #endregion
    #region PhpResourceLimits Tests
    [Fact]
    public void PhpResourceLimits_Default_HasExpectedValues()
    {
        // Matches stock php.ini limits (128M / 30s exec / 60s input / 1000 vars).
        var limits = PhpResourceLimits.Default;
        Assert.Equal("128M", limits.MemoryLimit);
        Assert.Equal(30, limits.MaxExecutionTime);
        Assert.Equal(60, limits.MaxInputTime);
        Assert.Equal("1000", limits.MaxInputVars);
    }
    [Fact]
    public void PhpResourceLimits_CreateMetadata_IncludesAllFields()
    {
        var limits = new PhpResourceLimits(
            "512M",
            120,
            180,
            "5000");
        var metadata = limits.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("512M", metadata["limits.memory_limit"]);
        Assert.Equal("120", metadata["limits.max_execution_time"]);
        Assert.Equal("180", metadata["limits.max_input_time"]);
        Assert.Equal("5000", metadata["limits.max_input_vars"]);
    }
    [Fact]
    public void PhpResourceLimits_HighPerformanceConfiguration()
    {
        var limits = new PhpResourceLimits("2G", 300, 300, "10000");
        Assert.Equal("2G", limits.MemoryLimit);
        Assert.Equal(300, limits.MaxExecutionTime);
    }
    [Fact]
    public void PhpResourceLimits_RestrictedConfiguration()
    {
        var limits = new PhpResourceLimits("64M", 10, 10, "500");
        Assert.Equal("64M", limits.MemoryLimit);
        Assert.Equal(10, limits.MaxExecutionTime);
        Assert.Equal(10, limits.MaxInputTime);
    }
    #endregion
}

View File

@@ -0,0 +1,421 @@
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
/// <summary>
/// Unit tests for the PHP framework surface model: <c>PhpFrameworkSurface</c>
/// and its constituent records (routes, controllers, middlewares, CLI
/// commands, cron jobs, event listeners) plus the metadata they export.
/// </summary>
public sealed class PhpFrameworkSurfaceScannerTests
{
    #region PhpFrameworkSurface Tests
    [Fact]
    public void Empty_ReturnsEmptySurface()
    {
        // The Empty sentinel must expose empty (never null) collections
        // and report that no surface was discovered.
        var surface = PhpFrameworkSurface.Empty;
        Assert.Empty(surface.Routes);
        Assert.Empty(surface.Controllers);
        Assert.Empty(surface.Middlewares);
        Assert.Empty(surface.CliCommands);
        Assert.Empty(surface.CronJobs);
        Assert.Empty(surface.EventListeners);
        Assert.False(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenRoutesPresent()
    {
        // Each of the HasSurface_* tests checks that a single non-empty
        // collection is sufficient to flip HasSurface.
        var routes = new[] { CreateRoute("/api/users", "GET") };
        var surface = new PhpFrameworkSurface(
            routes,
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenControllersPresent()
    {
        var controllers = new[] { new PhpController("UserController", "App\\Http\\Controllers", "app/Http/Controllers/UserController.php", new[] { "index", "show" }, true) };
        var surface = new PhpFrameworkSurface(
            Array.Empty<PhpRoute>(),
            controllers,
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenMiddlewaresPresent()
    {
        var middlewares = new[] { new PhpMiddleware("AuthMiddleware", "App\\Http\\Middleware", "app/Http/Middleware/AuthMiddleware.php", PhpMiddlewareKind.Auth) };
        var surface = new PhpFrameworkSurface(
            Array.Empty<PhpRoute>(),
            Array.Empty<PhpController>(),
            middlewares,
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenCliCommandsPresent()
    {
        var commands = new[] { new PhpCliCommand("app:sync", "Sync data", "SyncCommand", "app/Console/Commands/SyncCommand.php") };
        var surface = new PhpFrameworkSurface(
            Array.Empty<PhpRoute>(),
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            commands,
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenCronJobsPresent()
    {
        var cronJobs = new[] { new PhpCronJob("hourly", "ReportCommand", "Generate hourly report", "app/Console/Kernel.php") };
        var surface = new PhpFrameworkSurface(
            Array.Empty<PhpRoute>(),
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            cronJobs,
            Array.Empty<PhpEventListener>());
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void HasSurface_TrueWhenEventListenersPresent()
    {
        var listeners = new[] { new PhpEventListener("UserRegistered", "SendWelcomeEmail", 0, "app/Providers/EventServiceProvider.php") };
        var surface = new PhpFrameworkSurface(
            Array.Empty<PhpRoute>(),
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            listeners);
        Assert.True(surface.HasSurface);
    }
    [Fact]
    public void CreateMetadata_IncludesAllCounts()
    {
        // One item per category (two routes) so every surface.*_count key
        // can be asserted with a distinct expected value.
        var routes = new[] { CreateRoute("/api/users", "GET"), CreateRoute("/api/users/{id}", "GET", true) };
        var controllers = new[] { new PhpController("UserController", null, "UserController.php", Array.Empty<string>(), false) };
        var middlewares = new[] { new PhpMiddleware("AuthMiddleware", null, "AuthMiddleware.php", PhpMiddlewareKind.Auth) };
        var commands = new[] { new PhpCliCommand("app:sync", null, "SyncCommand", "SyncCommand.php") };
        var cronJobs = new[] { new PhpCronJob("hourly", "Report", null, "Kernel.php") };
        var listeners = new[] { new PhpEventListener("Event", "Handler", 0, "Provider.php") };
        var surface = new PhpFrameworkSurface(routes, controllers, middlewares, commands, cronJobs, listeners);
        var metadata = surface.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("2", metadata["surface.route_count"]);
        Assert.Equal("1", metadata["surface.controller_count"]);
        Assert.Equal("1", metadata["surface.middleware_count"]);
        Assert.Equal("1", metadata["surface.cli_command_count"]);
        Assert.Equal("1", metadata["surface.cron_job_count"]);
        Assert.Equal("1", metadata["surface.event_listener_count"]);
    }
    [Fact]
    public void CreateMetadata_IncludesHttpMethods()
    {
        // The distinct HTTP verbs across all routes are aggregated into
        // the surface.http_methods metadata value.
        var routes = new[]
        {
            CreateRoute("/users", "GET"),
            CreateRoute("/users", "POST"),
            CreateRoute("/users/{id}", "PUT"),
            CreateRoute("/users/{id}", "DELETE")
        };
        var surface = new PhpFrameworkSurface(
            routes,
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        var metadata = surface.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Contains("GET", metadata["surface.http_methods"]);
        Assert.Contains("POST", metadata["surface.http_methods"]);
        Assert.Contains("PUT", metadata["surface.http_methods"]);
        Assert.Contains("DELETE", metadata["surface.http_methods"]);
    }
    [Fact]
    public void CreateMetadata_CountsProtectedAndPublicRoutes()
    {
        // Routes are bucketed by RequiresAuth: 2 protected vs 1 public here.
        var routes = new[]
        {
            CreateRoute("/public", "GET", requiresAuth: false),
            CreateRoute("/api/users", "GET", requiresAuth: true),
            CreateRoute("/api/admin", "GET", requiresAuth: true)
        };
        var surface = new PhpFrameworkSurface(
            routes,
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        var metadata = surface.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.Equal("2", metadata["surface.protected_routes"]);
        Assert.Equal("1", metadata["surface.public_routes"]);
    }
    [Fact]
    public void CreateMetadata_IncludesRoutePatterns()
    {
        // Route patterns are exported (at least the first ones) under
        // surface.route_patterns; only presence is asserted, not the full list.
        var routes = new[]
        {
            CreateRoute("/api/v1/users", "GET"),
            CreateRoute("/api/v1/posts", "GET"),
            CreateRoute("/api/v1/comments", "GET")
        };
        var surface = new PhpFrameworkSurface(
            routes,
            Array.Empty<PhpController>(),
            Array.Empty<PhpMiddleware>(),
            Array.Empty<PhpCliCommand>(),
            Array.Empty<PhpCronJob>(),
            Array.Empty<PhpEventListener>());
        var metadata = surface.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
        Assert.True(metadata.ContainsKey("surface.route_patterns"));
        Assert.Contains("/api/v1/users", metadata["surface.route_patterns"]);
    }
    #endregion
    #region PhpRoute Tests
    [Fact]
    public void PhpRoute_RecordProperties_SetCorrectly()
    {
        // Exercises the full 9-argument route constructor.
        var route = new PhpRoute(
            "/api/users/{id}",
            new[] { "GET", "HEAD" },
            "UserController",
            "show",
            "users.show",
            true,
            new[] { "auth", "throttle" },
            "routes/api.php",
            42);
        Assert.Equal("/api/users/{id}", route.Pattern);
        Assert.Equal(2, route.Methods.Count);
        Assert.Contains("GET", route.Methods);
        Assert.Contains("HEAD", route.Methods);
        Assert.Equal("UserController", route.Controller);
        Assert.Equal("show", route.Action);
        Assert.Equal("users.show", route.Name);
        Assert.True(route.RequiresAuth);
        Assert.Equal(2, route.Middlewares.Count);
        Assert.Equal("routes/api.php", route.SourceFile);
        Assert.Equal(42, route.SourceLine);
    }
    #endregion
    #region PhpController Tests
    [Fact]
    public void PhpController_RecordProperties_SetCorrectly()
    {
        var controller = new PhpController(
            "UserController",
            "App\\Http\\Controllers",
            "app/Http/Controllers/UserController.php",
            new[] { "index", "show", "store", "update", "destroy" },
            true);
        Assert.Equal("UserController", controller.ClassName);
        Assert.Equal("App\\Http\\Controllers", controller.Namespace);
        Assert.Equal("app/Http/Controllers/UserController.php", controller.SourceFile);
        Assert.Equal(5, controller.Actions.Count);
        Assert.True(controller.IsApiController);
    }
    [Fact]
    public void PhpController_IsApiController_FalseForWebController()
    {
        var controller = new PhpController(
            "HomeController",
            "App\\Http\\Controllers",
            "app/Http/Controllers/HomeController.php",
            new[] { "index", "about" },
            false);
        Assert.False(controller.IsApiController);
    }
    #endregion
    #region PhpMiddleware Tests
    [Fact]
    public void PhpMiddleware_RecordProperties_SetCorrectly()
    {
        var middleware = new PhpMiddleware(
            "AuthenticateMiddleware",
            "App\\Http\\Middleware",
            "app/Http/Middleware/AuthenticateMiddleware.php",
            PhpMiddlewareKind.Auth);
        Assert.Equal("AuthenticateMiddleware", middleware.ClassName);
        Assert.Equal("App\\Http\\Middleware", middleware.Namespace);
        Assert.Equal("app/Http/Middleware/AuthenticateMiddleware.php", middleware.SourceFile);
        Assert.Equal(PhpMiddlewareKind.Auth, middleware.Kind);
    }
    [Fact]
    public void PhpMiddlewareKind_HasExpectedValues()
    {
        // Pins the numeric enum values to guard against accidental reordering.
        Assert.Equal(0, (int)PhpMiddlewareKind.General);
        Assert.Equal(1, (int)PhpMiddlewareKind.Auth);
        Assert.Equal(2, (int)PhpMiddlewareKind.Cors);
        Assert.Equal(3, (int)PhpMiddlewareKind.RateLimit);
        Assert.Equal(4, (int)PhpMiddlewareKind.Logging);
        Assert.Equal(5, (int)PhpMiddlewareKind.Security);
    }
    #endregion
    #region PhpCliCommand Tests
    [Fact]
    public void PhpCliCommand_RecordProperties_SetCorrectly()
    {
        var command = new PhpCliCommand(
            "app:import-data",
            "Import data from external source",
            "ImportDataCommand",
            "app/Console/Commands/ImportDataCommand.php");
        Assert.Equal("app:import-data", command.Name);
        Assert.Equal("Import data from external source", command.Description);
        Assert.Equal("ImportDataCommand", command.ClassName);
        Assert.Equal("app/Console/Commands/ImportDataCommand.php", command.SourceFile);
    }
    [Fact]
    public void PhpCliCommand_NullDescription_Allowed()
    {
        // Description is optional; null must round-trip unchanged.
        var command = new PhpCliCommand(
            "app:sync",
            null,
            "SyncCommand",
            "SyncCommand.php");
        Assert.Null(command.Description);
    }
    #endregion
    #region PhpCronJob Tests
    [Fact]
    public void PhpCronJob_RecordProperties_SetCorrectly()
    {
        var cronJob = new PhpCronJob(
            "daily",
            "CleanupOldData",
            "Remove data older than 30 days",
            "app/Console/Kernel.php");
        Assert.Equal("daily", cronJob.Schedule);
        Assert.Equal("CleanupOldData", cronJob.Handler);
        Assert.Equal("Remove data older than 30 days", cronJob.Description);
        Assert.Equal("app/Console/Kernel.php", cronJob.SourceFile);
    }
    [Fact]
    public void PhpCronJob_VariousSchedules()
    {
        // Schedules are free-form strings (Laravel-style method names here),
        // so any value should be accepted and preserved.
        var jobs = new[]
        {
            new PhpCronJob("hourly", "HourlyJob", null, "Kernel.php"),
            new PhpCronJob("daily", "DailyJob", null, "Kernel.php"),
            new PhpCronJob("weekly", "WeeklyJob", null, "Kernel.php"),
            new PhpCronJob("monthly", "MonthlyJob", null, "Kernel.php"),
            new PhpCronJob("everyMinute", "MinuteJob", null, "Kernel.php"),
            new PhpCronJob("everyFiveMinutes", "FiveMinJob", null, "Kernel.php")
        };
        Assert.Equal(6, jobs.Length);
        Assert.All(jobs, j => Assert.NotNull(j.Schedule));
    }
    #endregion
    #region PhpEventListener Tests
    [Fact]
    public void PhpEventListener_RecordProperties_SetCorrectly()
    {
        var listener = new PhpEventListener(
            "App\\Events\\UserRegistered",
            "App\\Listeners\\SendWelcomeEmail",
            10,
            "app/Providers/EventServiceProvider.php");
        Assert.Equal("App\\Events\\UserRegistered", listener.EventName);
        Assert.Equal("App\\Listeners\\SendWelcomeEmail", listener.Handler);
        Assert.Equal(10, listener.Priority);
        Assert.Equal("app/Providers/EventServiceProvider.php", listener.SourceFile);
    }
    [Fact]
    public void PhpEventListener_DefaultPriority()
    {
        var listener = new PhpEventListener(
            "EventName",
            "Handler",
            0,
            "Provider.php");
        Assert.Equal(0, listener.Priority);
    }
    #endregion
    #region Helper Methods
    /// <summary>
    /// Builds a minimal single-method route fixture with no controller,
    /// action, name, or middleware; auth defaults to off.
    /// </summary>
    private static PhpRoute CreateRoute(string pattern, string method, bool requiresAuth = false)
    {
        return new PhpRoute(
            pattern,
            new[] { method },
            null,
            null,
            null,
            requiresAuth,
            Array.Empty<string>(),
            "routes/web.php",
            1);
    }
    #endregion
}

View File

@@ -0,0 +1,485 @@
using System.Text;
using StellaOps.Scanner.Analyzers.Lang.Php.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Php.Tests.Internal;
public sealed class PhpPharScannerTests : IDisposable
{
private readonly string _testDir;
/// <summary>
/// Creates a unique temp directory (per test instance) that PHAR fixture
/// files are written into; removed again in <see cref="Dispose"/>.
/// </summary>
public PhpPharScannerTests()
{
    _testDir = Path.Combine(Path.GetTempPath(), $"phar-test-{Guid.NewGuid():N}");
    Directory.CreateDirectory(_testDir);
}
/// <summary>
/// Best-effort removal of the per-test temp directory. Cleanup failures
/// (locked files, directory already gone) must never fail the test run,
/// hence the blanket catch.
/// </summary>
public void Dispose()
{
    try
    {
        if (Directory.Exists(_testDir))
        {
            Directory.Delete(_testDir, recursive: true);
        }
    }
    catch
    {
        // Ignore cleanup errors
    }
}
#region PhpPharScanner Tests
[Fact]
public async Task ScanFileAsync_NonExistentFile_ReturnsNull()
{
    // A path that points at nothing should be reported as "no PHAR", not throw.
    var missingPath = Path.Combine(_testDir, "nonexistent.phar");

    var result = await PhpPharScanner.ScanFileAsync(missingPath, "nonexistent.phar", CancellationToken.None);

    Assert.Null(result);
}
[Fact]
public async Task ScanFileAsync_NullPath_ReturnsNull()
{
    // Defensive contract: a null path yields null rather than throwing.
    Assert.Null(await PhpPharScanner.ScanFileAsync(null!, "test.phar", CancellationToken.None));
}
[Fact]
public async Task ScanFileAsync_EmptyPath_ReturnsNull()
{
    // Same contract as the null case: an empty path means "nothing found".
    Assert.Null(await PhpPharScanner.ScanFileAsync("", "test.phar", CancellationToken.None));
}
[Fact]
public async Task ScanFileAsync_InvalidPharFile_ReturnsNull()
{
    // Arrange: a file with a .phar name but no PHAR structure at all.
    var filePath = Path.Combine(_testDir, "invalid.phar");
    await File.WriteAllTextAsync(filePath, "This is not a valid PHAR file");

    // Act
    var result = await PhpPharScanner.ScanFileAsync(filePath, "invalid.phar", CancellationToken.None);

    // Assert: garbage input is rejected gracefully.
    Assert.Null(result);
}
[Fact]
public async Task ScanFileAsync_MinimalPhar_ParsesStub()
{
    // Create a minimal PHAR structure with __HALT_COMPILER();
    var stub = "<?php\necho 'Hello';\n__HALT_COMPILER();";
    var pharContent = CreateMinimalPharBytes(stub);
    var filePath = Path.Combine(_testDir, "minimal.phar");
    await File.WriteAllBytesAsync(filePath, pharContent);
    var result = await PhpPharScanner.ScanFileAsync(filePath, "minimal.phar", CancellationToken.None);
    // May return null if manifest parsing fails, but should not throw.
    // The minimal PHAR may not have a valid manifest, so the assertion is
    // deliberately conditional: the hard contract under test is "no exception".
    // NOTE(review): if the scanner is guaranteed to parse this fixture,
    // consider tightening this to Assert.NotNull — confirm against the scanner.
    if (result is not null)
    {
        Assert.Contains("__HALT_COMPILER();", result.Stub);
    }
}
[Fact]
public async Task ScanFileAsync_ComputesSha256()
{
    var stub = "<?php\n__HALT_COMPILER();";
    var pharContent = CreateMinimalPharBytes(stub);
    var filePath = Path.Combine(_testDir, "hash.phar");
    await File.WriteAllBytesAsync(filePath, pharContent);
    var result = await PhpPharScanner.ScanFileAsync(filePath, "hash.phar", CancellationToken.None);
    // Conditional like the minimal-PHAR test: only validate the hash shape
    // when the scanner accepted the fixture. A SHA-256 digest is 64 lowercase
    // hex characters.
    if (result is not null)
    {
        Assert.NotNull(result.Sha256);
        Assert.Equal(64, result.Sha256.Length);
        Assert.True(result.Sha256.All(c => char.IsAsciiHexDigitLower(c)));
    }
}
#endregion
#region PhpPharArchive Tests
[Fact]
public void PhpPharArchive_Constructor_NormalizesBackslashes()
{
    // Windows-style separators must be canonicalized to forward slashes
    // for both the absolute and the relative path.
    var sut = new PhpPharArchive(
        @"C:\path\to\file.phar",
        @"vendor\file.phar",
        null,
        null,
        Array.Empty<PhpPharEntry>(),
        null);

    Assert.Equal("C:/path/to/file.phar", sut.FilePath);
    Assert.Equal("vendor/file.phar", sut.RelativePath);
}
[Fact]
public void PhpPharArchive_Constructor_RequiresFilePath()
{
    // An empty file path is invalid and must be rejected at construction time.
    Assert.Throws<ArgumentException>(() => new PhpPharArchive(
        string.Empty,
        "test.phar",
        null,
        null,
        Array.Empty<PhpPharEntry>(),
        null));
}
[Fact]
public void PhpPharArchive_HasEmbeddedVendor_TrueForVendorPath()
{
    // Any entry rooted under vendor/ marks the archive as carrying
    // an embedded Composer vendor tree.
    var entries = new[]
    {
        new PhpPharEntry("vendor/autoload.php", 100, 80, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("src/Main.php", 200, 150, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.True(sut.HasEmbeddedVendor);
}
[Fact]
public void PhpPharArchive_HasEmbeddedVendor_FalseWithoutVendor()
{
    // src/ and lib/ entries alone must not trigger vendor detection.
    var entries = new[]
    {
        new PhpPharEntry("src/Main.php", 200, 150, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("lib/Helper.php", 100, 80, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.False(sut.HasEmbeddedVendor);
}
[Fact]
public void PhpPharArchive_HasComposerFiles_TrueForComposerJson()
{
    // A composer.json entry is sufficient to flag Composer content.
    var entries = new[]
    {
        new PhpPharEntry("composer.json", 500, 400, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("src/Main.php", 200, 150, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.True(sut.HasComposerFiles);
}
[Fact]
public void PhpPharArchive_HasComposerFiles_TrueForComposerLock()
{
    // composer.lock counts as Composer content just like composer.json.
    var entries = new[]
    {
        new PhpPharEntry("composer.lock", 5000, 4000, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("src/Main.php", 200, 150, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.True(sut.HasComposerFiles);
}
[Fact]
public void PhpPharArchive_FileCount_ReturnsCorrectCount()
{
    // FileCount mirrors the number of manifest entries.
    var entries = new[]
    {
        new PhpPharEntry("file1.php", 100, 80, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("file2.php", 200, 150, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("file3.php", 300, 250, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.Equal(3, sut.FileCount);
}
[Fact]
public void PhpPharArchive_TotalUncompressedSize_SumsCorrectly()
{
    // The total is the sum of the uncompressed sizes: 100 + 200 + 300 = 600.
    var entries = new[]
    {
        new PhpPharEntry("file1.php", 100, 80, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("file2.php", 200, 150, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("file3.php", 300, 250, 0, 0, PhpPharCompression.None, null)
    };

    var sut = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, null);

    Assert.Equal(600, sut.TotalUncompressedSize);
}
[Fact]
public void PhpPharArchive_CreateMetadata_IncludesBasicInfo()
{
    // One vendor entry plus a composer.json so both boolean flags are exercised.
    var entries = new[]
    {
        new PhpPharEntry("vendor/autoload.php", 100, 80, 0, 0, PhpPharCompression.None, null),
        new PhpPharEntry("composer.json", 200, 150, 0, 0, PhpPharCompression.None, null)
    };
    var archive = new PhpPharArchive("/test.phar", "test.phar", null, null, entries, "abc123");
    var metadata = archive.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
    Assert.Equal("test.phar", metadata["phar.path"]);
    Assert.Equal("2", metadata["phar.file_count"]);
    // total_size is the summed uncompressed size: 100 + 200.
    Assert.Equal("300", metadata["phar.total_size"]);
    Assert.Equal("true", metadata["phar.has_vendor"]);
    Assert.Equal("true", metadata["phar.has_composer"]);
    Assert.Equal("abc123", metadata["phar.sha256"]);
}
[Fact]
public void PhpPharArchive_CreateMetadata_IncludesManifestInfo()
{
    // Manifest-derived keys (alias, version, compression, signature type)
    // are only present when a manifest was parsed.
    // NOTE(review): 0x1100 is presumably the PHAR manifest API-version/flags
    // field — confirm against PhpPharManifest's constructor.
    var manifest = new PhpPharManifest(
        "myapp",
        "1.2.3",
        0x1100,
        PhpPharCompression.GZip,
        PhpPharSignatureType.Sha256,
        new Dictionary<string, string>());
    var archive = new PhpPharArchive("/test.phar", "test.phar", manifest, null, Array.Empty<PhpPharEntry>(), null);
    var metadata = archive.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
    Assert.Equal("myapp", metadata["phar.alias"]);
    Assert.Equal("1.2.3", metadata["phar.version"]);
    Assert.Equal("gzip", metadata["phar.compression"]);
    Assert.Equal("sha256", metadata["phar.signature_type"]);
}
[Fact]
public void PhpPharArchive_CreateMetadata_DetectsAutoloadInStub()
{
    // A stub registering an autoloader is surfaced via phar.stub_has_autoload.
    var stubWithAutoload = "<?php\nspl_autoload_register(function($class) {});\n__HALT_COMPILER();";
    var sut = new PhpPharArchive("/test.phar", "test.phar", null, stubWithAutoload, Array.Empty<PhpPharEntry>(), null);

    var metadata = sut.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);

    Assert.Equal("true", metadata["phar.stub_has_autoload"]);
}
#endregion
#region PhpPharEntry Tests
[Fact]
public void PhpPharEntry_Extension_ReturnsCorrectExtension()
{
    // Extension is the suffix without the leading dot.
    var sut = new PhpPharEntry("src/Main.php", 100, 80, 0, 0, PhpPharCompression.None, null);

    Assert.Equal("php", sut.Extension);
}
[Fact]
public void PhpPharEntry_IsPhpFile_TrueForPhpExtension()
{
    var sut = new PhpPharEntry("src/Main.php", 100, 80, 0, 0, PhpPharCompression.None, null);

    Assert.True(sut.IsPhpFile);
}
[Fact]
public void PhpPharEntry_IsPhpFile_FalseForOtherExtensions()
{
    // A .json entry must not be classified as PHP source.
    var sut = new PhpPharEntry("config/app.json", 100, 80, 0, 0, PhpPharCompression.None, null);

    Assert.False(sut.IsPhpFile);
}
[Fact]
public void PhpPharEntry_IsVendorFile_TrueForVendorPath()
{
    // Nested vendor/ paths count as vendor files.
    var sut = new PhpPharEntry("vendor/monolog/monolog/src/Logger.php", 100, 80, 0, 0, PhpPharCompression.None, null);

    Assert.True(sut.IsVendorFile);
}
[Fact]
public void PhpPharEntry_IsVendorFile_FalseForSrcPath()
{
    var sut = new PhpPharEntry("src/Main.php", 100, 80, 0, 0, PhpPharCompression.None, null);

    Assert.False(sut.IsVendorFile);
}
#endregion
#region PhpPharScanResult Tests
[Fact]
public void PhpPharScanResult_Empty_HasNoContent()
{
    // The Empty sentinel reports no archives, no usages, and zero files.
    var sut = PhpPharScanResult.Empty;

    Assert.Empty(sut.Archives);
    Assert.Empty(sut.Usages);
    Assert.False(sut.HasPharContent);
    Assert.Equal(0, sut.TotalArchivedFiles);
}
[Fact]
public void PhpPharScanResult_HasPharContent_TrueWithArchives()
{
    // An archive alone (no phar:// usages) is enough to flip HasPharContent.
    var archive = new PhpPharArchive("/test.phar", "test.phar", null, null, Array.Empty<PhpPharEntry>(), null);

    var sut = new PhpPharScanResult(new[] { archive }, Array.Empty<PhpPharUsage>());

    Assert.True(sut.HasPharContent);
}
[Fact]
public void PhpPharScanResult_HasPharContent_TrueWithUsages()
{
    // Conversely, a phar:// usage alone (no archives) also counts as content.
    var usage = new PhpPharUsage("src/Main.php", 10, "phar://myapp.phar/src/Helper.php", "myapp.phar/src/Helper.php");

    var sut = new PhpPharScanResult(Array.Empty<PhpPharArchive>(), new[] { usage });

    Assert.True(sut.HasPharContent);
}
[Fact]
public void PhpPharScanResult_TotalArchivedFiles_SumsAcrossArchives()
{
    // Local factory keeps the fixture terse; compression/CRC fields are irrelevant here.
    static PhpPharEntry Entry(string path, int size, int compressed) =>
        new PhpPharEntry(path, size, compressed, 0, 0, PhpPharCompression.None, null);

    var firstArchive = new PhpPharArchive(
        "/test1.phar",
        "test1.phar",
        null,
        null,
        new[] { Entry("file1.php", 100, 80), Entry("file2.php", 200, 150) },
        null);
    var secondArchive = new PhpPharArchive(
        "/test2.phar",
        "test2.phar",
        null,
        null,
        new[] { Entry("file3.php", 300, 250) },
        null);

    var scan = new PhpPharScanResult(new[] { firstArchive, secondArchive }, Array.Empty<PhpPharUsage>());

    // 2 entries in the first archive + 1 in the second = 3 total.
    Assert.Equal(3, scan.TotalArchivedFiles);
}
[Fact]
public void PhpPharScanResult_ArchivesWithVendor_FiltersCorrectly()
{
    // One archive carries a vendor/ entry, the other only first-party src/ code.
    var vendoredArchive = new PhpPharArchive(
        "/with-vendor.phar",
        "with-vendor.phar",
        null,
        null,
        new[] { new PhpPharEntry("vendor/autoload.php", 100, 80, 0, 0, PhpPharCompression.None, null) },
        null);
    var plainArchive = new PhpPharArchive(
        "/without-vendor.phar",
        "without-vendor.phar",
        null,
        null,
        new[] { new PhpPharEntry("src/Main.php", 200, 150, 0, 0, PhpPharCompression.None, null) },
        null);

    var scan = new PhpPharScanResult(new[] { vendoredArchive, plainArchive }, Array.Empty<PhpPharUsage>());

    var vendorArchives = scan.ArchivesWithVendor.ToList();

    // Only the archive containing vendor/ content is reported.
    Assert.Single(vendorArchives);
    Assert.Equal("with-vendor.phar", vendorArchives[0].RelativePath);
}
[Fact]
public void PhpPharScanResult_CreateMetadata_IncludesAllCounts()
{
    // Fixture: one archive containing a single vendor file, plus one phar:// usage.
    var vendorEntry = new PhpPharEntry("vendor/autoload.php", 100, 80, 0, 0, PhpPharCompression.None, null);
    var archive = new PhpPharArchive("/test.phar", "test.phar", null, null, new[] { vendorEntry }, null);
    var usage = new PhpPharUsage("src/Main.php", 10, "phar://test.phar/file.php", "test.phar/file.php");

    var scan = new PhpPharScanResult(new[] { archive }, new[] { usage });

    var metadata = scan.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);

    // Every count should be "1": one archive, one usage, one archived file,
    // and that one archive contains vendor content.
    Assert.Equal("1", metadata["phar.archive_count"]);
    Assert.Equal("1", metadata["phar.usage_count"]);
    Assert.Equal("1", metadata["phar.total_archived_files"]);
    Assert.Equal("1", metadata["phar.archives_with_vendor"]);
}
#endregion
#region PhpPharUsage Tests
[Fact]
public void PhpPharUsage_RecordProperties_SetCorrectly()
{
    const string snippet = "include 'phar://app.phar/Helper.php';";

    var usage = new PhpPharUsage("src/Main.php", 42, snippet, "app.phar/Helper.php");

    // Each constructor argument must round-trip through its matching property.
    Assert.Equal("src/Main.php", usage.SourceFile);
    Assert.Equal(42, usage.SourceLine);
    Assert.Equal(snippet, usage.Snippet);
    Assert.Equal("app.phar/Helper.php", usage.PharPath);
}
#endregion
#region Compression and Signature Enums Tests
[Fact]
public void PhpPharCompression_HasExpectedValues()
{
    // Pins the numeric values so they cannot drift silently;
    // casting from int verifies the same 0/1/2 mapping as comparing (int)values.
    Assert.Equal(PhpPharCompression.None, (PhpPharCompression)0);
    Assert.Equal(PhpPharCompression.GZip, (PhpPharCompression)1);
    Assert.Equal(PhpPharCompression.BZip2, (PhpPharCompression)2);
}
[Fact]
public void PhpPharSignatureType_HasExpectedValues()
{
    // Pins the declared ordering None..OpenSsl to the values 0..5.
    Assert.Equal(PhpPharSignatureType.None, (PhpPharSignatureType)0);
    Assert.Equal(PhpPharSignatureType.Md5, (PhpPharSignatureType)1);
    Assert.Equal(PhpPharSignatureType.Sha1, (PhpPharSignatureType)2);
    Assert.Equal(PhpPharSignatureType.Sha256, (PhpPharSignatureType)3);
    Assert.Equal(PhpPharSignatureType.Sha512, (PhpPharSignatureType)4);
    Assert.Equal(PhpPharSignatureType.OpenSsl, (PhpPharSignatureType)5);
}
#endregion
#region Helper Methods
/// <summary>
/// Builds the smallest byte layout recognisable as a PHAR archive:
/// the stub, a CRLF, then a manifest header describing zero files,
/// no alias, and no metadata. Real PHARs append file contents and a
/// signature block, which these tests do not need.
/// </summary>
/// <param name="stub">PHP stub text, conventionally ending in <c>__HALT_COMPILER();</c>.</param>
/// <returns>Raw bytes of the minimal archive.</returns>
private static byte[] CreateMinimalPharBytes(string stub)
{
    var stubBytes = Encoding.UTF8.GetBytes(stub);

    using var ms = new MemoryStream();
    // BinaryWriter always emits little-endian, which is what the PHAR
    // on-disk format requires. (BitConverter.GetBytes would emit
    // host-endian bytes and produce a malformed archive on big-endian hosts.)
    using (var writer = new BinaryWriter(ms, Encoding.UTF8, leaveOpen: true))
    {
        writer.Write(stubBytes);
        writer.Write((byte)0x0D);     // CR
        writer.Write((byte)0x0A);     // LF
        // Manifest length counts the bytes that follow this field:
        // file count (4) + API version (2) + flags (4) + alias len (4) + metadata len (4) = 18.
        writer.Write(18u);
        writer.Write(0u);             // file count: empty archive
        writer.Write((ushort)0x1100); // PHAR API version 1.1.0
        writer.Write(0u);             // global flags: none
        writer.Write(0u);             // alias length: no alias
        writer.Write(0u);             // metadata length: no metadata
    }

    return ms.ToArray();
}
#endregion
}

View File

@@ -30,7 +30,6 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Php/StellaOps.Scanner.Analyzers.Lang.Php.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />

View File

@@ -5,8 +5,14 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<EnableDefaultItems>false</EnableDefaultItems>
</PropertyGroup>
<ItemGroup>
<Compile Include="**/*.cs" Exclude="obj/**;bin/**" />
<None Include="**/*" Exclude="**/*.cs;bin/**;obj/**" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />