up
Some checks failed
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-12-13 09:37:15 +02:00
parent e00f6365da
commit 6e45066e37
349 changed files with 17160 additions and 1867 deletions

View File

@@ -13,6 +13,7 @@ using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Transparency;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Core.Bulk;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.Attestor.Infrastructure.Storage;
using StellaOps.Attestor.Infrastructure.Submission;

View File

@@ -13,6 +13,7 @@
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.SmSoft\StellaOps.Cryptography.Plugin.SmSoft.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />

View File

@@ -0,0 +1,107 @@
using System;
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Attestor.Infrastructure.Verification;
/// <summary>
/// Attestor verification cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
internal sealed class MessagingAttestorVerificationCache : IAttestorVerificationCache
{
    private readonly IDistributedCache<AttestorVerificationResult> _cache;
    private readonly ILogger<MessagingAttestorVerificationCache> _logger;
    private readonly TimeSpan _ttl;

    /// <summary>
    /// Creates the cache, reading the verification TTL from <see cref="AttestorOptions"/>
    /// (clamped to at least one second) and registering the "attestor:verify:" key prefix.
    /// </summary>
    public MessagingAttestorVerificationCache(
        IDistributedCacheFactory cacheFactory,
        IOptions<AttestorOptions> options,
        ILogger<MessagingAttestorVerificationCache> logger)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);
        ArgumentNullException.ThrowIfNull(options);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        // Clamp so a misconfigured zero/negative TTL never reaches the cache transport.
        var configuredSeconds = Math.Max(1, options.Value.Cache.Verification.TtlSeconds);
        _ttl = TimeSpan.FromSeconds(configuredSeconds);

        _cache = cacheFactory.Create<AttestorVerificationResult>(new CacheOptions
        {
            KeyPrefix = "attestor:verify:",
            DefaultTtl = _ttl,
        });

        _logger.LogInformation(
            "Initialized MessagingAttestorVerificationCache with provider {Provider}, TTL {Ttl}s",
            _cache.ProviderName,
            _ttl.TotalSeconds.ToString(CultureInfo.InvariantCulture));
    }

    /// <summary>
    /// Looks up a previously cached verification result; returns <c>null</c> on a miss.
    /// </summary>
    public async Task<AttestorVerificationResult?> GetAsync(
        string subject,
        string envelopeId,
        string policyVersion,
        CancellationToken cancellationToken = default)
    {
        var key = BuildCacheKey(subject, envelopeId, policyVersion);
        var lookup = await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
        return lookup.HasValue ? lookup.Value : null;
    }

    /// <summary>
    /// Stores a verification result under the subject/envelope/policy composite key using the configured TTL.
    /// </summary>
    public async Task SetAsync(
        string subject,
        string envelopeId,
        string policyVersion,
        AttestorVerificationResult result,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);

        var key = BuildCacheKey(subject, envelopeId, policyVersion);
        await _cache
            .SetAsync(key, result, new CacheEntryOptions { TimeToLive = _ttl }, cancellationToken)
            .ConfigureAwait(false);

        var normalizedSubject = Normalize(subject);
        _logger.LogDebug(
            "Cached verification result for subject {Subject} envelope {Envelope} policy {Policy} with TTL {TtlSeconds}s.",
            normalizedSubject,
            Normalize(envelopeId),
            Normalize(policyVersion),
            _ttl.TotalSeconds.ToString(CultureInfo.InvariantCulture));
    }

    /// <summary>
    /// Removes every cached verification result for the given subject. No-op for blank subjects.
    /// </summary>
    public async Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(subject))
        {
            return;
        }

        var normalizedSubject = Normalize(subject);

        // Keys have the shape "<subject>|<envelope>|<policy>" under the "attestor:verify:"
        // prefix, so "<subject>|*" matches every entry belonging to the subject.
        var removed = await _cache
            .InvalidateByPatternAsync($"{normalizedSubject}|*", cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Invalidated {Count} verification cache entries for subject {Subject}.", removed, normalizedSubject);
    }

    // Composite key: trimmed subject, envelope id, and policy version joined by '|'.
    private static string BuildCacheKey(string subject, string envelopeId, string policyVersion)
        => $"{Normalize(subject)}|{Normalize(envelopeId)}|{Normalize(policyVersion)}";

    // Null-safe trim; null collapses to the empty string.
    private static string Normalize(string? value) => value?.Trim() ?? string.Empty;
}

View File

@@ -0,0 +1,83 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Auth.Client;
/// <summary>
/// Token cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
public sealed class MessagingTokenCache : IStellaOpsTokenCache
{
    private readonly IDistributedCache<StellaOpsTokenCacheEntry> _cache;
    private readonly TimeProvider _timeProvider;
    private readonly Func<StellaOpsTokenCacheEntry, StellaOpsTokenCacheEntry> _normalizer;
    private readonly TimeSpan _expirationSkew;

    /// <summary>
    /// Creates the cache under the "auth:token:" key prefix.
    /// </summary>
    /// <param name="cacheFactory">Factory producing the typed distributed cache.</param>
    /// <param name="timeProvider">Clock used for expiry checks; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <param name="expirationSkew">Safety margin applied when deciding an entry is expired; defaults to 30 seconds.</param>
    public MessagingTokenCache(
        IDistributedCacheFactory cacheFactory,
        TimeProvider? timeProvider = null,
        TimeSpan? expirationSkew = null)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);

        _timeProvider = timeProvider ?? TimeProvider.System;
        _expirationSkew = expirationSkew ?? TimeSpan.FromSeconds(30);
        _normalizer = static entry => entry.NormalizeScopes();
        _cache = cacheFactory.Create<StellaOpsTokenCacheEntry>(new CacheOptions
        {
            KeyPrefix = "auth:token:",
        });
    }

    /// <summary>
    /// Returns the cached entry for <paramref name="key"/>, or <c>null</c> on a miss.
    /// Entries that are expired (within the skew window) are evicted eagerly and reported as misses.
    /// </summary>
    public async ValueTask<StellaOpsTokenCacheEntry?> GetAsync(string key, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(key);

        var lookup = await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
        if (!lookup.HasValue)
        {
            return null;
        }

        var cached = lookup.Value;
        if (!cached.IsExpired(_timeProvider, _expirationSkew))
        {
            return cached;
        }

        // Stale token: remove it so subsequent callers do not re-read it.
        await _cache.InvalidateAsync(key, cancellationToken).ConfigureAwait(false);
        return null;
    }

    /// <summary>
    /// Stores a normalized entry with a TTL derived from its expiry; already-expired entries are dropped.
    /// </summary>
    public async ValueTask SetAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(key);
        ArgumentNullException.ThrowIfNull(entry);

        var normalized = _normalizer(entry);
        var remaining = normalized.ExpiresAtUtc - _timeProvider.GetUtcNow();
        if (remaining <= TimeSpan.Zero)
        {
            // Nothing useful to cache: the token is already past its expiry.
            return;
        }

        await _cache
            .SetAsync(key, normalized, new CacheEntryOptions { TimeToLive = remaining }, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Removes the entry for <paramref name="key"/>, if present.
    /// </summary>
    public async ValueTask RemoveAsync(string key, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(key);

        await _cache.InvalidateAsync(key, cancellationToken).ConfigureAwait(false);
    }
}

View File

@@ -30,6 +30,7 @@
<ProjectReference Include="..\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" />
<ProjectReference Include="..\..\..\AirGap\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Http.Resilience" Version="10.0.0" />

View File

@@ -0,0 +1,57 @@
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Authority.Plugin.Ldap.Claims;
/// <summary>
/// LDAP claims cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
internal sealed class MessagingLdapClaimsCache : ILdapClaimsCache
{
    private readonly IDistributedCache<LdapCachedClaims> _cache;
    private readonly string _pluginName;
    private readonly TimeSpan _ttl;

    /// <summary>
    /// Creates the cache, namespacing keys per plugin via the "ldap:claims:{pluginName}:" prefix.
    /// </summary>
    /// <param name="cacheFactory">Factory producing the typed distributed cache.</param>
    /// <param name="pluginName">Plugin instance name; must be non-blank.</param>
    /// <param name="options">Cache options supplying the TTL in seconds.</param>
    public MessagingLdapClaimsCache(
        IDistributedCacheFactory cacheFactory,
        string pluginName,
        LdapClaimsCacheOptions options)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);
        ArgumentException.ThrowIfNullOrWhiteSpace(pluginName);
        ArgumentNullException.ThrowIfNull(options);

        _pluginName = pluginName;
        // Clamp to at least one second (consistent with the other Messaging-backed caches,
        // e.g. the attestor verification cache) so a zero or negative configured TtlSeconds
        // cannot yield a non-positive TimeToLive for cache entries.
        _ttl = TimeSpan.FromSeconds(Math.Max(1, options.TtlSeconds));
        _cache = cacheFactory.Create<LdapCachedClaims>(new CacheOptions
        {
            KeyPrefix = $"ldap:claims:{pluginName}:",
            DefaultTtl = _ttl,
        });
    }

    /// <summary>
    /// Returns the cached claims for <paramref name="subjectId"/>, or <c>null</c> on a miss.
    /// </summary>
    public async ValueTask<LdapCachedClaims?> GetAsync(string subjectId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectId);

        var key = BuildKey(subjectId);
        var result = await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
        return result.HasValue ? result.Value : null;
    }

    /// <summary>
    /// Stores the claims for <paramref name="subjectId"/> using the configured TTL.
    /// </summary>
    public async ValueTask SetAsync(string subjectId, LdapCachedClaims claims, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectId);
        ArgumentNullException.ThrowIfNull(claims);

        var key = BuildKey(subjectId);
        var entryOptions = new CacheEntryOptions { TimeToLive = _ttl };
        await _cache.SetAsync(key, claims, entryOptions, cancellationToken).ConfigureAwait(false);
    }

    // Subject ids are case-folded so lookups are case-insensitive regardless of LDAP casing.
    private string BuildKey(string subjectId) => subjectId.ToLowerInvariant();
}

View File

@@ -13,7 +13,7 @@
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="System.DirectoryServices.Protocols" Version="8.0.0" />
<!-- MongoDB.Driver removed - using Mongo compatibility shim -->
<!-- Storage now uses PostgreSQL -->
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Authority.Plugins.Abstractions\\StellaOps.Authority.Plugins.Abstractions.csproj" />
@@ -21,5 +21,6 @@
<ProjectReference Include="..\\StellaOps.Authority.Storage.InMemory\\StellaOps.Authority.Storage.InMemory.csproj" />
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Plugin\\StellaOps.Plugin.csproj" />
<ProjectReference Include="..\\..\\__Libraries\\StellaOps.Authority.Storage.Postgres\\StellaOps.Authority.Storage.Postgres.csproj" />
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Messaging\\StellaOps.Messaging.csproj" />
</ItemGroup>
</Project>

View File

@@ -9,7 +9,5 @@
<ProjectReference Include="..\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj" />
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
</ItemGroup>
<ItemGroup>
<!-- MongoDB.Driver removed - using Mongo compatibility shim via Plugin.Standard project reference -->
</ItemGroup>
<!-- Storage now uses PostgreSQL via Plugin.Standard project reference -->
</Project>

View File

@@ -11,9 +11,7 @@
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
</ItemGroup>
<ItemGroup>
<!-- MongoDB.Driver removed - using Mongo compatibility shim via Authority project reference -->
</ItemGroup>
<!-- Storage now uses PostgreSQL via Authority project reference -->
<ItemGroup>
<Compile Include="../../../../tests/shared/OpenSslLegacyShim.cs" Link="Infrastructure/OpenSslLegacyShim.cs" />
<None Include="../../../../tests/native/openssl-1.1/linux-x64/*" Link="native/linux-x64/%(Filename)%(Extension)" CopyToOutputDirectory="PreserveNewest" />

View File

@@ -8,9 +8,16 @@ The bench harness exercises the language analyzers against representative filesy
- `baseline.csv` Reference numbers captured on the 4vCPU warm rig described in `docs/12_PERFORMANCE_WORKBOOK.md`. CI publishes fresh CSVs so perf trends stay visible.
## Current scenarios
- `node_detection_gaps_fixture` → runs the Node analyzer across `samples/runtime/node-detection-gaps` (workspaces + lock-only + import scan).
- `node_monorepo_walk` → runs the Node analyzer across `samples/runtime/npm-monorepo`.
- `java_demo_archive` → runs the Java analyzer against `samples/runtime/java-demo/libs/demo.jar`.
- `python_site_packages_walk` → temporary metadata walk over `samples/runtime/python-venv` until the Python analyzer lands.
- `python_site_packages_scan` → runs the Python analyzer across `samples/runtime/python-venv`.
- `python_pip_cache_fixture` → runs the Python analyzer across the RECORD-heavy pip cache fixture.
- `python_layered_editable_fixture` → runs the Python analyzer across layered/container-root layouts (`layers/`, `.layers/`, `layer*`).
- `bun_multi_workspace_fixture` → runs the Bun analyzer across the Bun multi-workspace fixture under the Bun analyzer tests.
See `config.json` for the authoritative list.
## Running locally

View File

@@ -0,0 +1,268 @@
using System.Collections.Generic;
using System.Linq;
using StellaOps.Scanner.Analyzers.Lang;
namespace StellaOps.Bench.ScannerAnalyzers.Scenarios;
/// <summary>
/// Computes supplementary, deterministic metrics for Node analyzer bench scenarios:
/// per-workspace import-scan counters (packages, files, bytes, capped packages) derived
/// by re-walking the scan roots reported in a <see cref="LanguageAnalyzerResult"/>.
/// </summary>
internal static class NodeBenchMetrics
{
// File extensions counted as scannable JavaScript/TypeScript sources (case-insensitive).
private static readonly HashSet<string> Extensions = new(StringComparer.OrdinalIgnoreCase)
{
".js",
".jsx",
".mjs",
".cjs",
".ts",
".tsx",
".mts",
".cts"
};
// Directory names skipped during traversal in addition to the dot-directory rule below.
private static readonly string[] IgnoredDirectories =
{
".bin",
".cache",
".store",
"__pycache__"
};
/// <summary>
/// Aggregates import-scan counters across every scan root derived from the analyzer result.
/// Returns a SortedDictionary with ordinal key ordering so output is deterministic.
/// </summary>
/// <param name="rootPath">Scenario root directory that component paths are relative to.</param>
/// <param name="result">Analyzer output whose Node components define the scan roots.</param>
public static IReadOnlyDictionary<string, double> Compute(string rootPath, LanguageAnalyzerResult result)
{
ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
ArgumentNullException.ThrowIfNull(result);
var scanRoots = CollectImportScanRoots(rootPath, result);
var packagesScanned = 0;
var filesScanned = 0;
long bytesScanned = 0;
var cappedPackages = 0;
foreach (var scanRoot in scanRoots)
{
var counters = CountImportScan(scanRoot);
// Each scan root counts as one "package" regardless of how many files it yields.
packagesScanned++;
filesScanned += counters.FilesScanned;
bytesScanned += counters.BytesScanned;
if (counters.Capped)
{
cappedPackages++;
}
}
return new SortedDictionary<string, double>(StringComparer.Ordinal)
{
["node.importScan.packages"] = packagesScanned,
["node.importScan.filesScanned"] = filesScanned,
["node.importScan.bytesScanned"] = bytesScanned,
["node.importScan.cappedPackages"] = cappedPackages
};
}
/// <summary>
/// Order-insensitive, exact-value equality of two metric maps; used to assert that
/// metrics stay identical across bench iterations.
/// </summary>
public static bool AreEqual(IReadOnlyDictionary<string, double> left, IReadOnlyDictionary<string, double> right)
{
ArgumentNullException.ThrowIfNull(left);
ArgumentNullException.ThrowIfNull(right);
if (ReferenceEquals(left, right))
{
return true;
}
if (left.Count != right.Count)
{
return false;
}
foreach (var (key, value) in left)
{
// Exact double comparison is intentional: metrics are integer-valued counters.
if (!right.TryGetValue(key, out var other) || other != value)
{
return false;
}
}
return true;
}
// Resolves the directories to walk: the scenario root itself (component path ".") plus
// every Node component flagged as a workspace member. Paths are deduplicated with a
// case-insensitive comparer on Windows, filtered to existing directories, and returned
// in ordinal order for determinism.
private static IReadOnlyList<string> CollectImportScanRoots(string rootPath, LanguageAnalyzerResult result)
{
var comparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
var roots = new HashSet<string>(comparer);
var fullRoot = Path.GetFullPath(rootPath);
foreach (var record in result.Components)
{
if (!string.Equals(record.AnalyzerId, "node", StringComparison.Ordinal))
{
continue;
}
if (!record.Metadata.TryGetValue("path", out var relativePath) || string.IsNullOrWhiteSpace(relativePath))
{
continue;
}
var isRoot = string.Equals(relativePath, ".", StringComparison.Ordinal);
var isWorkspaceMember = record.Metadata.TryGetValue("workspaceMember", out var workspaceMember)
&& string.Equals(workspaceMember, "true", StringComparison.OrdinalIgnoreCase);
if (!isRoot && !isWorkspaceMember)
{
continue;
}
// Metadata paths use '/' separators; convert before combining with the root.
var absolute = isRoot
? fullRoot
: Path.GetFullPath(Path.Combine(fullRoot, relativePath.Replace('/', Path.DirectorySeparatorChar)));
if (Directory.Exists(absolute))
{
roots.Add(absolute);
}
}
return roots.OrderBy(static p => p, StringComparer.Ordinal).ToArray();
}
// Counts source files/bytes under one scan root, subject to per-package caps.
// NOTE(review): the cap constants appear to mirror the Node analyzer's own import-scan
// limits — confirm against the analyzer before changing them, or the metrics drift.
private static ImportScanCounters CountImportScan(string rootPath)
{
const int maxFilesPerPackage = 500;
const long maxBytesPerPackage = 5L * 1024 * 1024;
const long maxFileBytes = 512L * 1024;
const int maxDepth = 20;
var filesScanned = 0;
long bytesScanned = 0;
var capped = false;
foreach (var file in EnumerateSourceFiles(rootPath, maxDepth))
{
// Cap check happens before processing the next candidate, so hitting a limit
// exactly at the end of the file list does not mark the package as capped.
if (filesScanned >= maxFilesPerPackage || bytesScanned >= maxBytesPerPackage)
{
capped = true;
break;
}
long length;
try
{
// Best-effort: files that disappear or are unreadable are simply skipped.
length = new FileInfo(file).Length;
}
catch
{
continue;
}
// Empty and oversized files are excluded from the counters entirely.
if (length <= 0 || length > maxFileBytes)
{
continue;
}
if (bytesScanned + length > maxBytesPerPackage)
{
capped = true;
break;
}
bytesScanned += length;
filesScanned++;
}
return new ImportScanCounters(filesScanned, bytesScanned, capped);
}
// Iterative depth-first walk yielding source files (by extension) in deterministic order.
// Directory and file listings are sorted with an OS-appropriate comparer; enumeration
// errors (permissions, races) are swallowed so the walk is best-effort.
private static IEnumerable<string> EnumerateSourceFiles(string root, int maxDepth)
{
var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
var stack = new Stack<(string Path, int Depth)>();
stack.Push((root, 0));
while (stack.Count > 0)
{
var (current, depth) = stack.Pop();
IEnumerable<string> files;
try
{
files = Directory.EnumerateFiles(current, "*", SearchOption.TopDirectoryOnly);
}
catch
{
files = Array.Empty<string>();
}
foreach (var file in files.OrderBy(static f => f, pathComparer))
{
var ext = Path.GetExtension(file);
if (!string.IsNullOrWhiteSpace(ext) && Extensions.Contains(ext))
{
yield return file;
}
}
// Files at maxDepth are still yielded; only descent below maxDepth is cut off.
if (depth >= maxDepth)
{
continue;
}
IEnumerable<string> dirs;
try
{
dirs = Directory.EnumerateDirectories(current, "*", SearchOption.TopDirectoryOnly);
}
catch
{
dirs = Array.Empty<string>();
}
var ordered = dirs
.Where(static d => !ShouldSkipImportDirectory(Path.GetFileName(d)))
.OrderBy(static d => d, pathComparer)
.ToArray();
// Push in reverse so the stack pops subdirectories in sorted (ascending) order.
for (var i = ordered.Length - 1; i >= 0; i--)
{
stack.Push((ordered[i], depth + 1));
}
}
}
// Skip rule for the import scan: dependency stores (node_modules, .pnpm) are always
// excluded here, then the general directory rule below applies.
private static bool ShouldSkipImportDirectory(string? name)
{
if (string.IsNullOrWhiteSpace(name))
{
return true;
}
if (string.Equals(name, "node_modules", StringComparison.OrdinalIgnoreCase))
{
return true;
}
if (string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase))
{
return true;
}
return ShouldSkipDirectory(name);
}
// General rule: skip dot-directories (with a ".pnpm" carve-out that is moot on the
// import-scan path, since ShouldSkipImportDirectory already excluded it) and the
// explicit IgnoredDirectories list.
private static bool ShouldSkipDirectory(string name)
{
if (name.Length == 0)
{
return true;
}
if (name[0] == '.')
{
return !string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase);
}
return IgnoredDirectories.Any(ignored => string.Equals(name, ignored, StringComparison.OrdinalIgnoreCase));
}
// Per-root counters: files counted, bytes counted, and whether a cap was hit.
private readonly record struct ImportScanCounters(int FilesScanned, long BytesScanned, bool Capped);
}

View File

@@ -43,7 +43,10 @@ internal static class Program
stats.P95Ms,
stats.MaxMs,
iterations,
scenarioThreshold);
scenarioThreshold)
{
Metrics = execution.Metrics
};
results.Add(result);
@@ -101,7 +104,7 @@ internal static class Program
}
catch (Exception ex)
{
Console.Error.WriteLine(ex.Message);
Console.Error.WriteLine(ex);
return 1;
}
}

View File

@@ -53,6 +53,7 @@ internal static class BenchmarkJsonWriter
report.Result.P95Ms,
report.Result.MaxMs,
report.Result.ThresholdMs,
report.Result.Metrics,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
@@ -84,6 +85,7 @@ internal static class BenchmarkJsonWriter
double P95Ms,
double MaxMs,
double ThresholdMs,
IReadOnlyDictionary<string, double>? Metrics,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
@@ -35,7 +36,13 @@ internal sealed class BenchmarkScenarioReport
}
var percentage = (MaxRegressionRatio.Value - 1d) * 100d;
return $"{Result.Id} exceeded regression budget: max {Result.MaxMs:F2} ms vs baseline {Baseline!.MaxMs:F2} ms (+{percentage:F1}%)";
return string.Format(
CultureInfo.InvariantCulture,
"{0} exceeded regression budget: max {1:F2} ms vs baseline {2:F2} ms (+{3:F1}%)",
Result.Id,
Result.MaxMs,
Baseline!.MaxMs,
percentage);
}
private static double? CalculateRatio(double current, double? baseline)

View File

@@ -1,4 +1,5 @@
using System.Globalization;
using System.Linq;
using System.Text;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
@@ -20,6 +21,10 @@ internal static class PrometheusWriter
var builder = new StringBuilder();
builder.AppendLine("# HELP scanner_analyzer_bench_duration_ms Analyzer benchmark duration metrics in milliseconds.");
builder.AppendLine("# TYPE scanner_analyzer_bench_duration_ms gauge");
builder.AppendLine("# HELP scanner_analyzer_bench_sample_count Analyzer benchmark sample counts (component/file counts).");
builder.AppendLine("# TYPE scanner_analyzer_bench_sample_count gauge");
builder.AppendLine("# HELP scanner_analyzer_bench_metric Additional analyzer benchmark metrics.");
builder.AppendLine("# TYPE scanner_analyzer_bench_metric gauge");
foreach (var report in reports)
{
@@ -28,6 +33,7 @@ internal static class PrometheusWriter
AppendMetric(builder, "scanner_analyzer_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
AppendMetric(builder, "scanner_analyzer_bench_max_ms", scenarioLabel, report.Result.MaxMs);
AppendMetric(builder, "scanner_analyzer_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);
AppendMetric(builder, "scanner_analyzer_bench_sample_count", scenarioLabel, report.Result.SampleCount);
if (report.Baseline is { } baseline)
{
@@ -41,6 +47,19 @@ internal static class PrometheusWriter
AppendMetric(builder, "scanner_analyzer_bench_regression_limit", scenarioLabel, report.RegressionLimit);
AppendMetric(builder, "scanner_analyzer_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
}
if (report.Result.Metrics is { Count: > 0 } metrics)
{
foreach (var metric in metrics.OrderBy(static item => item.Key, StringComparer.Ordinal))
{
builder.Append("scanner_analyzer_bench_metric{scenario=\"");
builder.Append(scenarioLabel);
builder.Append("\",name=\"");
builder.Append(Escape(metric.Key));
builder.Append("\"} ");
builder.AppendLine(metric.Value.ToString("G17", CultureInfo.InvariantCulture));
}
}
}
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);

View File

@@ -12,6 +12,8 @@ internal sealed record ScenarioResult(
int Iterations,
double ThresholdMs)
{
public IReadOnlyDictionary<string, double>? Metrics { get; init; }
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string SampleCountColumn => SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5);

View File

@@ -4,6 +4,7 @@ using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Bun;
using StellaOps.Scanner.Analyzers.Lang.Go;
using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
@@ -17,7 +18,7 @@ internal interface IScenarioRunner
Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken);
}
internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount);
internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount, IReadOnlyDictionary<string, double>? Metrics = null);
internal static class ScenarioRunnerFactory
{
@@ -40,6 +41,7 @@ internal static class ScenarioRunnerFactory
internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
{
private readonly IReadOnlyList<Func<ILanguageAnalyzer>> _analyzerFactories;
private readonly bool _includesNodeAnalyzer;
public LanguageAnalyzerScenarioRunner(IEnumerable<string> analyzerIds)
{
@@ -48,11 +50,15 @@ internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
throw new ArgumentNullException(nameof(analyzerIds));
}
_analyzerFactories = analyzerIds
var normalizedIds = analyzerIds
.Where(static id => !string.IsNullOrWhiteSpace(id))
.Select(CreateFactory)
.Select(static id => id.Trim().ToLowerInvariant())
.ToArray();
_includesNodeAnalyzer = normalizedIds.Contains("node", StringComparer.Ordinal);
_analyzerFactories = normalizedIds.Select(CreateFactory).ToArray();
if (_analyzerFactories.Count == 0)
{
throw new InvalidOperationException("At least one analyzer id must be provided.");
@@ -70,6 +76,7 @@ internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
var engine = new LanguageAnalyzerEngine(analyzers);
var durations = new double[iterations];
var componentCount = -1;
IReadOnlyDictionary<string, double>? metrics = null;
for (var i = 0; i < iterations; i++)
{
@@ -91,6 +98,19 @@ internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
{
throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount} vs {currentCount}).");
}
if (_includesNodeAnalyzer)
{
var currentMetrics = NodeBenchMetrics.Compute(rootPath, result);
if (metrics is null)
{
metrics = currentMetrics;
}
else if (!NodeBenchMetrics.AreEqual(metrics, currentMetrics))
{
throw new InvalidOperationException($"Analyzer metrics changed between iterations for '{rootPath}'.");
}
}
}
if (componentCount < 0)
@@ -98,7 +118,7 @@ internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
componentCount = 0;
}
return new ScenarioExecutionResult(durations, componentCount);
return new ScenarioExecutionResult(durations, componentCount, metrics);
}
private static Func<ILanguageAnalyzer> CreateFactory(string analyzerId)
@@ -106,6 +126,7 @@ internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
var id = analyzerId.Trim().ToLowerInvariant();
return id switch
{
"bun" => static () => new BunLanguageAnalyzer(),
"java" => static () => new JavaLanguageAnalyzer(),
"go" => static () => new GoLanguageAnalyzer(),
"node" => static () => new NodeLanguageAnalyzer(),

View File

@@ -11,6 +11,7 @@
<ItemGroup>
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Bun/StellaOps.Scanner.Analyzers.Lang.Bun.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj" />
<ProjectReference Include="../../../../Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" />
@@ -21,4 +22,4 @@
<ItemGroup>
<InternalsVisibleTo Include="StellaOps.Bench.ScannerAnalyzers.Tests" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,7 +1,11 @@
scenario,iterations,sample_count,mean_ms,p95_ms,max_ms
node_monorepo_walk,5,4,6.0975,21.7421,26.8537
java_demo_archive,5,1,6.2007,23.4837,29.1143
go_buildinfo_fixture,5,2,6.1949,22.6851,27.9196
dotnet_multirid_fixture,5,2,11.4884,37.7460,46.4850
python_site_packages_scan,5,3,5.6420,18.2943,22.3739
python_pip_cache_fixture,5,1,5.8598,13.2855,15.6256
node_monorepo_walk,5,4,15.5399,50.3210,61.7146
node_detection_gaps_fixture,5,5,31.8434,96.4542,117.3238
java_demo_archive,5,1,13.6363,49.4627,61.3100
java_fat_archive,5,2,3.5181,8.1467,9.4927
go_buildinfo_fixture,5,2,6.9861,25.8818,32.1304
dotnet_multirid_fixture,5,2,11.8266,38.9340,47.8401
python_site_packages_scan,5,3,36.7930,105.6978,128.4211
python_pip_cache_fixture,5,1,20.1829,30.9147,34.3257
python_layered_editable_fixture,5,3,31.8757,39.7647,41.5656
bun_multi_workspace_fixture,5,2,12.4463,45.1913,55.9832
1 scenario iterations sample_count mean_ms p95_ms max_ms
2 node_monorepo_walk 5 4 6.0975 15.5399 21.7421 50.3210 26.8537 61.7146
3 java_demo_archive node_detection_gaps_fixture 5 1 5 6.2007 31.8434 23.4837 96.4542 29.1143 117.3238
4 go_buildinfo_fixture java_demo_archive 5 2 1 6.1949 13.6363 22.6851 49.4627 27.9196 61.3100
5 dotnet_multirid_fixture java_fat_archive 5 2 11.4884 3.5181 37.7460 8.1467 46.4850 9.4927
6 python_site_packages_scan go_buildinfo_fixture 5 3 2 5.6420 6.9861 18.2943 25.8818 22.3739 32.1304
7 python_pip_cache_fixture dotnet_multirid_fixture 5 1 2 5.8598 11.8266 13.2855 38.9340 15.6256 47.8401
8 python_site_packages_scan 5 3 36.7930 105.6978 128.4211
9 python_pip_cache_fixture 5 1 20.1829 30.9147 34.3257
10 python_layered_editable_fixture 5 3 31.8757 39.7647 41.5656
11 bun_multi_workspace_fixture 5 2 12.4463 45.1913 55.9832

View File

@@ -10,6 +10,15 @@
"node"
]
},
{
"id": "node_detection_gaps_fixture",
"label": "Node analyzer detection gaps fixture (workspace + lock-only + imports)",
"root": "samples/runtime/node-detection-gaps",
"analyzers": [
"node"
],
"thresholdMs": 2000
},
{
"id": "java_demo_archive",
"label": "Java analyzer on demo jar",
@@ -18,6 +27,15 @@
"java"
]
},
{
"id": "java_fat_archive",
"label": "Java analyzer on fat jar (embedded libs)",
"root": "samples/runtime/java-fat-archive",
"analyzers": [
"java"
],
"thresholdMs": 1000
},
{
"id": "go_buildinfo_fixture",
"label": "Go analyzer on build-info binary",
@@ -49,6 +67,24 @@
"analyzers": [
"python"
]
},
{
"id": "python_layered_editable_fixture",
"label": "Python analyzer on layered/container roots fixture",
"root": "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable",
"analyzers": [
"python"
],
"thresholdMs": 2000
},
{
"id": "bun_multi_workspace_fixture",
"label": "Bun analyzer on multi-workspace fixture",
"root": "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/Fixtures/lang/bun/multi-workspace",
"analyzers": [
"bun"
],
"thresholdMs": 1000
}
]
}

View File

@@ -29,3 +29,5 @@ Results should be committed as deterministic CSV/JSON outputs with accompanying
- Added two Python scenarios to `config.json`: the virtualenv sample (`python_site_packages_scan`) and the RECORD-heavy pip cache fixture (`python_pip_cache_fixture`).
- Baseline run (Release build, 5 iterations) records means of **5.64ms** (p95 18.29ms) for the virtualenv and **5.86ms** (p95 13.29ms) for the pip cache verifier; raw numbers stored in `python/hash-throughput-20251023.csv`.
- The pip cache fixture exercises `PythonRecordVerifier` with 12 RECORD rows (7 hashed) and mismatched layer coverage, giving a repeatable hash-validation throughput reference for regression gating.
- 2025-12-13: Added `python_layered_editable_fixture` scenario with `thresholdMs=2000` to guard container-root paths.
- 2025-12-13: Refreshed `baseline.csv` after Python analyzer discovery/VFS changes (see `src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv`).

View File

@@ -9,3 +9,5 @@
| BENCH-POLICY-20-002 | DONE (2025-12-11) | SPRINT_0512_0001_0001_bench | Policy delta benchmark (full vs delta) using baseline/delta NDJSON fixtures; outputs hashed. | `src/Bench/StellaOps.Bench/PolicyDelta` |
| BENCH-SIG-26-001 | DONE (2025-12-11) | SPRINT_0512_0001_0001_bench | Reachability scoring harness with schema hash, 10k/50k fixtures, cache outputs for downstream benches. | `src/Bench/StellaOps.Bench/Signals` |
| BENCH-SIG-26-002 | DONE (2025-12-11) | SPRINT_0512_0001_0001_bench | Policy evaluation cache bench (cold/warm/mixed) consuming reachability caches; outputs hashed. | `src/Bench/StellaOps.Bench/PolicyCache` |
| BENCH-SCANNER-ANALYZERS-405-008 | DONE (2025-12-13) | SPRINT_0405_0001_0001_scanner_python_detection_gaps.md | Extend Scanner analyzer microbench coverage for the Python analyzer (fixtures + thresholds + docs alignment). | `src/Bench/StellaOps.Bench/Scanner.Analyzers` |
| BENCH-SCANNER-ANALYZERS-407-009 | DONE (2025-12-13) | SPRINT_0407_0001_0001_scanner_bun_detection_gaps.md | Add Bun analyzer scenario to microbench harness (config + baseline + wiring). | `src/Bench/StellaOps.Bench/Scanner.Analyzers` |

View File

@@ -0,0 +1,52 @@
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Concelier.WebService.Services;
/// <summary>
/// Advisory chunk cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
internal sealed class MessagingAdvisoryChunkCache : IAdvisoryChunkCache
{
    private readonly IDistributedCache<AdvisoryChunkBuildResult> _chunkCache;

    public MessagingAdvisoryChunkCache(IDistributedCacheFactory cacheFactory)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);

        var cacheOptions = new CacheOptions
        {
            KeyPrefix = "advisory:chunk:",
        };
        _chunkCache = cacheFactory.Create<AdvisoryChunkBuildResult>(cacheOptions);
    }

    /// <summary>
    /// Attempts to read a previously cached chunk build result for <paramref name="key"/>.
    /// </summary>
    /// <returns><c>true</c> and the cached value on a hit; <c>false</c> otherwise.</returns>
    public bool TryGet(in AdvisoryChunkCacheKey key, out AdvisoryChunkBuildResult result)
    {
        // The interface is synchronous, so we bridge to the async cache API here.
        // Consider migrating callers to an async contract in the future.
        var lookup = _chunkCache.GetAsync(key.Value).AsTask().GetAwaiter().GetResult();
        if (!lookup.HasValue)
        {
            result = null!;
            return false;
        }

        result = lookup.Value;
        return true;
    }

    /// <summary>
    /// Stores <paramref name="value"/> under <paramref name="key"/> for <paramref name="ttl"/>.
    /// A non-positive TTL is treated as a no-op.
    /// </summary>
    public void Set(in AdvisoryChunkCacheKey key, AdvisoryChunkBuildResult value, TimeSpan ttl)
    {
        if (ttl <= TimeSpan.Zero)
        {
            return;
        }

        // Sync-over-async bridge, mirroring TryGet above.
        _chunkCache
            .SetAsync(key.Value, value, new CacheEntryOptions { TimeToLive = ttl })
            .AsTask()
            .GetAwaiter()
            .GetResult();
    }
}

View File

@@ -28,6 +28,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />

View File

@@ -0,0 +1,76 @@
using Microsoft.Extensions.Options;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Options;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
namespace StellaOps.Excititor.WebService.Services;
/// <summary>
/// Graph overlay cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
internal sealed class MessagingGraphOverlayCache : IGraphOverlayCache
{
    private readonly IDistributedCache<GraphOverlayCacheEntry> _overlayCache;
    private readonly IOptions<GraphOptions> _graphOptions;
    private readonly TimeProvider _clock;

    public MessagingGraphOverlayCache(
        IDistributedCacheFactory cacheFactory,
        IOptions<GraphOptions> options,
        TimeProvider timeProvider)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);

        _graphOptions = options ?? throw new ArgumentNullException(nameof(options));
        _clock = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _overlayCache = cacheFactory.Create<GraphOverlayCacheEntry>(new CacheOptions
        {
            KeyPrefix = "graph-overlays:",
        });
    }

    /// <summary>
    /// Looks up a cached overlay for the given tenant/flag/purl combination.
    /// </summary>
    /// <returns>The cached items plus the entry age in milliseconds, or <c>null</c> on a miss.</returns>
    public async ValueTask<GraphOverlayCacheHit?> TryGetAsync(
        string tenant,
        bool includeJustifications,
        IReadOnlyList<string> orderedPurls,
        CancellationToken cancellationToken)
    {
        var lookup = await _overlayCache
            .GetAsync(BuildKey(tenant, includeJustifications, orderedPurls), cancellationToken)
            .ConfigureAwait(false);

        if (!lookup.HasValue)
        {
            return null;
        }

        var entry = lookup.Value;

        // Age is clamped at zero so clock skew can never produce a negative value.
        var elapsed = _clock.GetUtcNow() - entry.CachedAt;
        var ageMs = (long)Math.Max(0, elapsed.TotalMilliseconds);
        return new GraphOverlayCacheHit(entry.Items, ageMs);
    }

    /// <summary>
    /// Stores overlay items under the tenant/flag/purl key using the configured overlay TTL.
    /// </summary>
    public async ValueTask SaveAsync(
        string tenant,
        bool includeJustifications,
        IReadOnlyList<string> orderedPurls,
        IReadOnlyList<GraphOverlayItem> items,
        DateTimeOffset cachedAt,
        CancellationToken cancellationToken)
    {
        // TTL comes from options; clamp to at least one second.
        var ttlSeconds = Math.Max(1, _graphOptions.Value.OverlayTtlSeconds);

        await _overlayCache
            .SetAsync(
                BuildKey(tenant, includeJustifications, orderedPurls),
                new GraphOverlayCacheEntry(items.ToList(), cachedAt),
                new CacheEntryOptions { TimeToLive = TimeSpan.FromSeconds(ttlSeconds) },
                cancellationToken)
            .ConfigureAwait(false);
    }

    // Key shape: "<tenant>:<includeJustifications>:<purl1|purl2|...>".
    private static string BuildKey(string tenant, bool includeJustifications, IReadOnlyList<string> orderedPurls)
        => $"{tenant}:{includeJustifications}:{string.Join('|', orderedPurls)}";
}

/// <summary>
/// Cache entry for graph overlays.
/// </summary>
internal sealed record GraphOverlayCacheEntry(List<GraphOverlayItem> Items, DateTimeOffset CachedAt);

View File

@@ -17,6 +17,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Storage.Postgres/StellaOps.Excititor.Storage.Postgres.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj" />

View File

@@ -6,7 +6,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<!-- Disable Concelier test infrastructure - we don't need MongoDB -->
<!-- Disable Concelier test infrastructure - not needed for Gateway tests -->
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>

View File

@@ -0,0 +1,202 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Policy.Engine.Options;
namespace StellaOps.Policy.Engine.Caching;
/// <summary>
/// Policy evaluation cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
public sealed class MessagingPolicyEvaluationCache : IPolicyEvaluationCache
{
    private readonly IDistributedCache<PolicyEvaluationCacheEntry> _cache;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MessagingPolicyEvaluationCache> _logger;

    // Fallback TTL applied when an entry's own ExpiresAt is missing or already past.
    private readonly TimeSpan _defaultTtl;

    // Local hit/miss counters; updated via Interlocked so GetStats is safe under concurrent use.
    private long _totalRequests;
    private long _cacheHits;
    private long _cacheMisses;

    /// <summary>
    /// Creates the cache with the "pe:" key prefix and a default TTL sourced from
    /// <see cref="PolicyEngineOptions"/> (EvaluationCache), falling back to
    /// <see cref="PolicyEvaluationCacheOptions"/> defaults when options are absent.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    public MessagingPolicyEvaluationCache(
        IDistributedCacheFactory cacheFactory,
        ILogger<MessagingPolicyEvaluationCache> logger,
        TimeProvider timeProvider,
        IOptions<PolicyEngineOptions> options)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));

        var cacheOptions = options?.Value.EvaluationCache ?? new PolicyEvaluationCacheOptions();
        _defaultTtl = TimeSpan.FromMinutes(cacheOptions.DefaultTtlMinutes);

        _cache = cacheFactory.Create<PolicyEvaluationCacheEntry>(new CacheOptions
        {
            KeyPrefix = "pe:",
            DefaultTtl = _defaultTtl,
        });

        _logger.LogInformation(
            "Initialized MessagingPolicyEvaluationCache with provider {Provider}, TTL {Ttl}",
            _cache.ProviderName,
            _defaultTtl);
    }

    /// <summary>
    /// Looks up a single evaluation entry. Entries whose embedded expiry has passed are
    /// removed eagerly and reported as misses.
    /// </summary>
    public async Task<PolicyEvaluationCacheResult> GetAsync(
        PolicyEvaluationCacheKey key,
        CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref _totalRequests);
        var cacheKey = key.ToCacheKey();
        var result = await _cache.GetAsync(cacheKey, cancellationToken).ConfigureAwait(false);

        if (result.HasValue)
        {
            var entry = result.Value;
            var now = _timeProvider.GetUtcNow();

            // Check if entry is still valid (the entry carries its own absolute expiry,
            // independent of the backing store's TTL).
            if (entry.ExpiresAt > now)
            {
                Interlocked.Increment(ref _cacheHits);
                return new PolicyEvaluationCacheResult(entry, true, MapSource(_cache.ProviderName));
            }

            // Entry expired - remove it so it does not linger in the backing store.
            await _cache.InvalidateAsync(cacheKey, cancellationToken).ConfigureAwait(false);
        }

        Interlocked.Increment(ref _cacheMisses);
        return new PolicyEvaluationCacheResult(null, false, CacheSource.None);
    }

    /// <summary>
    /// Looks up many keys by delegating to <see cref="GetAsync"/> sequentially for each one.
    /// In-memory vs. Redis hit attribution is all-or-nothing based on the single configured provider.
    /// </summary>
    public async Task<PolicyEvaluationCacheBatch> GetBatchAsync(
        IReadOnlyList<PolicyEvaluationCacheKey> keys,
        CancellationToken cancellationToken = default)
    {
        var found = new Dictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry>();
        var notFound = new List<PolicyEvaluationCacheKey>();
        var hits = 0;
        var misses = 0;

        foreach (var key in keys)
        {
            var result = await GetAsync(key, cancellationToken).ConfigureAwait(false);
            if (result.Entry != null)
            {
                found[key] = result.Entry;
                hits++;
            }
            else
            {
                notFound.Add(key);
                misses++;
            }
        }

        var source = MapSource(_cache.ProviderName);
        return new PolicyEvaluationCacheBatch
        {
            Found = found,
            NotFound = notFound,
            CacheHits = hits,
            CacheMisses = misses,
            InMemoryHits = source == CacheSource.InMemory ? hits : 0,
            RedisHits = source == CacheSource.Redis ? hits : 0,
        };
    }

    /// <summary>
    /// Stores a single entry. The TTL is derived from the entry's own ExpiresAt when it is
    /// still in the future; otherwise the configured default TTL applies. Non-positive TTLs
    /// are skipped entirely.
    /// </summary>
    public async Task SetAsync(
        PolicyEvaluationCacheKey key,
        PolicyEvaluationCacheEntry entry,
        CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        var expiresAt = entry.ExpiresAt > now ? entry.ExpiresAt : now.Add(_defaultTtl);
        var ttl = expiresAt - now;
        if (ttl <= TimeSpan.Zero)
        {
            return;
        }

        var options = new CacheEntryOptions { TimeToLive = ttl };
        await _cache.SetAsync(cacheKey, entry, options, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Stores many entries sequentially using the same per-entry TTL rules as
    /// <see cref="SetAsync"/>; entries whose computed TTL is non-positive are skipped.
    /// </summary>
    public async Task SetBatchAsync(
        IReadOnlyDictionary<PolicyEvaluationCacheKey, PolicyEvaluationCacheEntry> entries,
        CancellationToken cancellationToken = default)
    {
        var now = _timeProvider.GetUtcNow();
        foreach (var (key, entry) in entries)
        {
            var cacheKey = key.ToCacheKey();
            var expiresAt = entry.ExpiresAt > now ? entry.ExpiresAt : now.Add(_defaultTtl);
            var ttl = expiresAt - now;
            if (ttl <= TimeSpan.Zero)
            {
                continue;
            }

            var options = new CacheEntryOptions { TimeToLive = ttl };
            await _cache.SetAsync(cacheKey, entry, options, cancellationToken).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Removes a single cache entry.
    /// </summary>
    public async Task InvalidateAsync(
        PolicyEvaluationCacheKey key,
        CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        await _cache.InvalidateAsync(cacheKey, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Removes every entry belonging to <paramref name="policyDigest"/> via a pattern delete.
    /// </summary>
    public async Task InvalidateByPolicyDigestAsync(
        string policyDigest,
        CancellationToken cancellationToken = default)
    {
        // Pattern: pe:<policyDigest>:* (the "pe:" prefix is applied by the cache itself).
        var pattern = $"{policyDigest}:*";
        var count = await _cache.InvalidateByPatternAsync(pattern, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug(
            "Invalidated {Count} cache entries for policy digest {Digest}",
            count,
            policyDigest);
    }

    /// <summary>
    /// Returns a snapshot of the locally tracked counters. Item/eviction counts cannot be
    /// observed through the distributed cache abstraction and are always reported as zero.
    /// </summary>
    public PolicyEvaluationCacheStats GetStats()
    {
        var source = MapSource(_cache.ProviderName);
        var hits = Interlocked.Read(ref _cacheHits);
        return new PolicyEvaluationCacheStats
        {
            TotalRequests = Interlocked.Read(ref _totalRequests),
            CacheHits = hits,
            CacheMisses = Interlocked.Read(ref _cacheMisses),
            InMemoryHits = source == CacheSource.InMemory ? hits : 0,
            RedisHits = source == CacheSource.Redis ? hits : 0,
            RedisFallbacks = 0,
            ItemCount = 0, // Not available from IDistributedCache
            EvictionCount = 0, // Not available from IDistributedCache
        };
    }

    // Maps the provider name reported by the cache transport to the CacheSource enum used
    // in results and stats; unrecognized providers map to None.
    private static CacheSource MapSource(string providerName) => providerName.ToLowerInvariant() switch
    {
        "inmemory" => CacheSource.InMemory,
        "valkey" => CacheSource.Redis,
        "redis" => CacheSource.Redis,
        _ => CacheSource.None,
    };
}

View File

@@ -29,8 +29,8 @@ public static class PolicyEngineServiceCollectionExtensions
// Core compilation and evaluation services
services.TryAddSingleton<PolicyCompilationService>();
// Cache
services.TryAddSingleton<IPolicyEvaluationCache, InMemoryPolicyEvaluationCache>();
// Cache - uses IDistributedCacheFactory for transport flexibility
services.TryAddSingleton<IPolicyEvaluationCache, MessagingPolicyEvaluationCache>();
// Runtime evaluation
services.TryAddSingleton<PolicyRuntimeEvaluationService>();

View File

@@ -470,7 +470,7 @@ public sealed class PolicyGateEvaluator : IPolicyGateEvaluator
Name = "LatticeState",
Result = PolicyGateResultType.Warn,
Reason = $"{latticeState} may indicate false positive for affected",
Note = "Consider review: evidence suggests code may not be reachable"
Note = "Consider review: evidence suggests code may not be reachable (possible false positive)"
};
default:

View File

@@ -0,0 +1,180 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Policy.Engine.Options;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Reachability facts overlay cache backed by <see cref="IDistributedCache{TValue}"/>.
/// Supports any transport (InMemory, Valkey, PostgreSQL) via factory injection.
/// </summary>
public sealed class MessagingReachabilityFactsOverlayCache : IReachabilityFactsOverlayCache
{
    private readonly IDistributedCache<ReachabilityFact> _cache;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MessagingReachabilityFactsOverlayCache> _logger;

    // Fallback TTL used when a fact carries no ExpiresAt of its own.
    private readonly TimeSpan _defaultTtl;

    // Local hit/miss counters; updated via Interlocked so GetStats is safe under concurrent use.
    private long _totalRequests;
    private long _cacheHits;
    private long _cacheMisses;

    /// <summary>
    /// Creates the cache with the "rf:" key prefix and a default TTL sourced from
    /// <see cref="PolicyEngineOptions"/> (ReachabilityCache), falling back to
    /// <see cref="ReachabilityFactsCacheOptions"/> defaults when options are absent.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    public MessagingReachabilityFactsOverlayCache(
        IDistributedCacheFactory cacheFactory,
        ILogger<MessagingReachabilityFactsOverlayCache> logger,
        TimeProvider timeProvider,
        IOptions<PolicyEngineOptions> options)
    {
        ArgumentNullException.ThrowIfNull(cacheFactory);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));

        var cacheOptions = options?.Value.ReachabilityCache ?? new ReachabilityFactsCacheOptions();
        _defaultTtl = TimeSpan.FromMinutes(cacheOptions.DefaultTtlMinutes);

        _cache = cacheFactory.Create<ReachabilityFact>(new CacheOptions
        {
            KeyPrefix = "rf:",
            DefaultTtl = _defaultTtl,
        });

        _logger.LogInformation(
            "Initialized MessagingReachabilityFactsOverlayCache with provider {Provider}, TTL {Ttl}",
            _cache.ProviderName,
            _defaultTtl);
    }

    /// <summary>
    /// Looks up a single reachability fact. Facts without an ExpiresAt never expire here;
    /// facts whose expiry has passed are removed eagerly and reported as misses.
    /// </summary>
    public async Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref _totalRequests);
        var cacheKey = key.ToCacheKey();
        var result = await _cache.GetAsync(cacheKey, cancellationToken).ConfigureAwait(false);

        if (result.HasValue)
        {
            var fact = result.Value;
            var now = _timeProvider.GetUtcNow();

            // Check if entry is still valid (no ExpiresAt means "does not expire").
            if (!fact.ExpiresAt.HasValue || fact.ExpiresAt.Value > now)
            {
                Interlocked.Increment(ref _cacheHits);
                return (fact, true);
            }

            // Entry expired - remove it so it does not linger in the backing store.
            await _cache.InvalidateAsync(cacheKey, cancellationToken).ConfigureAwait(false);
        }

        Interlocked.Increment(ref _cacheMisses);
        return (null, false);
    }

    /// <summary>
    /// Looks up many keys by delegating to <see cref="GetAsync"/> sequentially for each one.
    /// </summary>
    public async Task<ReachabilityFactsBatch> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        var found = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        var notFound = new List<ReachabilityFactKey>();
        var cacheHits = 0;
        var cacheMisses = 0;

        foreach (var key in keys)
        {
            var (fact, hit) = await GetAsync(key, cancellationToken).ConfigureAwait(false);
            if (fact != null)
            {
                found[key] = fact;
                cacheHits++;
            }
            else
            {
                notFound.Add(key);
                cacheMisses++;
            }
        }

        return new ReachabilityFactsBatch
        {
            Found = found,
            NotFound = notFound,
            CacheHits = cacheHits,
            CacheMisses = cacheMisses,
        };
    }

    /// <summary>
    /// Stores a single fact. The TTL is derived from the fact's own ExpiresAt when it is
    /// still in the future; otherwise the configured default TTL applies. Non-positive TTLs
    /// are skipped entirely.
    /// </summary>
    public async Task SetAsync(
        ReachabilityFactKey key,
        ReachabilityFact fact,
        CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
            ? fact.ExpiresAt.Value - now
            : _defaultTtl;
        if (ttl <= TimeSpan.Zero)
        {
            return;
        }

        var options = new CacheEntryOptions { TimeToLive = ttl };
        await _cache.SetAsync(cacheKey, fact, options, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Stores many facts sequentially using the same per-fact TTL rules as
    /// <see cref="SetAsync"/>; facts whose computed TTL is non-positive are skipped.
    /// </summary>
    public async Task SetBatchAsync(
        IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts,
        CancellationToken cancellationToken = default)
    {
        var now = _timeProvider.GetUtcNow();
        foreach (var (key, fact) in facts)
        {
            var cacheKey = key.ToCacheKey();
            var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
                ? fact.ExpiresAt.Value - now
                : _defaultTtl;
            if (ttl <= TimeSpan.Zero)
            {
                continue;
            }

            var options = new CacheEntryOptions { TimeToLive = ttl };
            await _cache.SetAsync(cacheKey, fact, options, cancellationToken).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Removes a single fact from the cache.
    /// </summary>
    public async Task InvalidateAsync(ReachabilityFactKey key, CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        await _cache.InvalidateAsync(cacheKey, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Removes every fact belonging to <paramref name="tenantId"/> via a pattern delete.
    /// </summary>
    public async Task InvalidateTenantAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        // Pattern: rf:<tenantId>:* (the "rf:" prefix is applied by the cache itself).
        var pattern = $"{tenantId}:*";
        var count = await _cache.InvalidateByPatternAsync(pattern, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("Invalidated {Count} cache entries for tenant {TenantId}", count, tenantId);
    }

    /// <summary>
    /// Returns a snapshot of the locally tracked counters. Item/eviction counts cannot be
    /// observed through the distributed cache abstraction and are always reported as zero.
    /// </summary>
    public ReachabilityFactsCacheStats GetStats()
    {
        return new ReachabilityFactsCacheStats
        {
            TotalRequests = Interlocked.Read(ref _totalRequests),
            CacheHits = Interlocked.Read(ref _cacheHits),
            CacheMisses = Interlocked.Read(ref _cacheMisses),
            ItemCount = 0, // Not available from IDistributedCache
            EvictionCount = 0, // Not available from IDistributedCache
        };
    }
}

View File

@@ -4,4 +4,4 @@ This file mirrors sprint work for the Policy Engine module.
| Task ID | Sprint | Status | Notes |
| --- | --- | --- | --- |
| `POLICY-GATE-401-033` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DOING | Gate `unreachable` reachability facts: missing evidence ref or low confidence => `under_investigation`; add tests and docs. |
| `POLICY-GATE-401-033` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DONE (2025-12-13) | Implemented PolicyGateEvaluator (lattice/uncertainty/evidence completeness) and aligned tests/docs; see `src/Policy/StellaOps.Policy.Engine/Gates/PolicyGateEvaluator.cs` and `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Gates/PolicyGateEvaluatorTests.cs`. |

View File

@@ -55,13 +55,20 @@ public sealed partial class TenantContextMiddleware
// Set tenant context for the request
tenantContextAccessor.TenantContext = validationResult.Context;
using (_logger.BeginScope(new Dictionary<string, object?>
try
{
["tenant_id"] = validationResult.Context?.TenantId,
["project_id"] = validationResult.Context?.ProjectId
}))
using (_logger.BeginScope(new Dictionary<string, object?>
{
["tenant_id"] = validationResult.Context?.TenantId,
["project_id"] = validationResult.Context?.ProjectId
}))
{
await _next(context);
}
}
finally
{
await _next(context);
tenantContextAccessor.TenantContext = null;
}
}

View File

@@ -5,11 +5,11 @@ using StellaOps.Policy.Storage.Postgres.Repositories;
namespace StellaOps.Policy.Storage.Postgres.Migration;
/// <summary>
/// Handles migration of policy data from MongoDB to PostgreSQL.
/// Handles migration of policy data from legacy storage to PostgreSQL.
/// Task references: PG-T4.9, PG-T4.10, PG-T4.11
/// </summary>
/// <remarks>
/// This migrator converts policy packs and their versions from MongoDB documents
/// This migrator converts policy packs and their versions from legacy storage documents
/// to PostgreSQL entities while preserving version history and active version settings.
/// </remarks>
public sealed class PolicyMigrator
@@ -207,10 +207,10 @@ public sealed class PolicyMigrator
}
/// <summary>
/// Verifies that migrated data matches between MongoDB and PostgreSQL.
/// Verifies that migrated data matches expected counts in PostgreSQL.
/// </summary>
/// <param name="tenantId">Tenant to verify.</param>
/// <param name="expectedPacks">Expected pack count from MongoDB.</param>
/// <param name="expectedPacks">Expected pack count from source data.</param>
/// <param name="expectedVersions">Expected version counts per pack.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result.</returns>
@@ -314,7 +314,7 @@ public sealed class PolicyMigrator
/// </summary>
public sealed class PackMigrationData
{
/// <summary>Source system identifier (MongoDB _id).</summary>
/// <summary>Source system identifier.</summary>
public required string SourceId { get; init; }
/// <summary>Tenant identifier.</summary>

View File

@@ -41,7 +41,7 @@ public sealed class TenantContextTests
public void TenantContext_ForTenant_ThrowsOnNullTenantId()
{
// Act & Assert
Assert.Throws<ArgumentException>(() => TenantContext.ForTenant(null!));
Assert.Throws<ArgumentNullException>(() => TenantContext.ForTenant(null!));
}
[Fact]
@@ -156,9 +156,15 @@ public sealed class TenantContextMiddlewareTests
public async Task Middleware_WithValidTenantHeader_SetsTenantContext()
{
// Arrange
TenantContext? capturedContext = null;
var nextCalled = false;
var middleware = new TenantContextMiddleware(
_ => { nextCalled = true; return Task.CompletedTask; },
_ =>
{
nextCalled = true;
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -169,16 +175,22 @@ public sealed class TenantContextMiddlewareTests
// Assert
Assert.True(nextCalled);
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal("tenant-123", _tenantAccessor.TenantContext.TenantId);
Assert.NotNull(capturedContext);
Assert.Equal("tenant-123", capturedContext!.TenantId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
public async Task Middleware_WithTenantAndProjectHeaders_SetsBothInContext()
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -188,9 +200,10 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal("tenant-123", _tenantAccessor.TenantContext.TenantId);
Assert.Equal("project-456", _tenantAccessor.TenantContext.ProjectId);
Assert.NotNull(capturedContext);
Assert.Equal("tenant-123", capturedContext!.TenantId);
Assert.Equal("project-456", capturedContext.ProjectId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
@@ -218,6 +231,7 @@ public sealed class TenantContextMiddlewareTests
public async Task Middleware_MissingTenantHeaderNotRequired_UsesDefaultTenant()
{
// Arrange
TenantContext? capturedContext = null;
var optionsNotRequired = new TenantContextOptions
{
Enabled = true,
@@ -225,7 +239,11 @@ public sealed class TenantContextMiddlewareTests
};
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(optionsNotRequired),
_logger);
@@ -235,8 +253,9 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal(TenantContextConstants.DefaultTenantId, _tenantAccessor.TenantContext.TenantId);
Assert.NotNull(capturedContext);
Assert.Equal(TenantContextConstants.DefaultTenantId, capturedContext!.TenantId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
@@ -286,8 +305,13 @@ public sealed class TenantContextMiddlewareTests
public async Task Middleware_ValidTenantIdFormat_Passes(string tenantId)
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -297,8 +321,9 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal(tenantId, _tenantAccessor.TenantContext.TenantId);
Assert.NotNull(capturedContext);
Assert.Equal(tenantId, capturedContext!.TenantId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Theory]
@@ -351,8 +376,13 @@ public sealed class TenantContextMiddlewareTests
public async Task Middleware_ValidProjectIdFormat_Passes(string projectId)
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -362,16 +392,22 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal(projectId, _tenantAccessor.TenantContext.ProjectId);
Assert.NotNull(capturedContext);
Assert.Equal(projectId, capturedContext!.ProjectId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
public async Task Middleware_WithWriteScope_SetsCanWriteTrue()
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -387,16 +423,22 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.True(_tenantAccessor.TenantContext.CanWrite);
Assert.NotNull(capturedContext);
Assert.True(capturedContext!.CanWrite);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
public async Task Middleware_WithoutWriteScope_SetsCanWriteFalse()
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -412,16 +454,22 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.False(_tenantAccessor.TenantContext.CanWrite);
Assert.NotNull(capturedContext);
Assert.False(capturedContext!.CanWrite);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
public async Task Middleware_ExtractsActorIdFromSubClaim()
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -433,16 +481,22 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal("user-id-123", _tenantAccessor.TenantContext.ActorId);
Assert.NotNull(capturedContext);
Assert.Equal("user-id-123", capturedContext!.ActorId);
Assert.Null(_tenantAccessor.TenantContext);
}
[Fact]
public async Task Middleware_ExtractsActorIdFromHeader()
{
// Arrange
TenantContext? capturedContext = null;
var middleware = new TenantContextMiddleware(
_ => Task.CompletedTask,
_ =>
{
capturedContext = _tenantAccessor.TenantContext;
return Task.CompletedTask;
},
MsOptions.Options.Create(_options),
_logger);
@@ -453,8 +507,9 @@ public sealed class TenantContextMiddlewareTests
await middleware.InvokeAsync(context, _tenantAccessor);
// Assert
Assert.NotNull(_tenantAccessor.TenantContext);
Assert.Equal("service-account-123", _tenantAccessor.TenantContext.ActorId);
Assert.NotNull(capturedContext);
Assert.Equal("service-account-123", capturedContext!.ActorId);
Assert.Null(_tenantAccessor.TenantContext);
}
private static DefaultHttpContext CreateHttpContext(

View File

@@ -0,0 +1,45 @@
# StellaOps.Scanner.Analyzers.Lang.Bun - Agent Charter
## Role
Deliver the Bun analyzer plug-in that inventories npm-ecosystem dependencies from Bun-managed projects and emits deterministic, evidence-backed component records for Scanner Workers.
## Scope
- Bun project discovery (including common container layer layouts like `layers/*`, `.layers/*`, and `layer*`).
- Parse `bun.lock` (text lockfile, v1) and reconcile scopes (prod/dev/optional/peer) deterministically without executing Bun.
- Installed inventory from `node_modules/**/package.json` and Bun's isolated linker store (`node_modules/.bun/**/package.json`), with symlink safety.
- Declared-only fallback from `package.json` when no install/lock evidence exists, using safe identities (no invalid range-as-version PURLs).
- Patched dependency attribution (`patchedDependencies`, `patches/`, `.patches/`) with version-specific mapping and no absolute path leakage.
- Plug-in manifest/DI bootstrap maintenance for Worker loading.
## Out of Scope
- Parsing `bun.lockb` (binary lockfile) unless explicitly scheduled.
- Running `bun` or fetching registries (offline-first always).
- Vulnerability correlation, policy evaluation, or UI/export formatting.
## Expectations
- Offline-first: no process execution, no network calls, no reliance on host-global caches.
- Determinism: stable ordering, explicit bounds, normalized path separators, and deterministic "skipped" markers when limits are hit.
- Identity safety: never emit `pkg:npm/...@<range|tag|workspace:*|file:...|link:...|git+...>`; use `AddFromExplicitKey` for non-concrete versions and for non-registry sources.
- Evidence: locators are project-relative (no drive letters or host roots) and golden-tested; file hashing is size-bounded.
- Container bounds: discovery must be bounded and must never recurse into `node_modules/`.
## Dependencies
- Shared analyzer infrastructure: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang`.
- Node analyzer conventions where contracts overlap (container roots, evidence/locator patterns).
- Scanner Surface filesystem normalization helpers (via `LanguageAnalyzerContext`).
## Testing & Artifacts
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests`.
- Fixtures: `src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Bun.Tests/Fixtures/lang/bun/**` (deterministic inputs and golden outputs).
- Optional benchmarks (only if perf risks materialize): `src/Bench/StellaOps.Bench/Scanner.Analyzers`.
## Required Reading
- `docs/modules/scanner/architecture.md`
- `docs/modules/scanner/prep/bun-analyzer-design.md`
- `docs/modules/scanner/bun-analyzer-gotchas.md`
- `docs/implplan/SPRINT_0407_0001_0001_scanner_bun_detection_gaps.md`
## Working Agreement
1. Update task status to `DOING`/`DONE` in both `/docs/implplan/SPRINT_*.md` and the local `TASKS.md` as work progresses.
2. Every behavior change is covered by fixtures + golden tests (and/or unit tests for parsers).
3. If a contract decision is required (identity, evidence locators, container layout), mark the affected task `BLOCKED` in the sprint and record the exact decision needed under **Decisions & Risks**.

View File

@@ -38,6 +38,13 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
continue;
}
// Declared-only fallback for bun markers (package.json + bunfig.toml) when no lock/install evidence exists.
if (classification.Kind == BunInputKind.None)
{
EmitDeclaredOnlyDependencies(writer, context, projectRoot, cancellationToken);
continue;
}
// Parse workspace info for direct dependency detection
var workspaceInfo = BunWorkspaceHelper.ParseWorkspaceInfo(projectRoot);
@@ -45,13 +52,37 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
var bunConfig = BunConfigHelper.ParseConfig(projectRoot);
// Stage 3: Collect packages based on classification
string? lockfileRelativePath = null;
string? lockfileSha256 = null;
string? lockfileHashSkipReason = null;
if (classification.HasTextLockfile && !string.IsNullOrWhiteSpace(classification.TextLockfilePath))
{
lockfileRelativePath = context.GetRelativePath(classification.TextLockfilePath!);
if (string.IsNullOrWhiteSpace(lockfileRelativePath))
{
lockfileRelativePath = "bun.lock";
}
lockfileSha256 = BunEvidenceHasher.TryComputeBoundedSha256(
classification.TextLockfilePath!,
BunEvidenceHasher.MaxLockfileEvidenceBytes,
out lockfileHashSkipReason);
}
IReadOnlyList<BunPackage> packages;
if (classification.Kind == BunInputKind.InstalledModules)
{
// Prefer installed modules when available
var lockData = classification.HasTextLockfile
? await BunLockParser.ParseAsync(classification.TextLockfilePath!, cancellationToken).ConfigureAwait(false)
: null;
BunLockData? lockData = null;
if (classification.HasTextLockfile)
{
lockData = await BunLockParser.ParseAsync(classification.TextLockfilePath!, cancellationToken).ConfigureAwait(false);
if (!lockData.AllEntries.IsEmpty)
{
var declared = BunDeclaredDependencyCollector.Collect(projectRoot);
lockData = BunLockScopeClassifier.Classify(lockData, declared);
}
}
packages = BunInstalledCollector.Collect(context, projectRoot, lockData, cancellationToken);
}
@@ -59,6 +90,12 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
{
// Fall back to lockfile parsing
var lockData = await BunLockParser.ParseAsync(classification.TextLockfilePath!, cancellationToken).ConfigureAwait(false);
if (!lockData.AllEntries.IsEmpty)
{
var declared = BunDeclaredDependencyCollector.Collect(projectRoot);
lockData = BunLockScopeClassifier.Classify(lockData, declared);
}
packages = BunLockInventory.ExtractPackages(lockData, classification.IncludeDev);
}
else
@@ -70,13 +107,13 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
// Mark direct, patched dependencies and custom registries
foreach (var package in packages)
{
package.LockfilePath = lockfileRelativePath;
package.LockfileSha256 = lockfileSha256;
package.LockfileHashSkipReason = lockfileHashSkipReason;
package.IsDirect = workspaceInfo.DirectDependencies.ContainsKey(package.Name);
if (workspaceInfo.PatchedDependencies.TryGetValue(package.Name, out var patchFile))
{
package.IsPatched = true;
package.PatchFile = patchFile;
}
ApplyPatchMetadata(workspaceInfo, package);
// Check for custom registry (scoped or default)
if (bunConfig.HasCustomRegistry)
@@ -98,26 +135,176 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
// Stage 4: Normalize and emit
var normalized = BunPackageNormalizer.Normalize(packages);
foreach (var package in normalized.OrderBy(static p => p.ComponentKey, StringComparer.Ordinal))
var emissionPlans = normalized
.Select(package =>
{
if (BunVersionSpec.IsConcreteNpmVersion(package.Version))
{
return new EmissionPlan(package, package.ComponentKey, UsePurl: true, ComponentKey: null);
}
var originLocator = BuildNonConcreteOriginLocator(package);
var versionSpec = package.Version;
var componentKey = LanguageExplicitKey.Create(Id, "npm", package.Name, versionSpec, originLocator);
return new EmissionPlan(package, componentKey, UsePurl: false, ComponentKey: componentKey);
})
.OrderBy(static p => p.SortKey, StringComparer.Ordinal);
foreach (var plan in emissionPlans)
{
cancellationToken.ThrowIfCancellationRequested();
var metadata = package.CreateMetadata();
var package = plan.Package;
var evidence = package.CreateEvidence();
writer.AddFromPurl(
analyzerId: Id,
purl: package.Purl,
name: package.Name,
version: package.Version,
type: "npm",
metadata: metadata,
evidence: evidence,
usedByEntrypoint: false);
if (plan.UsePurl)
{
var metadata = package.CreateMetadata();
writer.AddFromPurl(
analyzerId: Id,
purl: package.Purl,
name: package.Name,
version: package.Version,
type: "npm",
metadata: metadata,
evidence: evidence,
usedByEntrypoint: false);
}
else
{
var metadata = new SortedDictionary<string, string?>(StringComparer.Ordinal);
foreach (var entry in package.CreateMetadata())
{
metadata[entry.Key] = entry.Value;
}
metadata["nonConcreteVersion"] = "true";
metadata["versionSpec"] = package.Version;
writer.AddFromExplicitKey(
analyzerId: Id,
componentKey: plan.ComponentKey!,
purl: null,
name: package.Name,
version: null,
type: "npm",
metadata: metadata,
evidence: evidence,
usedByEntrypoint: false);
}
}
}
}
/// <summary>
/// Marks <paramref name="package"/> as patched when bun's patchedDependencies map contains
/// a matching entry. An exact "name@version" key always wins; a bare name key is honored
/// only when no versioned key exists for the same package, since a versioned key would make
/// the bare-name attribution ambiguous.
/// </summary>
private static void ApplyPatchMetadata(BunWorkspaceHelper.WorkspaceInfo workspaceInfo, BunPackage package)
{
    var patched = workspaceInfo.PatchedDependencies;
    if (patched.Count == 0)
    {
        return;
    }

    if (patched.TryGetValue($"{package.Name}@{package.Version}", out var patchFile))
    {
        package.IsPatched = true;
        package.PatchFile = patchFile;
    }
    else if (patched.TryGetValue(package.Name, out patchFile) &&
             IsNameOnlyPatchUnambiguous(patched, package.Name))
    {
        package.IsPatched = true;
        package.PatchFile = patchFile;
    }
}
/// <summary>
/// Returns true when a bare-name patch entry for <paramref name="packageName"/> can be
/// applied safely, i.e. the patch map contains no versioned "name@version" key for the same
/// package that would make the bare-name attribution ambiguous.
/// </summary>
private static bool IsNameOnlyPatchUnambiguous(IReadOnlyDictionary<string, string> patchedDependencies, string packageName)
{
    var versionedPrefix = $"{packageName}@";
    return !patchedDependencies.Keys.Any(key => key.StartsWith(versionedPrefix, StringComparison.Ordinal));
}
/// <summary>
/// Declared-only fallback: when a project carries Bun markers but has no lockfile or
/// installed-modules evidence, emit each package.json dependency as an explicit-key
/// component. Versions here are raw specifiers (ranges/tags/etc.), so purl emission is
/// deliberately avoided — identity safety forbids pkg:npm purls with non-concrete versions.
/// </summary>
private void EmitDeclaredOnlyDependencies(
    LanguageComponentWriter writer,
    LanguageAnalyzerContext context,
    string projectRoot,
    CancellationToken cancellationToken)
{
    var declared = BunDeclaredDependencyCollector.Collect(projectRoot);
    if (declared.Count == 0)
    {
        // Nothing declared (or package.json missing/unreadable) — emit nothing.
        return;
    }
    var packageJsonPath = Path.Combine(projectRoot, "package.json");
    var relativePackageJson = context.GetRelativePath(packageJsonPath);
    if (string.IsNullOrWhiteSpace(relativePackageJson))
    {
        // Defensive fallback so evidence locators stay project-relative and non-empty.
        relativePackageJson = "package.json";
    }
    // Size-bounded hash of package.json; when skipped, the reason is surfaced in metadata
    // and the evidence entry carries a null hash.
    var packageJsonHash = BunEvidenceHasher.TryComputeBoundedSha256(
        packageJsonPath,
        BunEvidenceHasher.MaxPackageJsonEvidenceBytes,
        out var hashSkipReason);
    foreach (var dep in declared)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Locator pins the declaration site (file + section) so explicit keys stay stable
        // and deterministic across runs.
        var locator = $"{relativePackageJson}#{dep.Section}";
        var componentKey = LanguageExplicitKey.Create(Id, "npm", dep.Name, dep.VersionSpec, locator);
        var metadata = new List<KeyValuePair<string, string?>>(12)
        {
            new("declaredOnly", "true"),
            new("declared.source", "package.json"),
            new("declared.locator", locator),
            new("declared.versionSpec", dep.VersionSpec),
            new("declared.scope", dep.Scope),
            new("declared.sourceType", ClassifyDeclaredSourceType(dep.VersionSpec)),
            new("packageManager", "bun"),
        };
        if (!string.IsNullOrEmpty(hashSkipReason))
        {
            // Record why the hash is absent (e.g. file too large) instead of omitting silently.
            metadata.Add(new KeyValuePair<string, string?>("packageJson.hashSkipped", "true"));
            metadata.Add(new KeyValuePair<string, string?>("packageJson.hashSkipReason", hashSkipReason));
        }
        var evidence = new[]
        {
            new LanguageComponentEvidence(
                LanguageEvidenceKind.File,
                "package.json",
                relativePackageJson,
                null,
                packageJsonHash)
        };
        // Explicit key, null purl/version: declared specs are never concrete versions.
        writer.AddFromExplicitKey(
            analyzerId: Id,
            componentKey: componentKey,
            purl: null,
            name: dep.Name,
            version: null,
            type: "npm",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }
}
private void EmitBinaryLockfileRemediation(LanguageComponentWriter writer, LanguageAnalyzerContext context, string projectRoot)
{
var relativePath = context.GetRelativePath(projectRoot);
@@ -149,4 +336,73 @@ public sealed class BunLanguageAnalyzer : ILanguageAnalyzer
metadata: metadata,
evidence: evidence);
}
/// <summary>
/// Buckets a declared version specifier into a source-type label: workspace, link, file,
/// git, tarball, tag, path, range, or unknown (blank input). Protocol prefixes match
/// case-insensitively; dist-tag names match case-sensitively, mirroring npm tags.
/// </summary>
private static string ClassifyDeclaredSourceType(string spec)
{
    if (string.IsNullOrWhiteSpace(spec))
    {
        return "unknown";
    }

    var trimmed = spec.Trim();

    if (trimmed.StartsWith("workspace:", StringComparison.OrdinalIgnoreCase))
    {
        return "workspace";
    }

    if (trimmed.StartsWith("link:", StringComparison.OrdinalIgnoreCase))
    {
        return "link";
    }

    if (trimmed.StartsWith("file:", StringComparison.OrdinalIgnoreCase))
    {
        return "file";
    }

    var isGitSource = trimmed.StartsWith("git+", StringComparison.OrdinalIgnoreCase)
        || trimmed.StartsWith("git://", StringComparison.OrdinalIgnoreCase)
        || trimmed.StartsWith("github:", StringComparison.OrdinalIgnoreCase)
        || trimmed.StartsWith("gitlab:", StringComparison.OrdinalIgnoreCase)
        || trimmed.StartsWith("bitbucket:", StringComparison.OrdinalIgnoreCase);
    if (isGitSource)
    {
        return "git";
    }

    var isRemoteTarball = trimmed.StartsWith("http://", StringComparison.OrdinalIgnoreCase)
        || trimmed.StartsWith("https://", StringComparison.OrdinalIgnoreCase);
    if (isRemoteTarball)
    {
        return "tarball";
    }

    // Remaining forms: well-known dist-tags, relative/absolute paths, otherwise a semver range.
    return trimmed switch
    {
        "latest" or "next" or "beta" or "alpha" or "canary" => "tag",
        _ when trimmed[0] is '.' or '/' || trimmed.Contains('\\') => "path",
        _ => "range",
    };
}
/// <summary>
/// Chooses a stable origin locator for a package whose version is not a concrete npm
/// version: the on-disk package.json when a logical install path is known, otherwise a
/// lockfile entry locator of the form "&lt;lockfile&gt;:packages[name@version]".
/// </summary>
private static string BuildNonConcreteOriginLocator(BunPackage package)
{
    var logicalPath = package.LogicalPath;
    if (string.IsNullOrWhiteSpace(logicalPath))
    {
        var lockfile = package.LockfilePath;
        if (string.IsNullOrWhiteSpace(lockfile))
        {
            lockfile = "bun.lock";
        }

        return $"{NormalizePath(lockfile!)}:packages[{package.Name}@{package.Version}]";
    }

    return NormalizePath(Path.Combine(logicalPath!, "package.json"));
}
private static string NormalizePath(string path) => path.Replace('\\', '/');
private sealed record EmissionPlan(BunPackage Package, string SortKey, bool UsePurl, string? ComponentKey);
}

View File

@@ -0,0 +1,80 @@
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Scanner.Analyzers.Lang.Bun.Internal;
internal static class BunDeclaredDependencyCollector
{
    /// <summary>
    /// A dependency declared in package.json: package name, raw version specifier, the
    /// package.json section it came from, and its normalized scope label (prod/dev/optional/peer).
    /// </summary>
    internal sealed record DeclaredDependency(
        string Name,
        string VersionSpec,
        string Section,
        string Scope);

    /// <summary>
    /// Reads <c>package.json</c> under <paramref name="projectRoot"/> and returns the declared
    /// dependencies across all four sections, sorted by name then scope (ordinal) for
    /// deterministic output. Returns an empty list when the file is absent, unreadable, or
    /// malformed — declared-dependency collection must never fail the analyzer (offline-first).
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="projectRoot"/> is null or whitespace.</exception>
    public static IReadOnlyList<DeclaredDependency> Collect(string projectRoot)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(projectRoot);

        var packageJsonPath = Path.Combine(projectRoot, "package.json");
        if (!File.Exists(packageJsonPath))
        {
            return ImmutableArray<DeclaredDependency>.Empty;
        }

        try
        {
            var content = File.ReadAllText(packageJsonPath);
            using var document = JsonDocument.Parse(content);
            var root = document.RootElement;

            var results = new List<DeclaredDependency>();
            AddDependencies(results, root, "dependencies", "prod");
            AddDependencies(results, root, "devDependencies", "dev");
            AddDependencies(results, root, "optionalDependencies", "optional");
            AddDependencies(results, root, "peerDependencies", "peer");

            // Deterministic ordering: name first, then scope, both ordinal.
            results.Sort(static (left, right) =>
            {
                var nameCompare = string.CompareOrdinal(left.Name, right.Name);
                if (nameCompare != 0)
                {
                    return nameCompare;
                }

                return string.CompareOrdinal(left.Scope, right.Scope);
            });

            return results.ToImmutableArray();
        }
        catch (JsonException)
        {
            return ImmutableArray<DeclaredDependency>.Empty;
        }
        catch (UnauthorizedAccessException)
        {
            // Fix: a permission failure on package.json previously escaped and crashed the
            // analyzer; degrade to "no declarations", consistent with the other Bun collectors.
            return ImmutableArray<DeclaredDependency>.Empty;
        }
        catch (IOException)
        {
            return ImmutableArray<DeclaredDependency>.Empty;
        }
    }

    // Appends every string-valued entry of the given package.json section, skipping blank
    // names/specifiers and trimming surrounding whitespace.
    private static void AddDependencies(List<DeclaredDependency> results, JsonElement root, string section, string scope)
    {
        if (!root.TryGetProperty(section, out var deps) || deps.ValueKind != JsonValueKind.Object)
        {
            return;
        }

        foreach (var dep in deps.EnumerateObject())
        {
            var name = dep.Name;
            var versionSpec = dep.Value.ValueKind == JsonValueKind.String ? dep.Value.GetString() : null;
            if (string.IsNullOrWhiteSpace(name) || string.IsNullOrWhiteSpace(versionSpec))
            {
                continue;
            }

            results.Add(new DeclaredDependency(name.Trim(), versionSpec!.Trim(), section, scope));
        }
    }
}

View File

@@ -0,0 +1,45 @@
using System.Security.Cryptography;
namespace StellaOps.Scanner.Analyzers.Lang.Bun.Internal;
/// <summary>
/// Size-bounded SHA-256 hashing for evidence files. Hashing is skipped (with a machine-readable
/// reason) rather than attempted on missing, oversized, or unreadable files, so evidence
/// collection stays deterministic and never throws.
/// </summary>
internal static class BunEvidenceHasher
{
    internal const int MaxPackageJsonEvidenceBytes = 1024 * 1024; // 1 MiB
    internal const int MaxLockfileEvidenceBytes = 50 * 1024 * 1024; // 50 MiB (matches BunLockParser cap)

    /// <summary>
    /// Returns the lowercase hex SHA-256 of the file at <paramref name="path"/>, or null with
    /// a skip reason ("missing", "size&gt;N", "unauthorized", "io") when hashing is not possible.
    /// </summary>
    internal static string? TryComputeBoundedSha256(string path, int maxBytes, out string? skipReason)
    {
        skipReason = null;
        try
        {
            var fileInfo = new FileInfo(path);
            if (!fileInfo.Exists)
            {
                skipReason = "missing";
            }
            else if (fileInfo.Length > maxBytes)
            {
                skipReason = $"size>{maxBytes}";
            }
            else
            {
                using var stream = File.OpenRead(path);
                return Convert.ToHexString(SHA256.HashData(stream)).ToLowerInvariant();
            }

            return null;
        }
        catch (UnauthorizedAccessException)
        {
            skipReason = "unauthorized";
            return null;
        }
        catch (IOException)
        {
            skipReason = "io";
            return null;
        }
    }
}

View File

@@ -1,4 +1,5 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text.Json;
namespace StellaOps.Scanner.Analyzers.Lang.Bun.Internal;
@@ -186,9 +187,30 @@ internal static class BunInstalledCollector
{
try
{
var content = File.ReadAllText(packageJsonPath);
using var document = JsonDocument.Parse(content);
var root = document.RootElement;
var fileInfo = new FileInfo(packageJsonPath);
string? packageJsonSha256 = null;
string? packageJsonHashSkipReason = null;
JsonElement root;
if (fileInfo.Exists && fileInfo.Length <= BunEvidenceHasher.MaxPackageJsonEvidenceBytes)
{
var bytes = File.ReadAllBytes(packageJsonPath);
packageJsonSha256 = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
using var document = JsonDocument.Parse(bytes);
root = document.RootElement.Clone();
}
else
{
if (fileInfo.Exists && fileInfo.Length > BunEvidenceHasher.MaxPackageJsonEvidenceBytes)
{
packageJsonHashSkipReason = $"size>{BunEvidenceHasher.MaxPackageJsonEvidenceBytes}";
}
using var stream = File.OpenRead(packageJsonPath);
using var document = JsonDocument.Parse(stream);
root = document.RootElement.Clone();
}
if (!root.TryGetProperty("name", out var nameElement))
{
@@ -221,12 +243,18 @@ internal static class BunInstalledCollector
relativePath,
relativeRealPath,
isPrivate,
lockEntry);
lockEntry,
packageJsonSha256: packageJsonSha256,
packageJsonHashSkipReason: packageJsonHashSkipReason);
}
catch (JsonException)
{
return null;
}
catch (UnauthorizedAccessException)
{
return null;
}
catch (IOException)
{
return null;

View File

@@ -12,6 +12,7 @@ internal sealed class BunLockEntry
public bool IsDev { get; init; }
public bool IsOptional { get; init; }
public bool IsPeer { get; init; }
public bool ScopeUnknown { get; init; }
/// <summary>
/// Source type: npm, git, tarball, file, link, workspace.
@@ -31,5 +32,7 @@ internal sealed class BunLockEntry
/// <summary>
/// Dependencies of this package (for transitive analysis).
/// </summary>
public IReadOnlyList<string> Dependencies { get; init; } = Array.Empty<string>();
public IReadOnlyList<BunLockDependency> Dependencies { get; init; } = Array.Empty<BunLockDependency>();
}
internal sealed record BunLockDependency(string Name, string? Specifier, bool IsOptionalPeer);

View File

@@ -141,13 +141,24 @@ internal static class BunLockParser
var resolved = element[0].GetString();
var integrity = element.GetArrayLength() > 1 ? element[1].GetString() : null;
// Parse dependencies from element[2] if present
var dependencies = new List<string>();
// Parse dependencies from element[2] if present.
var dependencies = new List<BunLockDependency>();
if (element.GetArrayLength() > 2 && element[2].ValueKind == JsonValueKind.Object)
{
foreach (var dep in element[2].EnumerateObject())
{
dependencies.Add(dep.Name);
var depSpecifier = dep.Value.ValueKind == JsonValueKind.String ? dep.Value.GetString() : null;
dependencies.Add(new BunLockDependency(dep.Name, depSpecifier, IsOptionalPeer: false));
}
}
// Optional peer dependencies may appear as element[3] in bun.lock v1 array format.
if (element.GetArrayLength() > 3 && element[3].ValueKind == JsonValueKind.Object)
{
foreach (var dep in element[3].EnumerateObject())
{
var depSpecifier = dep.Value.ValueKind == JsonValueKind.String ? dep.Value.GetString() : null;
dependencies.Add(new BunLockDependency(dep.Name, depSpecifier, IsOptionalPeer: true));
}
}

View File

@@ -0,0 +1,203 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Analyzers.Lang.Bun.Internal;
/// <summary>
/// Computes dev/optional/peer scope flags for lockfile entries by walking the dependency
/// graph from the roots declared in package.json. Entries whose scope cannot be attributed
/// safely (the same name resolved to multiple versions) are flagged <c>ScopeUnknown</c>
/// instead of being guessed.
/// </summary>
internal static class BunLockScopeClassifier
{
    /// <summary>
    /// Returns a copy of <paramref name="lockData"/> whose entries carry widened scope flags.
    /// Existing lockfile flags are preserved; computed flags only add to them. Returns the
    /// input unchanged when there are no entries or no declared dependencies to anchor on.
    /// </summary>
    public static BunLockData Classify(BunLockData lockData, IReadOnlyList<BunDeclaredDependencyCollector.DeclaredDependency> declaredDependencies)
    {
        ArgumentNullException.ThrowIfNull(lockData);
        ArgumentNullException.ThrowIfNull(declaredDependencies);

        if (lockData.AllEntries.IsEmpty || declaredDependencies.Count == 0)
        {
            return lockData;
        }

        var entries = lockData.AllEntries.ToArray();
        var entriesByName = entries
            .GroupBy(static entry => entry.Name, StringComparer.Ordinal)
            .ToDictionary(static group => group.Key, static group => group.ToImmutableArray(), StringComparer.Ordinal);

        // Names resolved to more than one version cannot be attributed to a single graph node.
        var ambiguousNames = entriesByName
            .Where(static pair => pair.Value.Length > 1)
            .Select(static pair => pair.Key)
            .ToHashSet(StringComparer.Ordinal);
        var hasAmbiguity = ambiguousNames.Count > 0;

        var uniqueByName = entriesByName
            .Where(static pair => pair.Value.Length == 1)
            .ToDictionary(static pair => pair.Key, static pair => pair.Value[0], StringComparer.Ordinal);

        // Fix: tolerate duplicate name@version rows (e.g. the same package surfaced at
        // several logical paths). Enumerable.ToDictionary would throw ArgumentException and
        // crash classification; first occurrence wins, which is deterministic because the
        // entry order is preserved from the lockfile.
        var entryByKey = new Dictionary<string, BunLockEntry>(entries.Length, StringComparer.Ordinal);
        foreach (var entry in entries)
        {
            entryByKey.TryAdd($"{entry.Name}@{entry.Version}", entry);
        }

        var prodRoots = ResolveDeclaredRoots(declaredDependencies, "prod", uniqueByName);
        var devRoots = ResolveDeclaredRoots(declaredDependencies, "dev", uniqueByName);
        var optionalRoots = ResolveDeclaredRoots(declaredDependencies, "optional", uniqueByName);
        var peerRoots = ResolveDeclaredRoots(declaredDependencies, "peer", uniqueByName);

        // Reachability per declared scope; optional-peer edges are excluded here and
        // promoted separately below.
        var prodReachable = Traverse(prodRoots, uniqueByName, entryByKey, includeOptionalPeer: false);
        var devReachable = Traverse(devRoots, uniqueByName, entryByKey, includeOptionalPeer: false);
        var optionalReachable = Traverse(optionalRoots, uniqueByName, entryByKey, includeOptionalPeer: false);
        var peerReachable = Traverse(peerRoots, uniqueByName, entryByKey, includeOptionalPeer: false);

        var baseReachable = new HashSet<string>(StringComparer.Ordinal);
        baseReachable.UnionWith(prodReachable);
        baseReachable.UnionWith(devReachable);
        baseReachable.UnionWith(optionalReachable);
        baseReachable.UnionWith(peerReachable);

        // Optional peer edges (when present) promote targets into both optional and peer scopes.
        var optionalPeerRoots = CollectOptionalPeerTargets(baseReachable, entryByKey, uniqueByName);
        if (optionalPeerRoots.Count > 0)
        {
            var optionalPeerReachable = Traverse(optionalPeerRoots, uniqueByName, entryByKey, includeOptionalPeer: false);
            optionalReachable.UnionWith(optionalPeerReachable);
            peerReachable.UnionWith(optionalPeerReachable);
        }

        var rewritten = ImmutableArray.CreateBuilder<BunLockEntry>(entries.Length);
        foreach (var entry in entries)
        {
            var key = $"{entry.Name}@{entry.Version}";
            var reachedFromProd = prodReachable.Contains(key);
            var reachedFromDev = devReachable.Contains(key);
            var reachedFromOptional = optionalReachable.Contains(key);
            var reachedFromPeer = peerReachable.Contains(key);

            // Dev is only asserted when the graph is globally unambiguous and the entry is
            // not also reachable from the prod roots.
            var computedDev = !hasAmbiguity && reachedFromDev && !reachedFromProd;
            var computedOptional = reachedFromOptional;
            var computedPeer = reachedFromPeer;
            var computedScopeUnknown = ambiguousNames.Contains(entry.Name)
                || (hasAmbiguity && (reachedFromDev || reachedFromOptional || reachedFromPeer) && !reachedFromProd);

            rewritten.Add(new BunLockEntry
            {
                Name = entry.Name,
                Version = entry.Version,
                Resolved = entry.Resolved,
                Integrity = entry.Integrity,
                IsDev = entry.IsDev || computedDev,
                IsOptional = entry.IsOptional || computedOptional,
                IsPeer = entry.IsPeer || computedPeer,
                ScopeUnknown = entry.ScopeUnknown || computedScopeUnknown,
                SourceType = entry.SourceType,
                GitCommit = entry.GitCommit,
                Specifier = entry.Specifier,
                Dependencies = entry.Dependencies
            });
        }

        return new BunLockData(rewritten.ToImmutable());
    }

    // Maps declared dependencies of the given scope onto their unique lockfile entries;
    // declared names that are missing or ambiguous in the lockfile are dropped.
    private static IReadOnlyList<BunLockEntry> ResolveDeclaredRoots(
        IReadOnlyList<BunDeclaredDependencyCollector.DeclaredDependency> declared,
        string scope,
        IReadOnlyDictionary<string, BunLockEntry> uniqueByName)
    {
        var roots = new List<BunLockEntry>();
        foreach (var dep in declared)
        {
            if (!scope.Equals(dep.Scope, StringComparison.Ordinal))
            {
                continue;
            }

            if (uniqueByName.TryGetValue(dep.Name, out var entry))
            {
                roots.Add(entry);
            }
        }

        return roots;
    }

    // Breadth-first traversal over dependency edges, resolving edges by unique name only
    // (ambiguous names terminate traversal). Returns the set of visited "name@version" keys.
    private static HashSet<string> Traverse(
        IReadOnlyList<BunLockEntry> roots,
        IReadOnlyDictionary<string, BunLockEntry> uniqueByName,
        IReadOnlyDictionary<string, BunLockEntry> entryByKey,
        bool includeOptionalPeer)
    {
        var visited = new HashSet<string>(StringComparer.Ordinal);
        var queue = new Queue<BunLockEntry>();

        foreach (var root in roots)
        {
            var key = $"{root.Name}@{root.Version}";
            if (visited.Add(key))
            {
                queue.Enqueue(root);
            }
        }

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            foreach (var dep in current.Dependencies)
            {
                if (!includeOptionalPeer && dep.IsOptionalPeer)
                {
                    continue;
                }

                if (!uniqueByName.TryGetValue(dep.Name, out var target))
                {
                    continue; // Unknown or ambiguous dependency.
                }

                var key = $"{target.Name}@{target.Version}";
                if (visited.Add(key) && entryByKey.TryGetValue(key, out var resolved))
                {
                    queue.Enqueue(resolved);
                }
            }
        }

        return visited;
    }

    // Collects the unique targets of optional-peer edges originating from the reachable set,
    // sorted by name then version so downstream traversal order is deterministic even though
    // the input set enumerates in hash order.
    private static IReadOnlyList<BunLockEntry> CollectOptionalPeerTargets(
        IEnumerable<string> reachableKeys,
        IReadOnlyDictionary<string, BunLockEntry> entryByKey,
        IReadOnlyDictionary<string, BunLockEntry> uniqueByName)
    {
        var targets = new List<BunLockEntry>();
        var seen = new HashSet<string>(StringComparer.Ordinal);

        foreach (var key in reachableKeys)
        {
            if (!entryByKey.TryGetValue(key, out var entry))
            {
                continue;
            }

            foreach (var dep in entry.Dependencies.Where(static dep => dep.IsOptionalPeer))
            {
                if (!uniqueByName.TryGetValue(dep.Name, out var target))
                {
                    continue;
                }

                var targetKey = $"{target.Name}@{target.Version}";
                if (seen.Add(targetKey))
                {
                    targets.Add(target);
                }
            }
        }

        targets.Sort(static (left, right) =>
        {
            var nameCompare = string.CompareOrdinal(left.Name, right.Name);
            return nameCompare != 0 ? nameCompare : string.CompareOrdinal(left.Version, right.Version);
        });

        return targets;
    }
}

View File

@@ -14,7 +14,7 @@ internal sealed class BunPackage
{
Name = name;
Version = version;
Purl = BuildPurl(name, version);
Purl = BuildNpmPurl(name, version);
ComponentKey = $"purl::{Purl}";
}
@@ -29,6 +29,12 @@ internal sealed class BunPackage
public bool IsDev { get; private init; }
public bool IsOptional { get; private init; }
public bool IsPeer { get; private init; }
public bool ScopeUnknown { get; private init; }
public string? PackageJsonSha256 { get; private init; }
public string? PackageJsonHashSkipReason { get; private init; }
public string? LockfilePath { get; set; }
public string? LockfileSha256 { get; set; }
public string? LockfileHashSkipReason { get; set; }
/// <summary>
/// Source type: npm, git, tarball, file, link, workspace.
@@ -99,7 +105,9 @@ internal sealed class BunPackage
string logicalPath,
string? realPath,
bool isPrivate,
BunLockEntry? lockEntry)
BunLockEntry? lockEntry,
string? packageJsonSha256 = null,
string? packageJsonHashSkipReason = null)
{
return new BunPackage(name, version)
{
@@ -112,10 +120,15 @@ internal sealed class BunPackage
IsDev = lockEntry?.IsDev ?? false,
IsOptional = lockEntry?.IsOptional ?? false,
IsPeer = lockEntry?.IsPeer ?? false,
ScopeUnknown = lockEntry?.ScopeUnknown ?? false,
SourceType = lockEntry?.SourceType ?? "npm",
GitCommit = lockEntry?.GitCommit,
Specifier = lockEntry?.Specifier,
Dependencies = lockEntry?.Dependencies ?? Array.Empty<string>()
PackageJsonSha256 = packageJsonSha256,
PackageJsonHashSkipReason = packageJsonHashSkipReason,
Dependencies = lockEntry is null
? Array.Empty<string>()
: lockEntry.Dependencies.Select(static dep => dep.Name).ToArray()
};
}
@@ -131,10 +144,11 @@ internal sealed class BunPackage
IsDev = entry.IsDev,
IsOptional = entry.IsOptional,
IsPeer = entry.IsPeer,
ScopeUnknown = entry.ScopeUnknown,
SourceType = entry.SourceType,
GitCommit = entry.GitCommit,
Specifier = entry.Specifier,
Dependencies = entry.Dependencies
Dependencies = entry.Dependencies.Select(static dep => dep.Name).ToArray()
};
}
@@ -172,6 +186,18 @@ internal sealed class BunPackage
metadata["private"] = "true";
}
if (!string.IsNullOrEmpty(PackageJsonHashSkipReason))
{
metadata["packageJson.hashSkipped"] = "true";
metadata["packageJson.hashSkipReason"] = PackageJsonHashSkipReason;
}
if (!string.IsNullOrEmpty(LockfileHashSkipReason))
{
metadata["bunLock.hashSkipped"] = "true";
metadata["bunLock.hashSkipReason"] = LockfileHashSkipReason;
}
if (!string.IsNullOrEmpty(CustomRegistry))
{
metadata["customRegistry"] = CustomRegistry;
@@ -182,6 +208,11 @@ internal sealed class BunPackage
metadata["dev"] = "true";
}
if (ScopeUnknown)
{
metadata["scopeUnknown"] = "true";
}
if (IsDirect)
{
metadata["direct"] = "true";
@@ -243,36 +274,41 @@ internal sealed class BunPackage
Source ?? "node_modules",
NormalizePath(Path.Combine(LogicalPath, "package.json")),
null,
null));
PackageJsonSha256));
}
if (!string.IsNullOrEmpty(Resolved))
{
var locator = BuildLockLocator();
evidence.Add(new LanguageComponentEvidence(
LanguageEvidenceKind.Metadata,
"resolved",
"bun.lock",
locator,
Resolved,
null));
LockfileSha256));
}
if (!string.IsNullOrEmpty(Integrity))
{
var locator = BuildLockLocator();
evidence.Add(new LanguageComponentEvidence(
LanguageEvidenceKind.Metadata,
"integrity",
"bun.lock",
locator,
Integrity,
null));
LockfileSha256));
}
return evidence;
}
private static string BuildPurl(string name, string version)
private string BuildLockLocator()
=> $"{NormalizePath(string.IsNullOrWhiteSpace(LockfilePath) ? "bun.lock" : LockfilePath!)}:packages[{Name}@{Version}]";
internal static string BuildNpmPurl(string name, string version)
{
// pkg:npm/<name>@<version>
// Scoped packages: @scope/name %40scope/name
// Scoped packages: @scope/name -> %40scope/name
var encodedName = name.StartsWith('@')
? $"%40{HttpUtility.UrlEncode(name[1..]).Replace("%2f", "/", StringComparison.OrdinalIgnoreCase)}"
: HttpUtility.UrlEncode(name);

View File

@@ -26,12 +26,26 @@ internal static class BunProjectDiscoverer
{
ArgumentNullException.ThrowIfNull(context);
var roots = new List<string>();
DiscoverRecursive(context.RootPath, 0, roots, cancellationToken);
var roots = new List<string>(capacity: 8);
var unique = new HashSet<string>(StringComparer.Ordinal);
foreach (var discoveryRoot in EnumerateDiscoveryRoots(context.RootPath, cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
DiscoverRecursive(discoveryRoot, 0, roots, unique, cancellationToken);
if (roots.Count >= MaxRoots)
{
break;
}
}
roots.Sort(StringComparer.Ordinal);
return roots.ToImmutableArray();
}
private static void DiscoverRecursive(string directory, int depth, List<string> roots, CancellationToken cancellationToken)
private static void DiscoverRecursive(string directory, int depth, List<string> roots, HashSet<string> unique, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
@@ -48,7 +62,11 @@ internal static class BunProjectDiscoverer
// Check if this directory is a Bun project root
if (IsBunProjectRoot(directory))
{
roots.Add(directory);
if (unique.Add(directory))
{
roots.Add(directory);
}
// Don't recurse into node_modules or .bun
return;
}
@@ -56,7 +74,7 @@ internal static class BunProjectDiscoverer
// Recurse into subdirectories
try
{
foreach (var subdir in Directory.EnumerateDirectories(directory))
foreach (var subdir in EnumerateDirectoriesSorted(directory))
{
cancellationToken.ThrowIfCancellationRequested();
@@ -68,7 +86,7 @@ internal static class BunProjectDiscoverer
continue;
}
DiscoverRecursive(subdir, depth + 1, roots, cancellationToken);
DiscoverRecursive(subdir, depth + 1, roots, unique, cancellationToken);
if (roots.Count >= MaxRoots)
{
@@ -117,7 +135,108 @@ internal static class BunProjectDiscoverer
private static bool ShouldSkipDirectory(string dirName)
{
return dirName is "node_modules" or ".git" or ".svn" or ".hg" or "bin" or "obj" or ".bun"
|| dirName.StartsWith('.'); // Skip hidden directories
if (dirName is "node_modules" or ".git" or ".svn" or ".hg" or "bin" or "obj" or ".bun")
{
return true;
}
// Do not skip container layer roots like ".layers".
if (dirName.Equals(".layers", StringComparison.Ordinal))
{
return false;
}
// Skip other hidden directories by default.
return dirName.Length > 0 && dirName[0] == '.';
}
/// <summary>
/// Yields the normalized scan root first, then any container layer roots beneath it.
/// Yields nothing when the root path is blank or does not exist on disk.
/// </summary>
private static IEnumerable<string> EnumerateDiscoveryRoots(string rootPath, CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(rootPath))
    {
        yield break;
    }

    var normalizedRoot = Path.GetFullPath(rootPath);
    if (!Directory.Exists(normalizedRoot))
    {
        yield break;
    }

    yield return normalizedRoot;

    foreach (var layerRoot in EnumerateContainerLayerRoots(normalizedRoot, cancellationToken))
    {
        yield return layerRoot;
    }
}
/// <summary>
/// Enumerates candidate container layer roots under <paramref name="rootPath"/>, covering
/// the common unpack layouts: <c>layers/&lt;id&gt;</c>, <c>.layers/&lt;id&gt;</c>, and direct
/// children named <c>layer*</c> (e.g. <c>layer0/</c>). Candidates are emitted in ordinal
/// order and re-checked for existence before being yielded.
/// </summary>
private static IEnumerable<string> EnumerateContainerLayerRoots(string rootPath, CancellationToken cancellationToken)
{
    var layerCandidates = new List<string>();

    // Dedicated layer container directories.
    foreach (var containerName in new[] { "layers", ".layers" })
    {
        var containerPath = Path.Combine(rootPath, containerName);
        if (Directory.Exists(containerPath))
        {
            layerCandidates.AddRange(EnumerateDirectoriesSorted(containerPath));
        }
    }

    // Direct children named layer* — excluding the container directories themselves.
    foreach (var child in EnumerateDirectoriesSorted(rootPath))
    {
        cancellationToken.ThrowIfCancellationRequested();

        var childName = Path.GetFileName(child);
        if (childName is null)
        {
            continue;
        }

        if (childName.StartsWith("layer", StringComparison.OrdinalIgnoreCase) &&
            !childName.Equals("layers", StringComparison.OrdinalIgnoreCase) &&
            !childName.Equals(".layers", StringComparison.OrdinalIgnoreCase))
        {
            layerCandidates.Add(child);
        }
    }

    layerCandidates.Sort(StringComparer.Ordinal);

    foreach (var candidate in layerCandidates)
    {
        cancellationToken.ThrowIfCancellationRequested();

        if (Directory.Exists(candidate))
        {
            yield return candidate;
        }
    }
}
/// <summary>
/// Enumerates the immediate subdirectories of <paramref name="directory"/> in ordinal order
/// for deterministic traversal. Returns an empty list on access or I/O failures (including a
/// missing directory) so discovery never crashes on unreadable paths.
/// </summary>
private static IReadOnlyList<string> EnumerateDirectoriesSorted(string directory)
{
    try
    {
        var entries = Directory.EnumerateDirectories(directory).ToList();
        entries.Sort(StringComparer.Ordinal);
        return entries;
    }
    catch (UnauthorizedAccessException)
    {
        return Array.Empty<string>();
    }
    catch (IOException)
    {
        // Fix: previously only DirectoryNotFoundException was caught; other IOExceptions
        // (path is a file, device/share errors) escaped and aborted discovery. IOException
        // also subsumes DirectoryNotFoundException, preserving the old behavior.
        return Array.Empty<string>();
    }
}
}

View File

@@ -0,0 +1,144 @@
namespace StellaOps.Scanner.Analyzers.Lang.Bun.Internal;
/// <summary>
/// Version-specifier classification helpers supporting the identity-safety rule: a purl is
/// only emitted for concrete versions, never for ranges, tags, or placeholder specs.
/// </summary>
internal static class BunVersionSpec
{
    /// <summary>
    /// Lenient check that <paramref name="version"/> looks like a concrete npm version
    /// (accepts forms like "v1.2.3" or "1.2.3_build" in addition to strict semver).
    /// Rejects range operators (^ ~ &gt; &lt;), specs with spaces/*/|/:/ or path separators,
    /// digit-free values (dist-tags), and npm x-range placeholders such as "1.x" or "1.2.X".
    /// </summary>
    public static bool IsConcreteNpmVersion(string? version)
    {
        if (string.IsNullOrWhiteSpace(version))
        {
            return false;
        }

        var value = version.Trim();

        // Range operators only ever appear as a leading character.
        if (value.StartsWith('^') || value.StartsWith('~') || value.StartsWith('>') || value.StartsWith('<'))
        {
            return false;
        }

        // Characters that only occur in ranges, protocol specs, or URLs.
        if (value.Contains(' ') || value.Contains('*') || value.Contains('|') || value.Contains(':') || value.Contains('/'))
        {
            return false;
        }

        var hasDigit = false;
        foreach (var ch in value)
        {
            if (char.IsAsciiDigit(ch))
            {
                hasDigit = true;
                continue;
            }

            if (ch is '.' or '-' or '+' or '_' || char.IsAsciiLetter(ch))
            {
                continue;
            }

            return false;
        }

        if (!hasDigit)
        {
            return false;
        }

        // Fix: "1.x" / "1.2.X" previously passed because 'x' is an ASCII letter, yet they are
        // npm x-ranges, not concrete versions — emitting them in a purl would violate identity safety.
        return !HasWildcardCoreSegment(value);
    }

    /// <summary>
    /// Strict semver check: MAJOR.MINOR.PATCH with optional -prerelease and +build
    /// ([0-9A-Za-z.-]+ identifiers), consuming the entire input.
    /// </summary>
    public static bool IsConcreteSemver(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        var span = value.AsSpan().Trim();
        if (span.Length == 0 || !char.IsAsciiDigit(span[0]))
        {
            return false;
        }

        var index = 0;
        if (!ConsumeDigits(span, ref index))
        {
            return false;
        }

        if (!ConsumeChar(span, ref index, '.'))
        {
            return false;
        }

        if (!ConsumeDigits(span, ref index))
        {
            return false;
        }

        if (!ConsumeChar(span, ref index, '.'))
        {
            return false;
        }

        if (!ConsumeDigits(span, ref index))
        {
            return false;
        }

        // Optional prerelease: -[0-9A-Za-z.-]+
        if (index < span.Length && span[index] == '-')
        {
            index++;
            if (!ConsumeSemverIdentifiers(span, ref index))
            {
                return false;
            }
        }

        // Optional build metadata: +[0-9A-Za-z.-]+
        if (index < span.Length && span[index] == '+')
        {
            index++;
            if (!ConsumeSemverIdentifiers(span, ref index))
            {
                return false;
            }
        }

        return index == span.Length;
    }

    // Returns true when the release core (the part before any '-' prerelease or '+' build
    // suffix) contains a bare "x"/"X" segment — npm's x-range placeholder. Prerelease
    // identifiers may legitimately contain 'x' and are not inspected.
    private static bool HasWildcardCoreSegment(string value)
    {
        var cut = value.IndexOfAny('-', '+');
        var core = cut >= 0 ? value[..cut] : value;
        foreach (var segment in core.Split('.'))
        {
            if (segment is "x" or "X")
            {
                return true;
            }
        }

        return false;
    }

    // Consumes one or more ASCII digits; fails when none are present.
    private static bool ConsumeDigits(ReadOnlySpan<char> span, ref int index)
    {
        var start = index;
        while (index < span.Length && char.IsAsciiDigit(span[index]))
        {
            index++;
        }

        return index > start;
    }

    // Consumes exactly the expected character; fails at end of input or on mismatch.
    private static bool ConsumeChar(ReadOnlySpan<char> span, ref int index, char expected)
    {
        if (index >= span.Length || span[index] != expected)
        {
            return false;
        }

        index++;
        return true;
    }

    // Consumes one or more characters from [0-9A-Za-z.-]; fails when none are present.
    private static bool ConsumeSemverIdentifiers(ReadOnlySpan<char> span, ref int index)
    {
        var start = index;
        while (index < span.Length)
        {
            var ch = span[index];
            if (char.IsAsciiLetterOrDigit(ch) || ch is '.' or '-')
            {
                index++;
                continue;
            }

            break;
        }

        return index > start;
    }
}

View File

@@ -317,9 +317,9 @@ internal static class BunWorkspaceHelper
var patchFile = entry.Value.GetString();
if (!string.IsNullOrEmpty(patchFile))
{
// Parse package name from key (could be "pkg@version" or just "pkg")
var packageName = ExtractPackageName(entry.Name);
result[packageName] = patchFile;
// Preserve version specificity (name@version) when present.
var patchKey = NormalizePatchKey(entry.Name);
result[patchKey] = NormalizePatchPath(projectRoot, patchFile);
}
}
}
@@ -328,31 +328,32 @@ internal static class BunWorkspaceHelper
var patchesDir = Path.Combine(projectRoot, "patches");
if (Directory.Exists(patchesDir))
{
ScanPatchesDirectory(patchesDir, result);
ScanPatchesDirectory(patchesDir, projectRoot, result);
}
// Bun uses .patches directory
var bunPatchesDir = Path.Combine(projectRoot, ".patches");
if (Directory.Exists(bunPatchesDir))
{
ScanPatchesDirectory(bunPatchesDir, result);
ScanPatchesDirectory(bunPatchesDir, projectRoot, result);
}
return result;
}
private static void ScanPatchesDirectory(string patchesDir, Dictionary<string, string> result)
private static void ScanPatchesDirectory(string patchesDir, string projectRoot, Dictionary<string, string> result)
{
try
{
foreach (var patchFile in Directory.EnumerateFiles(patchesDir, "*.patch"))
foreach (var patchFile in Directory.EnumerateFiles(patchesDir, "*.patch").OrderBy(static path => Path.GetFileName(path), StringComparer.Ordinal))
{
// Patch file name format: package-name@version.patch
// Patch file name format (pnpm/bun): package-name@version.patch or @scope+name@version.patch
var fileName = Path.GetFileNameWithoutExtension(patchFile);
var packageName = ExtractPackageName(fileName);
if (!string.IsNullOrEmpty(packageName) && !result.ContainsKey(packageName))
var patchKey = NormalizePatchKey(fileName);
if (!string.IsNullOrEmpty(patchKey) && !result.ContainsKey(patchKey))
{
result[packageName] = patchFile;
var relative = Path.GetRelativePath(projectRoot, patchFile);
result[patchKey] = NormalizePath(relative);
}
}
}
@@ -362,30 +363,63 @@ internal static class BunWorkspaceHelper
}
}
private static string ExtractPackageName(string nameWithVersion)
private static string NormalizePatchKey(string nameWithVersion)
{
// Format: package-name@version or @scope/package-name@version
if (string.IsNullOrEmpty(nameWithVersion))
if (string.IsNullOrWhiteSpace(nameWithVersion))
{
return string.Empty;
}
// For scoped packages, find @ after the scope
if (nameWithVersion.StartsWith('@'))
var trimmed = nameWithVersion.Trim();
// pnpm patch naming encodes @scope/name as @scope+name
if (trimmed.StartsWith('@') && !trimmed.Contains('/', StringComparison.Ordinal))
{
var slashIndex = nameWithVersion.IndexOf('/');
if (slashIndex > 0)
// Replace the first '+' in the name portion (before the version delimiter) with '/'
var versionAt = trimmed.IndexOf('@', 1);
var namePart = versionAt > 0 ? trimmed[..versionAt] : trimmed;
var versionPart = versionAt > 0 ? trimmed[versionAt..] : string.Empty;
var plusIndex = namePart.IndexOf('+', StringComparison.Ordinal);
if (plusIndex > 0)
{
var atIndex = nameWithVersion.IndexOf('@', slashIndex);
return atIndex > slashIndex ? nameWithVersion[..atIndex] : nameWithVersion;
namePart = $"{namePart[..plusIndex]}/{namePart[(plusIndex + 1)..]}";
}
return namePart + versionPart;
}
return trimmed;
}
private static string NormalizePatchPath(string projectRoot, string patchFile)
{
if (string.IsNullOrWhiteSpace(patchFile))
{
return string.Empty;
}
var trimmed = patchFile.Trim();
// Avoid absolute path leakage: convert to project-relative when possible.
if (Path.IsPathRooted(trimmed))
{
try
{
trimmed = Path.GetRelativePath(projectRoot, trimmed);
}
catch
{
// Keep the original string if we cannot relativize (still scrubbed later by metadata rules).
}
}
// For regular packages
var lastAtIndex = nameWithVersion.LastIndexOf('@');
return lastAtIndex > 0 ? nameWithVersion[..lastAtIndex] : nameWithVersion;
return NormalizePath(trimmed);
}
private static string NormalizePath(string path)
=> path.Replace('\\', '/');
private static void AddDependencies(
JsonElement root,
string propertyName,

View File

@@ -0,0 +1,13 @@
# Bun Analyzer Tasks (Sprint 0407)
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| SCAN-BUN-407-001 | DONE | Container-layer aware project discovery (`layers/`, `.layers/`, `layer*`), bounded + deterministic. | 2025-12-13 |
| SCAN-BUN-407-002 | DONE | Declared-only fallback from `package.json` with safe identities (no range-as-version PURLs). | 2025-12-13 |
| SCAN-BUN-407-003 | DONE | bun.lock v1 graph enrichment (dependency specifiers + deterministic dev/optional/peer classification). | 2025-12-13 |
| SCAN-BUN-407-004 | DONE | Make `includeDev` meaningful for lockfile-only and installed scans; use `scopeUnknown` when unsure. | 2025-12-13 |
| SCAN-BUN-407-005 | DONE | Version-specific patch mapping + relative patch paths (no absolute path leakage). | 2025-12-13 |
| SCAN-BUN-407-006 | DONE | Evidence strengthening + locator precision (bun.lock locators, bounded sha256). | 2025-12-13 |
| SCAN-BUN-407-007 | DONE | Identity safety for non-npm sources (git/file/link/workspace/tarball/custom registry). | 2025-12-13 |
| SCAN-BUN-407-008 | DONE | Document analyzer contract under `docs/modules/scanner/` and link sprint. | 2025-12-13 |
| SCAN-BUN-407-009 | DONE | Optional: deterministic benchmark if perf risk materializes. | 2025-12-13 |

View File

@@ -21,6 +21,7 @@ internal static class DenoBundleInspector
}
sourcePath ??= "(stream)";
sourcePath = sourcePath.Replace('\\', '/');
try
{

View File

@@ -15,11 +15,14 @@ internal static class DenoContainerAdapter
private static void AddCaches(DenoWorkspace workspace, ImmutableArray<DenoContainerInput>.Builder builder)
{
foreach (var cache in workspace.CacheLocations)
foreach (var cache in workspace.CacheLocations
.OrderByDescending(static cache => !string.IsNullOrWhiteSpace(cache.LayerDigest))
.ThenBy(static cache => cache.Kind)
.ThenBy(static cache => cache.AbsolutePath, StringComparer.OrdinalIgnoreCase))
{
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["path"] = cache.AbsolutePath,
["path"] = NormalizePath(cache.AbsolutePath),
["alias"] = cache.Alias,
["kind"] = cache.Kind.ToString()
};
@@ -35,11 +38,13 @@ internal static class DenoContainerAdapter
private static void AddVendors(DenoWorkspace workspace, ImmutableArray<DenoContainerInput>.Builder builder)
{
foreach (var vendor in workspace.Vendors)
foreach (var vendor in workspace.Vendors
.OrderBy(static vendor => !string.IsNullOrWhiteSpace(vendor.LayerDigest))
.ThenBy(static vendor => vendor.RelativePath, StringComparer.Ordinal))
{
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["path"] = vendor.AbsolutePath,
["path"] = NormalizePath(vendor.AbsolutePath),
["alias"] = vendor.Alias
};
@@ -73,4 +78,7 @@ internal static class DenoContainerAdapter
bundle));
}
}
/// <summary>Normalizes a filesystem path to forward slashes; blank input maps to the empty string.</summary>
private static string NormalizePath(string value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return string.Empty;
    }

    return value.Replace('\\', '/');
}
}

View File

@@ -195,7 +195,7 @@ internal static class DenoWorkspaceNormalizer
cancellationToken.ThrowIfCancellationRequested();
var relative = context.GetRelativePath(absolute);
var alias = DenoPathUtilities.CreateAlias(absolute, "vendor");
var alias = DenoPathUtilities.CreateAlias(relative, "vendor");
var layerDigest = DenoLayerMetadata.TryExtractDigest(absolute);
DenoImportMapDocument? importMap = null;
@@ -272,7 +272,7 @@ internal static class DenoWorkspaceNormalizer
cancellationToken.ThrowIfCancellationRequested();
var alias = DenoPathUtilities.CreateAlias(absolute, "deno");
var alias = DenoPathUtilities.CreateAlias(context.GetRelativePath(absolute), "deno");
builder.Add(new DenoCacheLocation(
absolute,
alias,

View File

@@ -23,14 +23,12 @@ internal static class DenoRuntimeTraceSerializer
.ToArray();
using var stream = new MemoryStream();
using (var writer = new Utf8JsonWriter(stream, WriterOptions))
foreach (var evt in ordered)
{
foreach (var evt in ordered)
{
WriteEvent(writer, evt);
writer.Flush();
stream.WriteByte((byte)'\n');
}
using var writer = new Utf8JsonWriter(stream, WriterOptions);
WriteEvent(writer, evt);
writer.Flush();
stream.WriteByte((byte)'\n');
}
var bytes = stream.ToArray();
@@ -136,12 +134,24 @@ internal static class DenoRuntimeTraceSerializer
{
if (!string.IsNullOrWhiteSpace(p))
{
permissions.Add(p.Trim().ToLowerInvariant());
var normalized = p.Trim().ToLowerInvariant();
if (!string.Equals(normalized, "unknown", StringComparison.Ordinal))
{
permissions.Add(normalized);
}
}
}
break;
case DenoPermissionUseEvent:
case DenoPermissionUseEvent e:
permissionUses++;
if (!string.IsNullOrWhiteSpace(e.Permission))
{
var normalized = e.Permission.Trim().ToLowerInvariant();
if (!string.Equals(normalized, "unknown", StringComparison.Ordinal))
{
permissions.Add(normalized);
}
}
break;
case DenoNpmResolutionEvent:
npmResolutions++;

View File

@@ -45,8 +45,10 @@ public static class DotNetEntrypointResolver
continue;
}
var name = GetEntrypointName(depsPath);
DotNetRuntimeConfig? runtimeConfig = null;
var runtimeConfigPath = Path.ChangeExtension(depsPath, ".runtimeconfig.json");
var runtimeConfigPath = GetRuntimeConfigPath(depsPath, name);
string? relativeRuntimeConfig = null;
if (!string.IsNullOrEmpty(runtimeConfigPath) && File.Exists(runtimeConfigPath))
@@ -59,7 +61,6 @@ public static class DotNetEntrypointResolver
var rids = CollectRuntimeIdentifiers(depsFile, runtimeConfig);
var publishKind = DeterminePublishKind(depsFile);
var name = GetEntrypointName(depsPath);
var id = BuildDeterministicId(name, tfms, rids, publishKind);
results.Add(new DotNetEntrypoint(
@@ -101,6 +102,19 @@ public static class DotNetEntrypointResolver
return stem;
}
/// <summary>
/// Builds the sibling "&lt;entrypointName&gt;.runtimeconfig.json" path next to the deps
/// file; falls back to the bare file name when the deps path has no directory part.
/// </summary>
private static string GetRuntimeConfigPath(string depsPath, string entrypointName)
{
    var runtimeConfigFileName = entrypointName + ".runtimeconfig.json";
    var parent = Path.GetDirectoryName(depsPath);

    return string.IsNullOrWhiteSpace(parent)
        ? runtimeConfigFileName
        : Path.Combine(parent, runtimeConfigFileName);
}
private static IReadOnlyCollection<string> CollectTargetFrameworks(DotNetDepsFile depsFile, DotNetRuntimeConfig? runtimeConfig)
{
var tfms = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
@@ -109,7 +123,11 @@ public static class DotNetEntrypointResolver
{
foreach (var tfm in library.TargetFrameworks)
{
tfms.Add(tfm);
var normalized = NormalizeTargetFrameworkMoniker(tfm);
if (!string.IsNullOrWhiteSpace(normalized))
{
tfms.Add(normalized);
}
}
}
@@ -129,6 +147,83 @@ public static class DotNetEntrypointResolver
return tfms;
}
/// <summary>
/// Converts long-form framework names (".NETCoreApp,Version=v8.0") to short TFMs
/// ("net8.0", "netstandard2.0", "net472"); unrecognized formats pass through
/// trimmed, and blank input yields null.
/// </summary>
private static string? NormalizeTargetFrameworkMoniker(string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    var moniker = value.Trim();

    if (TryNormalizeFrameworkMoniker(moniker, ".NETCoreApp,Version=v", "net", out var shortTfm)
        || TryNormalizeFrameworkMoniker(moniker, ".NETStandard,Version=v", "netstandard", out shortTfm))
    {
        return shortTfm;
    }

    // Classic .NET Framework needs a second pass to collapse "net4.7.2" -> "net472".
    return TryNormalizeFrameworkMoniker(moniker, ".NETFramework,Version=v", "net", out shortTfm)
        ? NormalizeNetFrameworkTfm(shortTfm!)
        : moniker;
}
/// <summary>
/// Attempts to rewrite "&lt;prefix&gt;&lt;version&gt;" as "&lt;replacement&gt;&lt;major&gt;.&lt;minor&gt;".
/// Returns false (with <paramref name="normalized"/> null) when the prefix does not
/// match or the version part cannot be parsed.
/// </summary>
private static bool TryNormalizeFrameworkMoniker(string value, string prefix, string replacement, out string? normalized)
{
    normalized = null;

    if (!value.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
    {
        return false;
    }

    var rawVersion = value[prefix.Length..].Trim();
    if (rawVersion.Length == 0 || !Version.TryParse(rawVersion, out var parsed))
    {
        return false;
    }

    normalized = $"{replacement}{parsed.Major}.{parsed.Minor}";
    return true;
}
/// <summary>
/// Collapses a ".NET Framework"-style value ("net4.7.2") into the compact TFM form
/// ("net472"); values without a parsable version just have their dots stripped.
/// </summary>
private static string NormalizeNetFrameworkTfm(string value)
{
    if (!value.StartsWith("net", StringComparison.OrdinalIgnoreCase))
    {
        return value;
    }

    if (!Version.TryParse(value[3..], out var parsed))
    {
        return value.Replace(".", string.Empty, StringComparison.Ordinal);
    }

    var compact = $"net{Math.Max(parsed.Major, 0)}{Math.Max(parsed.Minor, 0)}";
    return parsed.Build > 0 ? compact + parsed.Build : compact;
}
private static IReadOnlyCollection<string> CollectRuntimeIdentifiers(DotNetDepsFile depsFile, DotNetRuntimeConfig? runtimeConfig)
{
var rids = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);

View File

@@ -100,7 +100,7 @@ public sealed class GoLanguageAnalyzer : ILanguageAnalyzer
cancellationToken.ThrowIfCancellationRequested();
// Quick check for known binary formats
if (GoBinaryFormatDetector.IsPotentialBinary(path))
if (GoBinaryFormatDetector.IsPotentialBinary(path) || GoBinaryScanner.HasBuildInfoMagicPrefix(path))
{
candidatePaths.Add(path);
}

View File

@@ -33,6 +33,45 @@ internal static class GoBinaryScanner
}
}
/// <summary>
/// Cheap pre-filter: returns true only when the file begins with the Go build-info
/// magic bytes. I/O, permission, and security failures are treated as "not a match"
/// so callers can probe candidate paths without exception handling.
/// </summary>
public static bool HasBuildInfoMagicPrefix(string filePath)
{
    if (string.IsNullOrWhiteSpace(filePath))
    {
        return false;
    }

    try
    {
        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);

        var magic = BuildInfoMagic.Span;
        if (stream.Length < magic.Length)
        {
            // File is too short to even contain the magic prefix.
            return false;
        }

        Span<byte> prefix = stackalloc byte[magic.Length];
        return stream.Read(prefix) == prefix.Length && prefix.SequenceEqual(magic);
    }
    catch (IOException)
    {
        return false;
    }
    catch (UnauthorizedAccessException)
    {
        return false;
    }
    catch (System.Security.SecurityException)
    {
        return false;
    }
}
public static bool TryReadBuildInfo(string filePath, out string? goVersion, out string? moduleData)
{
goVersion = null;

View File

@@ -36,8 +36,8 @@ internal static partial class GoCapabilityScanner
var originalLine = lineIndex < lines.Length ? lines[lineIndex] : strippedLine;
var lineNumber = lineIndex + 1;
// Skip empty lines
if (string.IsNullOrWhiteSpace(strippedLine))
// Skip whitespace-only lines while still scanning comment-only lines for directives.
if (string.IsNullOrWhiteSpace(strippedLine) && string.IsNullOrWhiteSpace(originalLine))
{
continue;
}

View File

@@ -387,7 +387,7 @@ internal static partial class GoCgoDetector
/// Matches #cgo directives with optional build constraints.
/// Format: #cgo [GOOS GOARCH] DIRECTIVE: value
/// </summary>
[GeneratedRegex(@"#cgo\s+(?:([a-z0-9_,!\s]+)\s+)?(\w+):\s*(.+?)(?=\n|$)", RegexOptions.Multiline | RegexOptions.IgnoreCase)]
[GeneratedRegex(@"#cgo\s+(?:([a-z0-9_,!\s]+)\s+)?([\w-]+):\s*(.+?)(?=\n|$)", RegexOptions.Multiline | RegexOptions.IgnoreCase)]
private static partial Regex CgoDirectivePattern();
/// <summary>

View File

@@ -44,6 +44,9 @@ internal static partial class GoLicenseDetector
new("Apache-1.1", @"Apache License.*?(?:Version 1\.1|v1\.1)", "Apache License, Version 1.1"),
new("Apache-1.0", @"Apache License.*?(?:Version 1\.0|v1\.0)", "Apache License, Version 1.0"),
// Boost (avoid mis-classifying as MIT)
new("BSL-1.0", @"Boost Software License", "Boost Software License 1.0"),
// MIT variants
new("MIT", @"(?:MIT License|Permission is hereby granted, free of charge)", "MIT License"),
new("MIT-0", @"MIT No Attribution", "MIT No Attribution"),
@@ -82,7 +85,6 @@ internal static partial class GoLicenseDetector
new("Unlicense", @"This is free and unencumbered software released into the public domain", "The Unlicense"),
new("WTFPL", @"DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE", "Do What The F*ck You Want To Public License"),
new("Zlib", @"zlib License|This software is provided 'as-is'", "zlib License"),
new("BSL-1.0", @"Boost Software License", "Boost Software License 1.0"),
new("PostgreSQL", @"PostgreSQL License", "PostgreSQL License"),
new("BlueOak-1.0.0", @"Blue Oak Model License", "Blue Oak Model License 1.0.0"),

View File

@@ -361,7 +361,7 @@ internal static partial class GoVersionConflictDetector
/// <summary>
/// Matches pseudo-versions: v0.0.0-timestamp-hash or vX.Y.Z-pre.0.timestamp-hash
/// </summary>
[GeneratedRegex(@"^v\d+\.\d+\.\d+(-[a-z0-9]+)?\.?\d*\.?\d{14}-[a-f0-9]{12}$", RegexOptions.IgnoreCase)]
[GeneratedRegex(@"^v\d+\.\d+\.\d+-(?:\d{14}|(?:[0-9a-z-]+\.)*0\.\d{14})-[a-f0-9]{12}$", RegexOptions.IgnoreCase)]
private static partial Regex PseudoVersionPattern();
/// <summary>

View File

@@ -13,6 +13,10 @@ internal static class JavaJniAnalyzer
{
private const ushort AccNative = 0x0100;
private const int MaxEdges = 2000;
private const int MaxWarnings = 200;
private const int MaxClassesPerSegment = 5000;
// Method references for System.load/loadLibrary and Runtime.load/loadLibrary
private static readonly (string ClassName, string MethodName, string Descriptor, JavaJniReason Reason)[] JniLoadMethods =
[
@@ -38,11 +42,44 @@ internal static class JavaJniAnalyzer
{
cancellationToken.ThrowIfCancellationRequested();
foreach (var kvp in segment.ClassLocations)
var classesScanned = 0;
foreach (var kvp in segment.ClassLocations.OrderBy(static pair => pair.Key, StringComparer.Ordinal))
{
var className = kvp.Key;
var location = kvp.Value;
if (edges.Count >= MaxEdges)
{
if (warnings.Count < MaxWarnings)
{
warnings.Add(new JavaJniWarning(
SourceClass: "*",
SegmentIdentifier: segment.Identifier,
WarningCode: "JNI_EDGE_LIMIT_REACHED",
Message: $"JNI edge limit ({MaxEdges}) reached; output truncated.",
MethodName: string.Empty,
MethodDescriptor: string.Empty));
}
break;
}
if (classesScanned++ >= MaxClassesPerSegment)
{
if (warnings.Count < MaxWarnings)
{
warnings.Add(new JavaJniWarning(
SourceClass: "*",
SegmentIdentifier: segment.Identifier,
WarningCode: "JNI_CLASS_LIMIT_REACHED",
Message: $"JNI class scan limit ({MaxClassesPerSegment}) reached for segment; output truncated.",
MethodName: string.Empty,
MethodDescriptor: string.Empty));
}
break;
}
try
{
using var stream = location.OpenClassStream(cancellationToken);
@@ -55,6 +92,11 @@ internal static class JavaJniAnalyzer
if (method.IsNative)
{
if (edges.Count >= MaxEdges)
{
break;
}
edges.Add(new JavaJniEdge(
SourceClass: className,
SegmentIdentifier: segment.Identifier,
@@ -65,26 +107,44 @@ internal static class JavaJniAnalyzer
MethodDescriptor: method.Descriptor,
InstructionOffset: -1,
Details: "native method declaration"));
if (edges.Count >= MaxEdges)
{
break;
}
}
// Analyze bytecode for System.load/loadLibrary calls
if (method.Code is not null)
{
AnalyzeMethodCode(classFile, method, segment.Identifier, className, edges, warnings);
if (edges.Count >= MaxEdges)
{
break;
}
}
}
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
warnings.Add(new JavaJniWarning(
SourceClass: className,
SegmentIdentifier: segment.Identifier,
WarningCode: "JNI_PARSE_ERROR",
Message: $"Failed to parse class file: {ex.Message}",
MethodName: string.Empty,
MethodDescriptor: string.Empty));
if (warnings.Count < MaxWarnings)
{
warnings.Add(new JavaJniWarning(
SourceClass: className,
SegmentIdentifier: segment.Identifier,
WarningCode: "JNI_PARSE_ERROR",
Message: $"Failed to parse class file: {ex.Message}",
MethodName: string.Empty,
MethodDescriptor: string.Empty));
}
}
}
if (edges.Count >= MaxEdges)
{
break;
}
}
if (edges.Count == 0 && warnings.Count == 0)
@@ -234,6 +294,11 @@ internal static class JavaJniAnalyzer
string className,
List<JavaJniEdge> edges)
{
if (edges.Count >= MaxEdges)
{
return;
}
var methodRef = classFile.ConstantPool.ResolveMethodRef(methodRefIndex);
if (methodRef is null)
{

View File

@@ -30,6 +30,11 @@ internal sealed class NodeDependencyIndex
/// <returns>A dependency index with all declared dependencies and their scopes.</returns>
public static NodeDependencyIndex Create(string rootPath)
{
if (string.IsNullOrWhiteSpace(rootPath))
{
return Empty;
}
var packageJsonPath = Path.Combine(rootPath, "package.json");
if (!File.Exists(packageJsonPath))
{
@@ -40,7 +45,75 @@ internal sealed class NodeDependencyIndex
{
using var stream = File.OpenRead(packageJsonPath);
using var document = JsonDocument.Parse(stream);
return CreateFromJson(document.RootElement);
var rootDeclarations = BuildDeclarationsFromJson(document.RootElement);
if (rootDeclarations.Count == 0)
{
rootDeclarations = new Dictionary<string, NodeDependencyDeclaration>(StringComparer.OrdinalIgnoreCase);
}
var workspaceBest = new Dictionary<string, NodeDependencyDeclaration>(StringComparer.OrdinalIgnoreCase);
var workspaceIndex = NodeWorkspaceIndex.Create(rootPath);
foreach (var workspaceRelative in workspaceIndex.GetMembers())
{
var workspacePackageJsonPath = Path.Combine(rootPath, workspaceRelative.Replace('/', Path.DirectorySeparatorChar), "package.json");
if (!File.Exists(workspacePackageJsonPath))
{
continue;
}
try
{
using var workspaceStream = File.OpenRead(workspacePackageJsonPath);
using var workspaceDocument = JsonDocument.Parse(workspaceStream);
var workspaceDeclarations = BuildDeclarationsFromJson(workspaceDocument.RootElement);
foreach (var declaration in workspaceDeclarations.Values)
{
if (rootDeclarations.ContainsKey(declaration.Name))
{
continue;
}
if (workspaceBest.TryGetValue(declaration.Name, out var existing))
{
if (GetScopePriority(declaration.Scope) < GetScopePriority(existing.Scope))
{
workspaceBest[declaration.Name] = declaration;
}
continue;
}
workspaceBest[declaration.Name] = declaration;
}
}
catch (IOException)
{
continue;
}
catch (JsonException)
{
continue;
}
}
var merged = new Dictionary<string, NodeDependencyDeclaration>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in rootDeclarations)
{
merged[pair.Key] = pair.Value;
}
foreach (var pair in workspaceBest)
{
merged.TryAdd(pair.Key, pair.Value);
}
if (merged.Count == 0)
{
return Empty;
}
return new NodeDependencyIndex(merged);
}
catch (IOException)
{
@@ -59,19 +132,31 @@ internal sealed class NodeDependencyIndex
/// <returns>A dependency index with all declared dependencies and their scopes.</returns>
public static NodeDependencyIndex CreateFromJson(JsonElement root)
{
var declarations = new Dictionary<string, NodeDependencyDeclaration>(StringComparer.OrdinalIgnoreCase);
var declarations = BuildDeclarationsFromJson(root);
return declarations.Count == 0 ? Empty : new NodeDependencyIndex(declarations);
}
ParseDependencySection(root, "dependencies", NodeDependencyScope.Production, declarations);
ParseDependencySection(root, "devDependencies", NodeDependencyScope.Development, declarations);
ParseDependencySection(root, "peerDependencies", NodeDependencyScope.Peer, declarations);
ParseDependencySection(root, "optionalDependencies", NodeDependencyScope.Optional, declarations);
if (declarations.Count == 0)
public static NodeDependencyIndex CreateFromPackageJsonPath(string packageJsonPath)
{
if (string.IsNullOrWhiteSpace(packageJsonPath) || !File.Exists(packageJsonPath))
{
return Empty;
}
return new NodeDependencyIndex(declarations);
try
{
using var stream = File.OpenRead(packageJsonPath);
using var document = JsonDocument.Parse(stream);
return CreateFromJson(document.RootElement);
}
catch (IOException)
{
return Empty;
}
catch (JsonException)
{
return Empty;
}
}
/// <summary>
@@ -159,6 +244,27 @@ internal sealed class NodeDependencyIndex
sectionName);
}
}
/// <summary>
/// Collects declarations from the four package.json dependency sections
/// (prod, dev, peer, optional) into a single map keyed case-insensitively
/// by package name.
/// </summary>
private static Dictionary<string, NodeDependencyDeclaration> BuildDeclarationsFromJson(JsonElement root)
{
    var declarations = new Dictionary<string, NodeDependencyDeclaration>(StringComparer.OrdinalIgnoreCase);

    foreach (var (sectionName, scope) in new[]
    {
        ("dependencies", NodeDependencyScope.Production),
        ("devDependencies", NodeDependencyScope.Development),
        ("peerDependencies", NodeDependencyScope.Peer),
        ("optionalDependencies", NodeDependencyScope.Optional),
    })
    {
        ParseDependencySection(root, sectionName, scope, declarations);
    }

    return declarations;
}
/// <summary>
/// Ranks scopes for conflict resolution: production wins over dev, dev over peer,
/// peer over optional; anything unrecognized sorts last (99).
/// </summary>
private static int GetScopePriority(NodeDependencyScope scope)
{
    switch (scope)
    {
        case NodeDependencyScope.Production:
            return 0;
        case NodeDependencyScope.Development:
            return 1;
        case NodeDependencyScope.Peer:
            return 2;
        case NodeDependencyScope.Optional:
            return 3;
        default:
            return 99;
    }
}
}
/// <summary>

View File

@@ -11,6 +11,7 @@ namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
internal static class NodeImportWalker
{
private const int MaxSourceMapBytes = 1_048_576; // 1 MiB safety cap
private const int MaxFallbackBytes = 524_288; // 512 KiB safety cap
public static IReadOnlyList<NodeImportEdge> AnalyzeImports(string rootPath, string sourcePath, string content)
{
@@ -29,20 +30,32 @@ internal static class NodeImportWalker
private static void AnalyzeInternal(string rootPath, string sourcePath, string content, bool allowSourceMap, List<NodeImportEdge> edges)
{
Script script;
Program? program;
try
{
var parser = new JavaScriptParser(new ParserOptions { Tolerant = true });
script = parser.ParseScript(content, sourcePath, strict: false);
program = parser.ParseScript(content, sourcePath, strict: false);
}
catch (ParserException)
{
script = null!;
try
{
var parser = new JavaScriptParser(new ParserOptions { Tolerant = true });
program = parser.ParseModule(content, sourcePath);
}
catch (ParserException)
{
program = null;
}
}
if (script is not null)
if (program is not null)
{
Walk(script, sourcePath, edges);
Walk(program, sourcePath, edges);
}
else
{
TryAnalyzeTypeScriptFallback(sourcePath, content, edges);
}
if (allowSourceMap)
@@ -75,6 +88,20 @@ internal static class NodeImportWalker
AddEdge(edges, sourcePath, importExprTarget!, "import()", importExprEvidence, importExprConfidence);
}
break;
case ExportNamedDeclaration exportNamed when exportNamed.Source is not null:
if (TryGetLiteral(exportNamed.Source, out var exportTarget, out var exportConfidence, out var exportEvidence))
{
AddEdge(edges, sourcePath, exportTarget!, "export-from", exportEvidence, exportConfidence);
}
break;
case ExportAllDeclaration exportAll:
if (TryGetLiteral(exportAll.Source, out var exportAllTarget, out var exportAllConfidence, out var exportAllEvidence))
{
AddEdge(edges, sourcePath, exportAllTarget!, "export-all", exportAllEvidence, exportAllConfidence);
}
break;
}
foreach (var child in node.ChildNodes)
@@ -261,6 +288,49 @@ internal static class NodeImportWalker
&& call.Arguments.Count == 1;
}
// Cached fallback patterns: constructed (and compiled) once instead of being
// re-parsed by Regex.Matches on every file that needs the fallback.
private static readonly Regex TypeScriptImportPattern = new(
    "\\bimport\\s+(?:type\\s+)?(?:[^;\\n]*?\\s+from\\s+)?[\\\"'](?<spec>[^\\\"']+)[\\\"']",
    RegexOptions.Multiline | RegexOptions.Compiled);

private static readonly Regex TypeScriptExportPattern = new(
    "\\bexport\\s+(?:type\\s+)?[^;\\n]*?\\s+from\\s+[\\\"'](?<spec>[^\\\"']+)[\\\"']",
    RegexOptions.Multiline | RegexOptions.Compiled);

/// <summary>
/// Regex-based fallback for TypeScript sources the JavaScript parser rejects:
/// extracts import/export-from specifiers (evidence "ts-regex", confidence
/// "medium") from at most the first <see cref="MaxFallbackBytes"/> characters.
/// </summary>
private static void TryAnalyzeTypeScriptFallback(string sourcePath, string content, List<NodeImportEdge> edges)
{
    if (!IsTypeScriptSource(sourcePath))
    {
        return;
    }

    if (string.IsNullOrWhiteSpace(content))
    {
        return;
    }

    // NOTE(review): the cap is applied in UTF-16 chars, not bytes, despite the
    // constant's name — confirm intent before renaming.
    var input = content.Length > MaxFallbackBytes ? content[..MaxFallbackBytes] : content;

    foreach (Match match in TypeScriptImportPattern.Matches(input))
    {
        var spec = match.Groups["spec"].Value;
        if (string.IsNullOrWhiteSpace(spec))
        {
            continue;
        }

        AddEdge(edges, sourcePath, spec.Trim(), "import", "ts-regex", "medium");
    }

    foreach (Match match in TypeScriptExportPattern.Matches(input))
    {
        var spec = match.Groups["spec"].Value;
        if (string.IsNullOrWhiteSpace(spec))
        {
            continue;
        }

        AddEdge(edges, sourcePath, spec.Trim(), "export-from", "ts-regex", "medium");
    }
}
/// <summary>True for TypeScript file extensions (.ts/.tsx/.mts/.cts), case-insensitive.</summary>
private static bool IsTypeScriptSource(string sourcePath)
{
    string[] typeScriptExtensions = [".ts", ".tsx", ".mts", ".cts"];

    foreach (var extension in typeScriptExtensions)
    {
        if (sourcePath.EndsWith(extension, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }

    return false;
}
private static string CombineConfidence(string left, string right)
{
static int Score(string value) => value switch

View File

@@ -11,22 +11,26 @@ internal sealed class NodeLockData
private static readonly NodeLockData Empty = new(
new Dictionary<string, NodeLockEntry>(StringComparer.Ordinal),
new Dictionary<string, NodeLockEntry>(StringComparer.OrdinalIgnoreCase),
new Dictionary<string, NodeLockEntry>(StringComparer.OrdinalIgnoreCase),
Array.Empty<NodeLockEntry>(),
NodeDependencyIndex.Create(string.Empty));
private readonly Dictionary<string, NodeLockEntry> _byPath;
private readonly Dictionary<string, NodeLockEntry> _byName;
private readonly Dictionary<string, NodeLockEntry> _byNameVersion;
private readonly IReadOnlyCollection<NodeLockEntry> _declared;
private readonly NodeDependencyIndex _dependencyIndex;
private NodeLockData(
Dictionary<string, NodeLockEntry> byPath,
Dictionary<string, NodeLockEntry> byName,
Dictionary<string, NodeLockEntry> byNameVersion,
IReadOnlyCollection<NodeLockEntry> declared,
NodeDependencyIndex dependencyIndex)
{
_byPath = byPath;
_byName = byName;
_byNameVersion = byNameVersion;
_declared = declared;
_dependencyIndex = dependencyIndex;
}
@@ -42,14 +46,15 @@ internal sealed class NodeLockData
{
var byPath = new Dictionary<string, NodeLockEntry>(StringComparer.Ordinal);
var byName = new Dictionary<string, NodeLockEntry>(StringComparer.OrdinalIgnoreCase);
var byNameVersion = new Dictionary<string, NodeLockEntry>(StringComparer.OrdinalIgnoreCase);
var declared = new Dictionary<string, NodeLockEntry>(StringComparer.OrdinalIgnoreCase);
// Build dependency index from package.json first
var dependencyIndex = NodeDependencyIndex.Create(rootPath);
LoadPackageLockJson(rootPath, byPath, byName, declared, dependencyIndex, cancellationToken);
LoadYarnLock(rootPath, byName, declared, dependencyIndex);
LoadPnpmLock(rootPath, byName, declared, dependencyIndex);
LoadPackageLockJson(rootPath, byPath, byName, byNameVersion, declared, dependencyIndex, cancellationToken);
LoadYarnLock(rootPath, byName, byNameVersion, declared, dependencyIndex);
LoadPnpmLock(rootPath, byName, byNameVersion, declared, dependencyIndex);
// Add declared-only entries for packages in package.json but not in any lockfile
AddDeclaredOnlyFromPackageJson(declared, dependencyIndex);
@@ -66,7 +71,7 @@ internal sealed class NodeLockData
.ThenBy(static entry => entry.Locator ?? string.Empty, StringComparer.OrdinalIgnoreCase)
.ToArray();
return ValueTask.FromResult(new NodeLockData(byPath, byName, declaredList, dependencyIndex));
return ValueTask.FromResult(new NodeLockData(byPath, byName, byNameVersion, declaredList, dependencyIndex));
}
/// <summary>
@@ -110,6 +115,9 @@ internal sealed class NodeLockData
}
public bool TryGet(string relativePath, string packageName, out NodeLockEntry? entry)
=> TryGet(relativePath, packageName, version: null, out entry);
public bool TryGet(string relativePath, string packageName, string? version, out NodeLockEntry? entry)
{
var normalizedPath = NormalizeLockPath(relativePath);
if (_byPath.TryGetValue(normalizedPath, out var byPathEntry))
@@ -120,8 +128,17 @@ internal sealed class NodeLockData
if (!string.IsNullOrEmpty(packageName))
{
var normalizedName = packageName.StartsWith('@') ? packageName : packageName;
if (_byName.TryGetValue(normalizedName, out var byNameEntry))
if (!string.IsNullOrWhiteSpace(version))
{
var nameVersionKey = CreateNameVersionKey(packageName, version);
if (_byNameVersion.TryGetValue(nameVersionKey, out var byVersionEntry))
{
entry = byVersionEntry;
return true;
}
}
if (_byName.TryGetValue(packageName, out var byNameEntry))
{
entry = byNameEntry;
return true;
@@ -197,6 +214,7 @@ internal sealed class NodeLockData
JsonElement dependenciesElement,
IDictionary<string, NodeLockEntry> byPath,
IDictionary<string, NodeLockEntry> byName,
IDictionary<string, NodeLockEntry> byNameVersion,
IDictionary<string, NodeLockEntry> declared,
NodeDependencyIndex dependencyIndex)
{
@@ -210,12 +228,13 @@ internal sealed class NodeLockData
{
byPath[normalizedPath] = entry;
byName[dependency.Name] = entry;
AddNameVersionEntry(byNameVersion, entry);
AddDeclaration(declared, entry);
}
if (depValue.TryGetProperty("dependencies", out var childDependencies) && childDependencies.ValueKind == JsonValueKind.Object)
{
TraverseLegacyDependencies(path + "/node_modules", childDependencies, byPath, byName, declared, dependencyIndex);
TraverseLegacyDependencies(path + "/node_modules", childDependencies, byPath, byName, byNameVersion, declared, dependencyIndex);
}
}
}
@@ -224,6 +243,7 @@ internal sealed class NodeLockData
string rootPath,
IDictionary<string, NodeLockEntry> byPath,
IDictionary<string, NodeLockEntry> byName,
IDictionary<string, NodeLockEntry> byNameVersion,
IDictionary<string, NodeLockEntry> declared,
NodeDependencyIndex dependencyIndex,
CancellationToken cancellationToken)
@@ -260,6 +280,7 @@ internal sealed class NodeLockData
if (!string.IsNullOrEmpty(entry.Name))
{
byName[entry.Name] = entry;
AddNameVersionEntry(byNameVersion, entry);
}
AddDeclaration(declared, entry);
@@ -267,7 +288,7 @@ internal sealed class NodeLockData
}
else if (root.TryGetProperty("dependencies", out var dependenciesElement) && dependenciesElement.ValueKind == JsonValueKind.Object)
{
TraverseLegacyDependencies("node_modules", dependenciesElement, byPath, byName, declared, dependencyIndex);
TraverseLegacyDependencies("node_modules", dependenciesElement, byPath, byName, byNameVersion, declared, dependencyIndex);
}
}
catch (IOException)
@@ -283,6 +304,7 @@ internal sealed class NodeLockData
private static void LoadYarnLock(
string rootPath,
IDictionary<string, NodeLockEntry> byName,
IDictionary<string, NodeLockEntry> byNameVersion,
IDictionary<string, NodeLockEntry> declared,
NodeDependencyIndex dependencyIndex)
{
@@ -299,6 +321,7 @@ internal sealed class NodeLockData
string? version = null;
string? resolved = null;
string? integrity = null;
string? checksum = null;
void Flush()
{
@@ -307,6 +330,7 @@ internal sealed class NodeLockData
version = null;
resolved = null;
integrity = null;
checksum = null;
return;
}
@@ -316,6 +340,7 @@ internal sealed class NodeLockData
version = null;
resolved = null;
integrity = null;
checksum = null;
return;
}
@@ -328,12 +353,20 @@ internal sealed class NodeLockData
isOptional = foundScope == NodeDependencyScope.Optional;
}
var entry = new NodeLockEntry(YarnLockSource, currentName, simpleName, version, resolved, integrity, scope, isOptional);
var effectiveIntegrity = integrity;
if (string.IsNullOrWhiteSpace(effectiveIntegrity) && !string.IsNullOrWhiteSpace(checksum))
{
effectiveIntegrity = "checksum:" + checksum.Trim();
}
var entry = new NodeLockEntry(YarnLockSource, currentName, simpleName, version, resolved, effectiveIntegrity, scope, isOptional);
byName[simpleName] = entry;
AddNameVersionEntry(byNameVersion, entry);
AddDeclaration(declared, entry);
version = null;
resolved = null;
integrity = null;
checksum = null;
}
foreach (var line in lines)
@@ -353,17 +386,30 @@ internal sealed class NodeLockData
continue;
}
if (trimmed.StartsWith("version", StringComparison.OrdinalIgnoreCase))
if (!TryParseYarnField(trimmed, out var key, out var value))
{
version = ExtractQuotedValue(trimmed);
continue;
}
else if (trimmed.StartsWith("resolved", StringComparison.OrdinalIgnoreCase))
if (key.Equals("version", StringComparison.OrdinalIgnoreCase))
{
resolved = ExtractQuotedValue(trimmed);
version = value;
}
else if (trimmed.StartsWith("integrity", StringComparison.OrdinalIgnoreCase))
else if (key.Equals("resolved", StringComparison.OrdinalIgnoreCase))
{
integrity = ExtractQuotedValue(trimmed);
resolved = value;
}
else if (key.Equals("resolution", StringComparison.OrdinalIgnoreCase))
{
resolved = value;
}
else if (key.Equals("integrity", StringComparison.OrdinalIgnoreCase))
{
integrity = value;
}
else if (key.Equals("checksum", StringComparison.OrdinalIgnoreCase))
{
checksum = value;
}
}
@@ -378,6 +424,7 @@ internal sealed class NodeLockData
private static void LoadPnpmLock(
string rootPath,
IDictionary<string, NodeLockEntry> byName,
IDictionary<string, NodeLockEntry> byNameVersion,
IDictionary<string, NodeLockEntry> declared,
NodeDependencyIndex dependencyIndex)
{
@@ -394,11 +441,23 @@ internal sealed class NodeLockData
string? version = null;
string? resolved = null;
string? integrity = null;
var inPackages = false;
string? section = null;
void Flush()
{
if (string.IsNullOrEmpty(currentPackage) || string.IsNullOrEmpty(integrity))
if (string.IsNullOrEmpty(currentPackage))
{
version = null;
resolved = null;
integrity = null;
return;
}
var effectiveVersion = string.IsNullOrWhiteSpace(version)
? ExtractVersionFromPnpmKey(currentPackage!)
: version!;
if (string.IsNullOrWhiteSpace(effectiveVersion))
{
version = null;
resolved = null;
@@ -424,8 +483,12 @@ internal sealed class NodeLockData
isOptional = foundScope == NodeDependencyScope.Optional;
}
var entry = new NodeLockEntry(PnpmLockSource, currentPackage, name, version, resolved, integrity, scope, isOptional);
var integrityMissing = string.IsNullOrWhiteSpace(integrity);
var integrityReason = integrityMissing ? DetermineIntegrityMissingReason(currentPackage!, resolved, effectiveVersion) : null;
var entry = new NodeLockEntry(PnpmLockSource, currentPackage, name, effectiveVersion.Trim(), resolved, integrity, scope, isOptional, integrityMissing, integrityReason);
byName[name] = entry;
AddNameVersionEntry(byNameVersion, entry);
AddDeclaration(declared, entry);
version = null;
resolved = null;
@@ -439,12 +502,30 @@ internal sealed class NodeLockData
continue;
}
if (!inPackages)
if (!line.StartsWith(' ') && line.EndsWith(':'))
{
Flush();
currentPackage = null;
version = null;
resolved = null;
integrity = null;
section = null;
}
if (section is null)
{
if (line.StartsWith("packages:", StringComparison.Ordinal))
{
inPackages = true;
section = "packages";
continue;
}
if (line.StartsWith("snapshots:", StringComparison.Ordinal))
{
section = "snapshots";
continue;
}
continue;
}
@@ -471,6 +552,12 @@ internal sealed class NodeLockData
if (integrityIndex >= 0)
{
var integrityValue = trimmed[(integrityIndex + 9)..].Trim(' ', ':', '{', '}', '"');
var integrityCommaIndex = integrityValue.IndexOf(',');
if (integrityCommaIndex > 0)
{
integrityValue = integrityValue[..integrityCommaIndex].Trim();
}
integrity = integrityValue;
}
@@ -478,6 +565,12 @@ internal sealed class NodeLockData
if (tarballIndex >= 0)
{
var tarballValue = trimmed[(tarballIndex + 7)..].Trim(' ', ':', '{', '}', '"');
var tarballCommaIndex = tarballValue.IndexOf(',');
if (tarballCommaIndex > 0)
{
tarballValue = tarballValue[..tarballCommaIndex].Trim();
}
resolved = tarballValue;
}
}
@@ -503,6 +596,95 @@ internal sealed class NodeLockData
}
}
/// <summary>
/// Splits a trimmed Yarn lockfile field line into key and value. Supports both
/// the berry shape ("key: value") and the classic shape ("key value"), picking
/// the colon form only when the colon appears before any space.
/// </summary>
/// <returns><c>true</c> when a non-empty key was extracted.</returns>
private static bool TryParseYarnField(string trimmed, out string key, out string value)
{
    key = string.Empty;
    value = string.Empty;

    if (string.IsNullOrWhiteSpace(trimmed))
    {
        return false;
    }

    var colonIndex = trimmed.IndexOf(':');
    var spaceIndex = trimmed.IndexOf(' ');

    // Prefer the colon delimiter when it precedes the first space; otherwise
    // fall back to the space delimiter. A delimiter at position 0 is invalid.
    var useColon = colonIndex > 0 && (spaceIndex < 0 || colonIndex < spaceIndex);
    var splitIndex = useColon ? colonIndex : spaceIndex;
    if (splitIndex <= 0)
    {
        return false;
    }

    key = trimmed[..splitIndex].Trim();
    value = TrimYarnScalar(trimmed[(splitIndex + 1)..]);
    return key.Length > 0;
}
/// <summary>
/// Normalizes a Yarn scalar value: trims surrounding whitespace and removes
/// exactly one pair of enclosing double quotes when both are present.
/// </summary>
private static string TrimYarnScalar(string value)
{
    var trimmed = value.Trim();
    var isQuoted = trimmed.Length >= 2
        && trimmed[0] == '"'
        && trimmed[^1] == '"';
    return isQuoted ? trimmed[1..^1] : trimmed;
}
/// <summary>
/// Extracts the bare version from a pnpm lockfile package key. pnpm key syntax
/// varies by lockfile format version:
///   v5:  "/name/1.2.3" or "/name/1.2.3_peerhash"
///   v6+: "/@scope/name@1.2.3(react@18.0.0)" (also without leading slash in v9)
/// The last path segment is taken, then peer-dependency decorations
/// ("(...)" and "_suffix") are stripped, then anything up to the final '@'
/// so that scoped and v6+ keys yield just the version.
/// </summary>
/// <returns>The version portion, or an empty string when none can be found.</returns>
private static string ExtractVersionFromPnpmKey(string key)
{
    var parts = key.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
    if (parts.Length == 0)
    {
        return string.Empty;
    }

    var last = parts[^1];

    // v6+ peer-dependency suffix, e.g. "name@1.2.3(react@18.0.0)".
    var parenIndex = last.IndexOf('(');
    if (parenIndex > 0)
    {
        last = last[..parenIndex];
    }

    // v5 peer-dependency hash suffix, e.g. "1.2.3_react@16.8.0".
    // Underscore never appears in a valid semver version, so this is safe.
    var underscoreIndex = last.IndexOf('_');
    if (underscoreIndex > 0)
    {
        last = last[..underscoreIndex];
    }

    // v6+ "name@version" segments: keep only the text after the final '@'.
    // Index 0 would be a bare scope ("@scope"), which carries no version.
    var atIndex = last.LastIndexOf('@');
    if (atIndex > 0)
    {
        last = last[(atIndex + 1)..];
    }

    return last;
}
/// <summary>
/// Classifies why a lock entry has no integrity hash by sniffing the combined
/// locator/resolution/version text for well-known non-registry protocols.
/// Returns "missing" when no known protocol marker is found.
/// </summary>
private static string DetermineIntegrityMissingReason(string locator, string? resolved, string version)
{
    var haystack = (locator + " " + (resolved ?? string.Empty) + " " + version).ToLowerInvariant();

    // Probe order matters: the first marker found determines the reason,
    // matching the precedence of the original conditional chain.
    var probes = new (string Needle, string Reason)[]
    {
        ("workspace:", "workspace"),
        ("link:", "link"),
        ("file:", "file"),
        ("git", "git"),
        ("directory:", "directory"),
    };

    foreach (var (needle, reason) in probes)
    {
        if (haystack.Contains(needle, StringComparison.Ordinal))
        {
            return reason;
        }
    }

    return "missing";
}
private static string? ExtractQuotedValue(string line)
{
var quoteStart = line.IndexOf('"');
@@ -522,6 +704,11 @@ internal sealed class NodeLockData
private static string ExtractPackageNameFromYarnKey(string key)
{
if (string.Equals(key, "__metadata", StringComparison.OrdinalIgnoreCase))
{
return string.Empty;
}
var commaIndex = key.IndexOf(',');
var trimmed = commaIndex > 0 ? key[..commaIndex] : key;
trimmed = trimmed.Trim('"');
@@ -570,6 +757,25 @@ internal sealed class NodeLockData
}
}
/// <summary>
/// Indexes a lock entry under its lower-cased "name@version" key when both
/// parts are present; entries with a blank name or version are skipped.
/// Later writers overwrite earlier ones, same as the by-name index.
/// </summary>
private static void AddNameVersionEntry(IDictionary<string, NodeLockEntry> byNameVersion, NodeLockEntry entry)
{
    if (byNameVersion is null || entry is null)
    {
        return;
    }

    var name = entry.Name;
    var version = entry.Version;
    if (string.IsNullOrWhiteSpace(name) || string.IsNullOrWhiteSpace(version))
    {
        return;
    }

    byNameVersion[CreateNameVersionKey(name, version)] = entry;
}
/// <summary>
/// Builds the case-insensitive "name@version" key used by the name+version
/// lock index. Both components are trimmed and the result lower-cased.
/// </summary>
private static string CreateNameVersionKey(string name, string version)
{
    var composite = string.Concat(name.Trim(), "@", version.Trim());
    return composite.ToLowerInvariant();
}
private static string NormalizeLockPath(string path)
{
if (string.IsNullOrWhiteSpace(path))
@@ -595,22 +801,32 @@ internal sealed class NodeLockData
return string.Empty;
}
if (segments[0] == "node_modules")
var nodeModulesIndex = -1;
for (var i = segments.Length - 1; i >= 0; i--)
{
if (segments.Length >= 3 && segments[1].StartsWith('@'))
if (string.Equals(segments[i], "node_modules", StringComparison.Ordinal))
{
return $"{segments[1]}/{segments[2]}";
nodeModulesIndex = i;
break;
}
}
if (nodeModulesIndex >= 0)
{
if (nodeModulesIndex + 1 >= segments.Length)
{
return string.Empty;
}
return segments.Length >= 2 ? segments[1] : string.Empty;
var candidate = segments[nodeModulesIndex + 1];
if (candidate.StartsWith('@') && nodeModulesIndex + 2 < segments.Length)
{
return $"{candidate}/{segments[nodeModulesIndex + 2]}";
}
return candidate;
}
var last = segments[^1];
if (last.StartsWith('@') && segments.Length >= 2)
{
return $"{segments[^2]}/{last}";
}
return last;
return segments[^1];
}
}

View File

@@ -19,7 +19,9 @@ internal sealed record NodeLockEntry(
string? Resolved,
string? Integrity,
NodeDependencyScope? Scope = null,
bool IsOptional = false);
bool IsOptional = false,
bool IntegrityMissing = false,
string? IntegrityMissingReason = null);
internal static class NodeLockEntryExtensions
{

View File

@@ -6,6 +6,8 @@ namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
internal sealed class NodePackage
{
private readonly Dictionary<string, string?> _extraMetadata = new(StringComparer.Ordinal);
public NodePackage(
string name,
string version,
@@ -212,6 +214,13 @@ internal sealed class NodePackage
.ToArray();
}
/// <summary>
/// Records in the extra-metadata bag that the import scan for this package was
/// cut short by the file/byte budget, preserving how far the scan progressed.
/// Counts are formatted with the invariant culture for deterministic output.
/// </summary>
public void MarkImportScanSkipped(int filesScanned, long bytesScanned)
{
    var invariant = CultureInfo.InvariantCulture;
    _extraMetadata["importScanSkipped"] = "true";
    _extraMetadata["importScan.filesScanned"] = filesScanned.ToString(invariant);
    _extraMetadata["importScan.bytesScanned"] = bytesScanned.ToString(invariant);
}
public IReadOnlyCollection<KeyValuePair<string, string?>> CreateMetadata()
{
var entries = new List<KeyValuePair<string, string?>>(8)
@@ -360,6 +369,14 @@ internal sealed class NodePackage
entries.Add(new KeyValuePair<string, string?>("license", License));
}
foreach (var pair in _extraMetadata)
{
if (!string.IsNullOrWhiteSpace(pair.Key) && pair.Value is not null)
{
entries.Add(new KeyValuePair<string, string?>(pair.Key, pair.Value));
}
}
return entries
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
.ToArray();

View File

@@ -13,6 +13,26 @@ internal static class NodePackageCollector
"__pycache__"
};
/// <summary>
/// Builds a per-workspace dependency index keyed by workspace-relative path,
/// reading each member's package.json when it exists on disk. Members whose
/// manifest is missing are silently omitted.
/// </summary>
private static IReadOnlyDictionary<string, NodeDependencyIndex> BuildWorkspaceDependencyIndices(string rootPath, NodeWorkspaceIndex workspaceIndex)
{
    // Keys are compared case-insensitively on Windows to match the file system.
    var pathComparer = OperatingSystem.IsWindows()
        ? StringComparer.OrdinalIgnoreCase
        : StringComparer.Ordinal;
    var result = new Dictionary<string, NodeDependencyIndex>(pathComparer);

    foreach (var memberRelative in workspaceIndex.GetMembers())
    {
        var memberAbsolute = Path.Combine(rootPath, memberRelative.Replace('/', Path.DirectorySeparatorChar));
        var manifestPath = Path.Combine(memberAbsolute, "package.json");
        if (File.Exists(manifestPath))
        {
            result[memberRelative] = NodeDependencyIndex.CreateFromPackageJsonPath(manifestPath);
        }
    }

    return result;
}
public static IReadOnlyList<NodePackage> CollectPackages(LanguageAnalyzerContext context, NodeLockData lockData, NodeProjectInput projectInput, CancellationToken cancellationToken)
{
var packages = new List<NodePackage>();
@@ -21,11 +41,12 @@ internal static class NodePackageCollector
var rootPackageJson = Path.Combine(context.RootPath, "package.json");
var workspaceIndex = NodeWorkspaceIndex.Create(context.RootPath);
var workspaceDependencyIndices = BuildWorkspaceDependencyIndices(context.RootPath, workspaceIndex);
var yarnPnpPresent = projectInput.YarnPnpPresent;
if (File.Exists(rootPackageJson))
{
var rootPackage = TryCreatePackage(context, rootPackageJson, string.Empty, lockData, workspaceIndex, yarnPnpPresent, cancellationToken);
var rootPackage = TryCreatePackage(context, rootPackageJson, string.Empty, lockData, workspaceIndex, workspaceDependencyIndices, yarnPnpPresent, cancellationToken);
if (rootPackage is not null)
{
packages.Add(rootPackage);
@@ -41,7 +62,7 @@ internal static class NodePackageCollector
continue;
}
ProcessPackageDirectory(context, workspaceAbsolute, lockData, workspaceIndex, includeNestedNodeModules: false, packages, visited, yarnPnpPresent, cancellationToken);
ProcessPackageDirectory(context, workspaceAbsolute, lockData, workspaceIndex, workspaceDependencyIndices, includeNestedNodeModules: false, packages, visited, yarnPnpPresent, cancellationToken);
var workspaceNodeModules = Path.Combine(workspaceAbsolute, "node_modules");
if (Directory.Exists(workspaceNodeModules))
@@ -50,9 +71,21 @@ internal static class NodePackageCollector
}
}
AddAdditionalSourceRootPackages(
context,
projectInput.SourceRoots,
nodeModuleRoots,
lockData,
workspaceIndex,
workspaceDependencyIndices,
packages,
visited,
yarnPnpPresent,
cancellationToken);
foreach (var nodeModules in nodeModuleRoots.OrderBy(static path => path, StringComparer.Ordinal))
{
TraverseDirectory(context, nodeModules, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraverseDirectory(context, nodeModules, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
}
TraverseTarballs(context, projectInput.Tarballs, packages, visited, yarnPnpPresent, cancellationToken);
@@ -76,6 +109,154 @@ internal static class NodePackageCollector
return packages;
}
/// <summary>
/// Discovers and processes packages under any additional source roots beyond
/// the primary analyzer root, registering each package's node_modules
/// directory for the later traversal pass. No-op when only the primary root
/// is configured.
/// </summary>
private static void AddAdditionalSourceRootPackages(
    LanguageAnalyzerContext context,
    IReadOnlyList<string> sourceRoots,
    HashSet<string> nodeModuleRoots,
    NodeLockData lockData,
    NodeWorkspaceIndex workspaceIndex,
    IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
    List<NodePackage> packages,
    HashSet<string> visited,
    bool yarnPnpPresent,
    CancellationToken cancellationToken)
{
    // A single source root is the primary root itself; nothing extra to do.
    if (sourceRoots.Count <= 1)
    {
        return;
    }

    var primaryRoot = Path.GetFullPath(context.RootPath);
    var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;

    foreach (var sourceRoot in sourceRoots)
    {
        cancellationToken.ThrowIfCancellationRequested();

        if (string.IsNullOrWhiteSpace(sourceRoot))
        {
            continue;
        }

        var fullSourceRoot = Path.GetFullPath(sourceRoot);
        if (pathComparer.Equals(fullSourceRoot, primaryRoot))
        {
            // The primary root is handled by the main collection pass.
            continue;
        }

        // Bounded breadth-first discovery keeps pathological trees cheap.
        var discovered = DiscoverPackageDirectories(fullSourceRoot, maxDepth: 4, maxDirsVisited: 8000, maxPackages: 64, cancellationToken);
        foreach (var packageDirectory in discovered)
        {
            ProcessPackageDirectory(
                context,
                packageDirectory,
                lockData,
                workspaceIndex,
                workspaceDependencyIndices,
                includeNestedNodeModules: false,
                packages,
                visited,
                yarnPnpPresent,
                cancellationToken);

            var nestedNodeModules = Path.Combine(packageDirectory, "node_modules");
            if (Directory.Exists(nestedNodeModules))
            {
                nodeModuleRoots.Add(nestedNodeModules);
            }
        }
    }
}
/// <summary>
/// Bounded breadth-first search for directories containing a package.json.
/// The depth, directory-visit, and package-count caps keep runaway trees from
/// dominating the scan; dependency stores (node_modules/.pnpm/.yarn) and .git
/// are never descended into. Results are returned sorted ordinally.
/// </summary>
private static IReadOnlyList<string> DiscoverPackageDirectories(
    string rootPath,
    int maxDepth,
    int maxDirsVisited,
    int maxPackages,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(rootPath) || !Directory.Exists(rootPath))
    {
        return Array.Empty<string>();
    }

    var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
    var seen = new HashSet<string>(pathComparer) { rootPath };
    var found = new List<string>();
    var pending = new Queue<(string Path, int Depth)>();
    pending.Enqueue((rootPath, 0));

    // Skip reparse points (symlinks/junctions) and device nodes to avoid cycles.
    var options = new EnumerationOptions
    {
        RecurseSubdirectories = false,
        IgnoreInaccessible = true,
        AttributesToSkip = FileAttributes.ReparsePoint | FileAttributes.Device
    };

    var visitedCount = 0;
    while (pending.Count > 0)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var (current, depth) = pending.Dequeue();
        if (++visitedCount > maxDirsVisited)
        {
            break;
        }

        if (File.Exists(Path.Combine(current, "package.json")))
        {
            found.Add(current);
            if (found.Count >= maxPackages)
            {
                break;
            }
        }

        if (depth >= maxDepth)
        {
            continue;
        }

        IEnumerable<string> children;
        try
        {
            children = Directory.EnumerateDirectories(current, "*", options);
        }
        catch
        {
            // Inaccessible directory: skip its subtree, keep scanning siblings.
            continue;
        }

        foreach (var child in children.OrderBy(static p => p, StringComparer.Ordinal))
        {
            var name = Path.GetFileName(child);
            if (string.IsNullOrWhiteSpace(name))
            {
                continue;
            }

            var isIgnored = name.Equals("node_modules", StringComparison.OrdinalIgnoreCase)
                || name.Equals(".pnpm", StringComparison.OrdinalIgnoreCase)
                || name.Equals(".yarn", StringComparison.OrdinalIgnoreCase)
                || name.Equals(".git", StringComparison.OrdinalIgnoreCase);
            if (isIgnored)
            {
                continue;
            }

            if (seen.Add(child))
            {
                pending.Enqueue((child, depth + 1));
            }
        }
    }

    found.Sort(StringComparer.Ordinal);
    return found;
}
/// <summary>
/// Filters out packages that are declared-only (no on-disk evidence) when Yarn PnP data is available.
/// Only emits packages that are actually resolved in the PnP resolution map.
@@ -113,7 +294,13 @@ internal static class NodePackageCollector
private static void AttachImports(LanguageAnalyzerContext context, List<NodePackage> packages, CancellationToken cancellationToken)
{
foreach (var package in packages)
const int maxFilesPerPackage = 500;
const long maxBytesPerPackage = 5L * 1024 * 1024;
const long maxFileBytes = 512L * 1024;
const int maxDepth = 20;
foreach (var package in packages.Where(static p => string.IsNullOrEmpty(p.RelativePathNormalized) || p.IsWorkspaceMember)
.OrderBy(static p => p.RelativePathNormalized, StringComparer.Ordinal))
{
cancellationToken.ThrowIfCancellationRequested();
@@ -126,10 +313,41 @@ internal static class NodePackageCollector
continue;
}
foreach (var file in EnumerateSourceFiles(packageRoot))
var filesScanned = 0;
long bytesScanned = 0;
var capped = false;
foreach (var file in EnumerateSourceFiles(packageRoot, maxDepth))
{
cancellationToken.ThrowIfCancellationRequested();
if (filesScanned >= maxFilesPerPackage || bytesScanned >= maxBytesPerPackage)
{
capped = true;
break;
}
long length;
try
{
length = new FileInfo(file).Length;
}
catch
{
continue;
}
if (length <= 0 || length > maxFileBytes)
{
continue;
}
if (bytesScanned + length > maxBytesPerPackage)
{
capped = true;
break;
}
string content;
try
{
@@ -140,6 +358,9 @@ internal static class NodePackageCollector
continue;
}
bytesScanned += length;
filesScanned++;
var relativeSource = context.GetRelativePath(file).Replace(Path.DirectorySeparatorChar, '/');
var imports = NodeImportWalker.AnalyzeImports(context.RootPath, relativeSource, content);
foreach (var edge in imports)
@@ -147,6 +368,11 @@ internal static class NodePackageCollector
package.AddImport(edge);
}
}
if (capped)
{
package.MarkImportScanSkipped(filesScanned, bytesScanned);
}
}
}
@@ -237,27 +463,100 @@ internal static class NodePackageCollector
return merged;
}
private static IEnumerable<string> EnumerateSourceFiles(string root)
private static IEnumerable<string> EnumerateSourceFiles(string root, int maxDepth)
{
foreach (var extension in new[] { ".js", ".jsx", ".mjs", ".cjs", ".ts", ".tsx" })
var extensions = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
foreach (var file in Directory.EnumerateFiles(root, "*" + extension, new EnumerationOptions
{
RecurseSubdirectories = true,
MatchCasing = MatchCasing.CaseInsensitive,
IgnoreInaccessible = true
}))
".js",
".jsx",
".mjs",
".cjs",
".ts",
".tsx",
".mts",
".cts"
};
var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
var stack = new Stack<(string Path, int Depth)>();
stack.Push((root, 0));
while (stack.Count > 0)
{
var (current, depth) = stack.Pop();
IEnumerable<string> files;
try
{
yield return file;
files = Directory.EnumerateFiles(current, "*", SearchOption.TopDirectoryOnly);
}
catch
{
files = Array.Empty<string>();
}
foreach (var file in files.OrderBy(static f => f, pathComparer))
{
var ext = Path.GetExtension(file);
if (!string.IsNullOrWhiteSpace(ext) && extensions.Contains(ext))
{
yield return file;
}
}
if (depth >= maxDepth)
{
continue;
}
IEnumerable<string> dirs;
try
{
dirs = Directory.EnumerateDirectories(current, "*", SearchOption.TopDirectoryOnly);
}
catch
{
dirs = Array.Empty<string>();
}
var ordered = dirs
.Where(static d => !ShouldSkipImportDirectory(Path.GetFileName(d)))
.OrderBy(static d => d, pathComparer)
.ToArray();
for (var i = ordered.Length - 1; i >= 0; i--)
{
stack.Push((ordered[i], depth + 1));
}
}
}
/// <summary>
/// Decides whether a directory is excluded from import scanning: blank names,
/// dependency stores (node_modules/.pnpm), or anything rejected by the general
/// directory skip-list.
/// </summary>
private static bool ShouldSkipImportDirectory(string? name)
{
    if (string.IsNullOrWhiteSpace(name))
    {
        return true;
    }

    var isDependencyStore =
        string.Equals(name, "node_modules", StringComparison.OrdinalIgnoreCase) ||
        string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase);

    return isDependencyStore || ShouldSkipDirectory(name);
}
private static void TraverseDirectory(
LanguageAnalyzerContext context,
string directory,
NodeLockData lockData,
NodeWorkspaceIndex workspaceIndex,
IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
List<NodePackage> packages,
HashSet<string> visited,
bool yarnPnpPresent,
@@ -285,7 +584,7 @@ internal static class NodePackageCollector
if (string.Equals(name, ".pnpm", StringComparison.OrdinalIgnoreCase))
{
TraversePnpmStore(context, child, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraversePnpmStore(context, child, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
continue;
}
@@ -293,12 +592,12 @@ internal static class NodePackageCollector
{
foreach (var scoped in Directory.EnumerateDirectories(child))
{
ProcessPackageDirectory(context, scoped, lockData, workspaceIndex, includeNestedNodeModules: true, packages, visited, yarnPnpPresent, cancellationToken);
ProcessPackageDirectory(context, scoped, lockData, workspaceIndex, workspaceDependencyIndices, includeNestedNodeModules: true, packages, visited, yarnPnpPresent, cancellationToken);
}
continue;
}
ProcessPackageDirectory(context, child, lockData, workspaceIndex, includeNestedNodeModules: true, packages, visited, yarnPnpPresent, cancellationToken);
ProcessPackageDirectory(context, child, lockData, workspaceIndex, workspaceDependencyIndices, includeNestedNodeModules: true, packages, visited, yarnPnpPresent, cancellationToken);
}
}
@@ -307,6 +606,7 @@ internal static class NodePackageCollector
string pnpmDirectory,
NodeLockData lockData,
NodeWorkspaceIndex workspaceIndex,
IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
List<NodePackage> packages,
HashSet<string> visited,
bool yarnPnpPresent,
@@ -319,7 +619,7 @@ internal static class NodePackageCollector
var nestedNodeModules = Path.Combine(storeEntry, "node_modules");
if (Directory.Exists(nestedNodeModules))
{
TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
}
}
}
@@ -329,6 +629,7 @@ internal static class NodePackageCollector
string directory,
NodeLockData lockData,
NodeWorkspaceIndex workspaceIndex,
IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
bool includeNestedNodeModules,
List<NodePackage> packages,
HashSet<string> visited,
@@ -343,14 +644,14 @@ internal static class NodePackageCollector
// Already processed this path.
if (includeNestedNodeModules)
{
TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
}
return;
}
if (File.Exists(packageJsonPath))
{
var package = TryCreatePackage(context, packageJsonPath, relativeDirectory, lockData, workspaceIndex, yarnPnpPresent, cancellationToken);
var package = TryCreatePackage(context, packageJsonPath, relativeDirectory, lockData, workspaceIndex, workspaceDependencyIndices, yarnPnpPresent, cancellationToken);
if (package is not null)
{
packages.Add(package);
@@ -359,7 +660,7 @@ internal static class NodePackageCollector
if (includeNestedNodeModules)
{
TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraverseNestedNodeModules(context, directory, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
}
}
@@ -368,13 +669,14 @@ internal static class NodePackageCollector
string directory,
NodeLockData lockData,
NodeWorkspaceIndex workspaceIndex,
IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
List<NodePackage> packages,
HashSet<string> visited,
bool yarnPnpPresent,
CancellationToken cancellationToken)
{
var nestedNodeModules = Path.Combine(directory, "node_modules");
TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
TraverseDirectory(context, nestedNodeModules, lockData, workspaceIndex, workspaceDependencyIndices, packages, visited, yarnPnpPresent, cancellationToken);
}
private static void TraverseTarballs(
@@ -602,27 +904,57 @@ internal static class NodePackageCollector
string relativeDirectory,
NodeLockData lockData,
NodeWorkspaceIndex workspaceIndex,
IReadOnlyDictionary<string, NodeDependencyIndex> workspaceDependencyIndices,
bool yarnPnpPresent,
CancellationToken cancellationToken)
{
try
{
using var stream = File.OpenRead(packageJsonPath);
using var document = JsonDocument.Parse(stream);
const int maxHashBytes = 1_048_576;
var root = document.RootElement;
return TryCreatePackageFromJson(
context,
root,
relativeDirectory,
BuildLocator(relativeDirectory),
context.UsageHints.IsPathUsed(packageJsonPath),
cancellationToken,
lockData,
workspaceIndex,
packageJsonPath,
packageSha256: null,
yarnPnpPresent: yarnPnpPresent);
var sha256Hex = default(string);
JsonDocument document;
var info = new FileInfo(packageJsonPath);
if (info is { Exists: true } && info.Length is > 0 and <= maxHashBytes)
{
var bytes = File.ReadAllBytes(packageJsonPath);
if (bytes.Length is > 0 and <= maxHashBytes)
{
sha256Hex = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
}
var parseBytes = bytes;
if (bytes.Length >= 3 && bytes[0] == 0xEF && bytes[1] == 0xBB && bytes[2] == 0xBF)
{
parseBytes = bytes[3..];
}
document = JsonDocument.Parse(parseBytes);
}
else
{
using var stream = File.OpenRead(packageJsonPath);
document = JsonDocument.Parse(stream);
}
using (document)
{
var root = document.RootElement;
return TryCreatePackageFromJson(
context,
root,
relativeDirectory,
BuildLocator(relativeDirectory),
context.UsageHints.IsPathUsed(packageJsonPath),
cancellationToken,
lockData,
workspaceIndex,
workspaceDependencyIndices,
packageJsonPath,
packageSha256: sha256Hex,
yarnPnpPresent: yarnPnpPresent);
}
}
catch (IOException)
{
@@ -643,6 +975,7 @@ internal static class NodePackageCollector
CancellationToken cancellationToken,
NodeLockData? lockData = null,
NodeWorkspaceIndex? workspaceIndex = null,
IReadOnlyDictionary<string, NodeDependencyIndex>? workspaceDependencyIndices = null,
string? packageJsonPath = null,
string? packageSha256 = null,
bool yarnPnpPresent = false)
@@ -675,21 +1008,31 @@ internal static class NodePackageCollector
isPrivate = privateElement.GetBoolean();
}
var lockEntry = lockData?.TryGet(relativeDirectory, name, out var entry) == true ? entry : null;
var lockEntry = lockData?.TryGet(relativeDirectory, name, version, out var entry) == true ? entry : null;
var lockLocator = BuildLockLocator(lockEntry);
var lockSource = lockEntry?.Source;
// Get scope from lock entry (populated by NodeLockData from package.json)
// or from the dependency index directly if this is a root package
NodeDependencyScope? scope = lockEntry?.Scope;
var isOptional = lockEntry?.IsOptional ?? false;
if (scope is null && lockData?.DependencyIndex is { } dependencyIndex)
NodeDependencyScope? scope = null;
var isOptional = false;
if (workspaceIndex is not null
&& workspaceDependencyIndices is not null
&& workspaceIndex.TryGetOwningWorkspace(relativeDirectory, out var owningWorkspace)
&& workspaceDependencyIndices.TryGetValue(owningWorkspace, out var owningIndex)
&& owningIndex.TryGetScope(name, out var workspaceScope))
{
if (dependencyIndex.TryGetScope(name, out var foundScope))
{
scope = foundScope;
isOptional = foundScope == NodeDependencyScope.Optional;
}
scope = workspaceScope;
isOptional = workspaceScope == NodeDependencyScope.Optional;
}
else if (lockData?.DependencyIndex is { } dependencyIndex && dependencyIndex.TryGetScope(name, out var rootScope))
{
scope = rootScope;
isOptional = rootScope == NodeDependencyScope.Optional;
}
else if (lockEntry?.Scope is { } lockScope)
{
scope = lockScope;
isOptional = lockEntry?.IsOptional ?? false;
}
// Extract license from package.json

View File

@@ -4,6 +4,10 @@ namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
internal sealed class NodeWorkspaceIndex
{
private const int WorkspaceGlobMaxDepth = 10;
private const int WorkspaceGlobMaxDirsVisited = 20_000;
private const int WorkspaceGlobMaxMembers = 2_000;
private readonly string _rootPath;
private readonly HashSet<string> _workspacePaths;
private readonly Dictionary<string, string> _workspaceByName;
@@ -18,7 +22,8 @@ internal sealed class NodeWorkspaceIndex
public static NodeWorkspaceIndex Create(string rootPath)
{
var normalizedRoot = Path.GetFullPath(rootPath);
var workspacePaths = new HashSet<string>(StringComparer.Ordinal);
var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
var workspacePaths = new HashSet<string>(pathComparer);
var workspaceByName = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var packageJsonPath = Path.Combine(normalizedRoot, "package.json");
@@ -112,6 +117,44 @@ internal sealed class NodeWorkspaceIndex
return false;
}
/// <summary>
/// Resolves the workspace member that owns <paramref name="relativePath"/> by
/// checking the normalized path itself and then each ancestor path, longest
/// prefix first, against the known workspace roots.
/// </summary>
/// <returns><c>true</c> when an owning workspace root was found.</returns>
public bool TryGetOwningWorkspace(string relativePath, out string workspaceRoot)
{
    workspaceRoot = string.Empty;

    if (string.IsNullOrWhiteSpace(relativePath))
    {
        return false;
    }

    var normalized = NormalizeRelative(relativePath);
    if (string.IsNullOrWhiteSpace(normalized))
    {
        return false;
    }

    // Walk from the full path up through its ancestors so the most specific
    // (deepest) workspace wins for nested layouts.
    var candidate = normalized;
    while (candidate.Length > 0)
    {
        if (_workspacePaths.Contains(candidate))
        {
            workspaceRoot = candidate;
            return true;
        }

        var slashIndex = candidate.LastIndexOf('/');
        if (slashIndex <= 0)
        {
            break;
        }

        candidate = candidate[..slashIndex];
    }

    return false;
}
/// <summary>
/// Looks up the workspace-relative path registered for <paramref name="packageName"/>
/// (case-insensitive; the backing map is built with OrdinalIgnoreCase).
/// </summary>
public bool TryGetWorkspacePathByName(string packageName, out string? relativePath)
    => _workspaceByName.TryGetValue(packageName, out relativePath);
@@ -220,31 +263,133 @@ internal sealed class NodeWorkspaceIndex
private static IEnumerable<string> ExpandPattern(string rootPath, string pattern)
{
var cleanedPattern = pattern.Replace('\\', '/').Trim();
if (cleanedPattern.EndsWith("/*", StringComparison.Ordinal))
if (string.IsNullOrWhiteSpace(cleanedPattern))
{
var baseSegment = cleanedPattern[..^2];
var baseAbsolute = CombineAndNormalize(rootPath, baseSegment);
if (baseAbsolute is null || !Directory.Exists(baseAbsolute))
{
yield break;
}
foreach (var directory in Directory.EnumerateDirectories(baseAbsolute))
{
var normalized = NormalizeRelative(Path.GetRelativePath(rootPath, directory));
yield return normalized;
}
yield break;
}
else
if (cleanedPattern.StartsWith('!'))
{
var absolute = CombineAndNormalize(rootPath, cleanedPattern);
if (absolute is null || !Directory.Exists(absolute))
// Exclusion patterns are ignored (workspace discovery is conservative and bounded).
yield break;
}
cleanedPattern = cleanedPattern.TrimStart('.', '/');
if (string.IsNullOrWhiteSpace(cleanedPattern))
{
yield break;
}
var segments = cleanedPattern.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
if (segments.Length == 0)
{
yield break;
}
var pathComparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
var results = new HashSet<string>(pathComparer);
var visitedStates = new HashSet<string>(pathComparer);
var queue = new Queue<(string AbsolutePath, int SegmentIndex, int Depth)>();
queue.Enqueue((rootPath, 0, 0));
var dirsVisited = 0;
while (queue.Count > 0)
{
if (results.Count >= WorkspaceGlobMaxMembers || dirsVisited >= WorkspaceGlobMaxDirsVisited)
{
yield break;
break;
}
var normalized = NormalizeRelative(Path.GetRelativePath(rootPath, absolute));
yield return normalized;
var (absolutePath, segmentIndex, depth) = queue.Dequeue();
if (!visitedStates.Add($"{segmentIndex}:{absolutePath}"))
{
continue;
}
if (segmentIndex >= segments.Length)
{
var relative = NormalizeRelative(Path.GetRelativePath(rootPath, absolutePath));
if (string.IsNullOrWhiteSpace(relative))
{
continue;
}
var packageJsonPath = Path.Combine(absolutePath, "package.json");
if (!File.Exists(packageJsonPath))
{
continue;
}
results.Add(relative);
continue;
}
var segment = segments[segmentIndex];
if (segment == "**")
{
queue.Enqueue((absolutePath, segmentIndex + 1, depth));
if (depth >= WorkspaceGlobMaxDepth)
{
continue;
}
foreach (var child in EnumerateDirectoriesSorted(absolutePath))
{
if (dirsVisited++ >= WorkspaceGlobMaxDirsVisited)
{
break;
}
queue.Enqueue((child, segmentIndex, depth + 1));
}
continue;
}
if (segment == "*")
{
if (depth >= WorkspaceGlobMaxDepth)
{
continue;
}
foreach (var child in EnumerateDirectoriesSorted(absolutePath))
{
if (dirsVisited++ >= WorkspaceGlobMaxDirsVisited)
{
break;
}
queue.Enqueue((child, segmentIndex + 1, depth + 1));
}
continue;
}
if (IsExcludedSegment(segment))
{
continue;
}
var nextAbsolute = CombineAndNormalize(absolutePath, segment);
if (nextAbsolute is null || !Directory.Exists(nextAbsolute))
{
continue;
}
if (!IsUnderRoot(rootPath, nextAbsolute))
{
continue;
}
queue.Enqueue((nextAbsolute, segmentIndex + 1, depth + 1));
}
foreach (var member in results.OrderBy(static path => path, pathComparer))
{
yield return NormalizeRelative(member);
}
}
@@ -254,6 +399,35 @@ internal sealed class NodeWorkspaceIndex
return IsUnderRoot(rootPath, candidate) ? candidate : null;
}
/// <summary>
/// Lists child directories of <paramref name="absolutePath"/>, excluding
/// node_modules, in a deterministic order (case-insensitive on Windows,
/// ordinal elsewhere). Access/IO failures yield an empty list.
/// </summary>
private static IReadOnlyList<string> EnumerateDirectoriesSorted(string absolutePath)
{
    var comparer = OperatingSystem.IsWindows() ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
    try
    {
        var children = new List<string>();
        foreach (var child in Directory.EnumerateDirectories(absolutePath))
        {
            if (!IsExcludedDirectoryName(Path.GetFileName(child)))
            {
                children.Add(child);
            }
        }

        // Distinct sibling paths, so an unstable sort is fine here.
        children.Sort(comparer);
        return children;
    }
    catch (Exception ex) when (ex is UnauthorizedAccessException or IOException)
    {
        // DirectoryNotFoundException derives from IOException and is covered too.
        return Array.Empty<string>();
    }
}
// A glob segment is excluded from workspace traversal when it names the npm
// dependency directory (case-insensitive).
private static bool IsExcludedSegment(string segment)
    => StringComparer.OrdinalIgnoreCase.Equals(segment, "node_modules");

// Same exclusion for directory names discovered on disk; tolerates null names.
private static bool IsExcludedDirectoryName(string? name)
    => StringComparer.OrdinalIgnoreCase.Equals(name, "node_modules");
private static string NormalizeRelative(string relativePath)
{
if (string.IsNullOrEmpty(relativePath) || relativePath == ".")

View File

@@ -36,6 +36,8 @@ public sealed class NodeLanguageAnalyzer : ILanguageAnalyzer
usedByEntrypoint: package.IsUsedByEntrypoint);
}
EmitDeclaredOnlyPackages(writer, lockData, packages);
var observation = NodePhase22Analyzer.Analyze(context, cancellationToken);
if (observation.HasRecords)
{
@@ -84,4 +86,274 @@ public sealed class NodeLanguageAnalyzer : ILanguageAnalyzer
writer.AddRange(envWarnings);
}
}
/// <summary>
/// Emits components for lock/manifest-declared packages that are not backed by
/// an installed package in <paramref name="packages"/>. Entries with a concrete
/// resolved version become PURL components; range-only declarations are emitted
/// with an explicit key and no version, so no range ever lands in a PURL.
/// </summary>
private static void EmitDeclaredOnlyPackages(
    LanguageComponentWriter writer,
    NodeLockData lockData,
    IReadOnlyList<NodePackage> packages)
{
    ArgumentNullException.ThrowIfNull(writer);
    ArgumentNullException.ThrowIfNull(lockData);
    ArgumentNullException.ThrowIfNull(packages);
    if (lockData.DeclaredPackages.Count == 0)
    {
        return;
    }
    // Index installed inventory both by exact name@version and by bare name.
    var installedByNameVersion = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var installedNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (var package in packages)
    {
        installedNames.Add(package.Name);
        installedByNameVersion.Add($"{package.Name}@{package.Version}");
    }
    // Deterministic emission order: name, version, source, locator.
    foreach (var entry in lockData.DeclaredPackages.OrderBy(static e => e.Name, StringComparer.OrdinalIgnoreCase)
        .ThenBy(static e => e.Version ?? string.Empty, StringComparer.OrdinalIgnoreCase)
        .ThenBy(static e => e.Source, StringComparer.OrdinalIgnoreCase)
        .ThenBy(static e => e.Locator ?? string.Empty, StringComparer.OrdinalIgnoreCase))
    {
        if (string.IsNullOrWhiteSpace(entry.Name))
        {
            continue;
        }
        var versionSpec = ResolveVersionSpec(lockData, entry);
        // A version counts as resolved only when it came from a lockfile
        // (not package.json) and is a concrete pin, not a range.
        var hasResolvedVersion = entry.Source is not null
            && !string.Equals(entry.Source, "package.json", StringComparison.OrdinalIgnoreCase)
            && IsConcreteNpmVersion(entry.Version);
        var resolvedVersion = hasResolvedVersion ? entry.Version!.Trim() : null;
        // With a pinned version, require an exact name@version inventory match;
        // otherwise any installed version of the name counts as backing.
        var backedByInventory = resolvedVersion is not null
            ? installedByNameVersion.Contains($"{entry.Name}@{resolvedVersion}")
            : installedNames.Contains(entry.Name);
        if (backedByInventory)
        {
            continue; // already represented by an installed component
        }
        var declaredLocator = BuildDeclaredLocator(entry);
        var scopeValue = entry.Scope is { } scope ? scope.ToString().ToLowerInvariant() : null;
        var metadata = new List<KeyValuePair<string, string?>>(8)
        {
            new("declaredOnly", "true"),
            new("declared.source", entry.Source),
            new("declared.locator", declaredLocator),
            new("declared.versionSpec", versionSpec),
            new("declared.sourceType", ClassifyDeclaredSourceType(versionSpec)),
        };
        if (!string.IsNullOrWhiteSpace(scopeValue))
        {
            metadata.Add(new KeyValuePair<string, string?>("declared.scope", scopeValue));
        }
        if (resolvedVersion is not null)
        {
            metadata.Add(new KeyValuePair<string, string?>("declared.resolvedVersion", resolvedVersion));
        }
        if (entry.IntegrityMissing)
        {
            metadata.Add(new KeyValuePair<string, string?>("lockIntegrityMissing", "true"));
            if (!string.IsNullOrWhiteSpace(entry.IntegrityMissingReason))
            {
                metadata.Add(new KeyValuePair<string, string?>("lockIntegrityMissingReason", entry.IntegrityMissingReason));
            }
        }
        var evidence = new[]
        {
            new LanguageComponentEvidence(
                Kind: LanguageEvidenceKind.Metadata,
                Source: "node.declared",
                Locator: declaredLocator,
                Value: null,
                Sha256: null)
        };
        if (resolvedVersion is not null)
        {
            // Concrete version: safe to mint a pkg:npm PURL.
            var purl = BuildNpmPurl(entry.Name, resolvedVersion);
            writer.AddFromPurl(
                analyzerId: "node",
                purl: purl,
                name: entry.Name,
                version: resolvedVersion,
                type: "npm",
                metadata: metadata,
                evidence: evidence,
                usedByEntrypoint: false);
            continue;
        }
        // Range/tag/path spec: emit with an explicit key and no version.
        var componentKey = LanguageExplicitKey.Create("node", "npm", entry.Name, versionSpec, declaredLocator);
        writer.AddFromExplicitKey(
            analyzerId: "node",
            componentKey: componentKey,
            purl: null,
            name: entry.Name,
            version: null,
            type: "npm",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }
}
/// <summary>
/// Builds a human-readable locator for a declared lock entry: locator alone when
/// there is no source, source alone when there is no locator, otherwise
/// "source:locator" — unless the locator already carries a "package.json#"
/// anchor for a package.json source, in which case it is used as-is.
/// </summary>
private static string BuildDeclaredLocator(NodeLockEntry entry)
{
    var hasSource = !string.IsNullOrWhiteSpace(entry.Source);
    var hasLocator = !string.IsNullOrWhiteSpace(entry.Locator);

    if (!hasSource)
    {
        // No source file recorded: fall back to the manifest itself.
        return hasLocator ? entry.Locator!.Trim() : "package.json";
    }

    if (!hasLocator)
    {
        return entry.Source!;
    }

    var locator = entry.Locator!.Trim();

    // Avoid "package.json:package.json#..." duplication.
    var alreadyAnchored = entry.Source!.Equals("package.json", StringComparison.OrdinalIgnoreCase)
        && locator.StartsWith("package.json#", StringComparison.OrdinalIgnoreCase);

    return alreadyAnchored ? locator : $"{entry.Source}:{locator}";
}
/// <summary>
/// Classifies an npm dependency specifier into a coarse source type:
/// workspace / link / file / git / tarball / tag / path / range / unknown.
/// Check order matters (e.g. "git+https://..." must classify as git, not tarball).
/// </summary>
private static string ClassifyDeclaredSourceType(string spec)
{
    if (string.IsNullOrWhiteSpace(spec))
    {
        return "unknown";
    }

    var value = spec.Trim();
    bool Has(string prefix) => value.StartsWith(prefix, StringComparison.OrdinalIgnoreCase);

    if (Has("workspace:"))
    {
        return "workspace";
    }

    if (Has("link:"))
    {
        return "link";
    }

    if (Has("file:"))
    {
        return "file";
    }

    if (Has("git+") || Has("git://") || Has("github:") || Has("gitlab:") || Has("bitbucket:"))
    {
        return "git";
    }

    if (Has("http://") || Has("https://"))
    {
        return "tarball";
    }

    // Common npm dist-tags (exact, case-sensitive match as in the original contract).
    if (value is "latest" or "next" or "beta" or "alpha" or "canary")
    {
        return "tag";
    }

    // Relative/absolute filesystem references without a scheme.
    if ((value.Length > 0 && (value[0] == '.' || value[0] == '/')) || value.Contains('\\'))
    {
        return "path";
    }

    // Everything else is treated as a semver range.
    return "range";
}
/// <summary>
/// Resolves the version specifier to report for a declared entry: prefer the
/// manifest-declared range from the dependency index, then the (trimmed) lock
/// entry version, then the "*" wildcard.
/// </summary>
private static string ResolveVersionSpec(NodeLockData lockData, NodeLockEntry entry)
{
    if (lockData.DependencyIndex.TryGetDeclaration(entry.Name, out var declaration)
        && declaration?.VersionRange is { } declaredRange
        && !string.IsNullOrWhiteSpace(declaredRange))
    {
        return declaredRange;
    }

    var version = entry.Version;
    return string.IsNullOrWhiteSpace(version) ? "*" : version!.Trim();
}
/// <summary>
/// Returns <c>true</c> when <paramref name="version"/> looks like a single
/// pinned npm version (e.g. "1.2.3", "1.0.0-beta.1") rather than a range,
/// wildcard, tag, alias, or path. Only concrete versions may be minted into
/// PURLs — ranges must never appear as a PURL version.
/// </summary>
private static bool IsConcreteNpmVersion(string? version)
{
    if (string.IsNullOrWhiteSpace(version))
    {
        return false;
    }

    version = version.Trim();

    // Range operators (^ ~ > <) mean the spec is not a pin.
    if (version.StartsWith('^') || version.StartsWith('~') || version.StartsWith('>') || version.StartsWith('<'))
    {
        return false;
    }

    // Compound ranges (space, '|'), wildcards ('*'), aliases (':'), and paths ('/').
    if (version.Contains(' ') || version.Contains('*') || version.Contains('|') || version.Contains(':') || version.Contains('/'))
    {
        return false;
    }

    // npm x-ranges ("1.x", "1.2.X") are ranges even though every character
    // passes the letter/digit filter below — reject them explicitly.
    foreach (var segment in version.Split('.'))
    {
        if (segment.Equals("x", StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }
    }

    // Accept digits, dots, pre-release/build separators, and letters
    // (pre-release identifiers like "beta"); require at least one digit.
    var hasDigit = false;
    foreach (var ch in version)
    {
        if (char.IsDigit(ch))
        {
            hasDigit = true;
            continue;
        }

        if (ch is '.' or '-' or '+' or '_' || char.IsLetter(ch))
        {
            continue;
        }

        return false;
    }

    return hasDigit;
}
/// <summary>
/// Builds a package-url for an npm package; the scope marker '@' is
/// percent-encoded per the purl spec (pkg:npm/%40scope/name@version).
/// </summary>
private static string BuildNpmPurl(string name, string version)
    => $"pkg:npm/{NormalizeNpmName(name)}@{version}";

/// <summary>
/// Trims the package name and replaces a leading '@' with "%40"; blank input
/// is returned unchanged.
/// </summary>
private static string NormalizeNpmName(string name)
{
    if (string.IsNullOrWhiteSpace(name))
    {
        return name;
    }

    var trimmed = name.Trim();
    return trimmed.StartsWith('@')
        ? $"%40{trimmed[1..]}"
        : trimmed;
}
}

View File

@@ -14,3 +14,19 @@
| SCANNER-ANALYZERS-NODE-22-010 | DONE | Runtime evidence hooks (ESM loader/CJS require) with path scrubbing and hashed loader IDs; ingestion to runtime-* records. | 2025-12-01 |
| SCANNER-ANALYZERS-NODE-22-011 | DONE | Packaged plug-in manifest (0.1.0) with runtime hooks; CLI/offline docs refreshed. | 2025-12-01 |
| SCANNER-ANALYZERS-NODE-22-012 | DONE | Container filesystem adapter (layer roots) + NODE_OPTIONS/env warnings emitted. | 2025-12-01 |
## Node Detection Gaps (Sprint 0406)
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| SCAN-NODE-406-001 | DONE | Emit declared-only components (explicit-key via LanguageExplicitKey; no range-as-version PURLs; sourceType metadata). | 2025-12-13 |
| SCAN-NODE-406-002 | DONE | Multi-version lock correctness + `(name,version)` matching. | 2025-12-13 |
| SCAN-NODE-406-003 | DONE | Yarn Berry (v2/v3) lock parsing. | 2025-12-13 |
| SCAN-NODE-406-004 | DONE | Harden pnpm lock parsing (integrity-missing, snapshots). | 2025-12-13 |
| SCAN-NODE-406-005 | DONE | Fix package-lock nested node_modules naming. | 2025-12-13 |
| SCAN-NODE-406-006 | DONE | Workspace glob expansion (`*`/`**`) + bounds. | 2025-12-13 |
| SCAN-NODE-406-007 | DONE | Workspace-aware dependency scopes. | 2025-12-13 |
| SCAN-NODE-406-008 | DONE | Import scanning correctness + bounds. | 2025-12-13 |
| SCAN-NODE-406-009 | DONE | Deterministic package.json hashing for on-disk packages + fixtures. | 2025-12-13 |
| SCAN-NODE-406-010 | DONE | Fixtures + goldens: lock-only package-lock/yarn-berry/pnpm, workspace glob (`*`/`**`), container app-root discovery. | 2025-12-13 |
| SCAN-NODE-406-011 | DONE | Docs + offline benchmark (Node contract doc + new bench scenario + import-scan metrics). | 2025-12-13 |

View File

@@ -87,25 +87,16 @@ internal sealed partial class PythonEntrypointDiscovery
{
cancellationToken.ThrowIfCancellationRequested();
var absolutePath = file.AbsolutePath;
if (file.IsFromArchive)
try
{
continue; // Can't read from archive directly yet
}
var fullPath = Path.Combine(_rootPath, absolutePath);
if (!File.Exists(fullPath))
{
fullPath = absolutePath;
if (!File.Exists(fullPath))
using var stream = await _vfs.OpenReadAsync(file.VirtualPath, cancellationToken).ConfigureAwait(false);
if (stream is null)
{
continue;
}
}
try
{
var content = await File.ReadAllTextAsync(fullPath, cancellationToken).ConfigureAwait(false);
using var reader = new StreamReader(stream);
var content = await reader.ReadToEndAsync(cancellationToken).ConfigureAwait(false);
ParseEntryPointsTxt(content, file.VirtualPath);
}
catch (IOException)
@@ -225,7 +216,7 @@ internal sealed partial class PythonEntrypointDiscovery
{
cancellationToken.ThrowIfCancellationRequested();
if (file.VirtualPath == "__main__.py")
if (string.Equals(file.AbsolutePath, "__main__.py", StringComparison.OrdinalIgnoreCase))
{
_entrypoints.Add(new PythonEntrypoint(
Name: "__main__",

View File

@@ -48,11 +48,15 @@ internal sealed partial class PipEditableAdapter : IPythonPackagingAdapter
continue;
}
// Look for .egg-info in the target
var (version, metadata, topLevel) = await ReadEggInfoAsync(vfs, targetPath, packageName, cancellationToken).ConfigureAwait(false);
// The editable target path in .egg-link can be absolute and host-specific.
// Prefer the VFS-mounted editable tree under the packageName prefix.
var editableRoot = packageName;
// Look for .egg-info in the editable root
var (version, metadata, topLevel) = await ReadEggInfoAsync(vfs, editableRoot, packageName, cancellationToken).ConfigureAwait(false);
// Also look for pyproject.toml for additional metadata
var pyprojectInfo = await ReadPyprojectAsync(vfs, targetPath, cancellationToken).ConfigureAwait(false);
var pyprojectInfo = await ReadPyprojectAsync(vfs, editableRoot, cancellationToken).ConfigureAwait(false);
if (pyprojectInfo.Name is not null)
{
@@ -79,7 +83,7 @@ internal sealed partial class PipEditableAdapter : IPythonPackagingAdapter
Extras: ImmutableArray<string>.Empty,
RecordFiles: ImmutableArray<PythonRecordEntry>.Empty,
InstallerTool: "pip",
EditableTarget: targetPath,
EditableTarget: editableRoot,
IsDirectDependency: true, // Editable installs are always direct
Confidence: PythonPackageConfidence.High);
}
@@ -110,7 +114,7 @@ internal sealed partial class PipEditableAdapter : IPythonPackagingAdapter
private static async Task<(string? Version, Dictionary<string, string> Metadata, ImmutableArray<string> TopLevel)> ReadEggInfoAsync(
PythonVirtualFileSystem vfs,
string targetPath,
string editableRoot,
string packageName,
CancellationToken cancellationToken)
{
@@ -119,8 +123,7 @@ internal sealed partial class PipEditableAdapter : IPythonPackagingAdapter
var topLevel = ImmutableArray<string>.Empty;
// Look for .egg-info directory
var eggInfoPattern = $"{packageName}.egg-info";
var eggInfoFiles = vfs.EnumerateFiles(targetPath, "*.egg-info/PKG-INFO").ToList();
var eggInfoFiles = vfs.EnumerateFiles(editableRoot, "*.egg-info/PKG-INFO").ToList();
PythonVirtualFile? pkgInfoFile = null;
foreach (var file in eggInfoFiles)
@@ -204,10 +207,10 @@ internal sealed partial class PipEditableAdapter : IPythonPackagingAdapter
private static async Task<(string? Name, string? Version)> ReadPyprojectAsync(
PythonVirtualFileSystem vfs,
string targetPath,
string editableRoot,
CancellationToken cancellationToken)
{
var pyprojectPath = $"{targetPath}/pyproject.toml";
var pyprojectPath = $"{editableRoot}/pyproject.toml";
try
{

View File

@@ -37,12 +37,16 @@ internal sealed class PythonPackageDiscovery
var errors = new List<PythonPackageDiscoveryError>();
var searchPaths = new List<string>();
// Gather all search paths from VFS
searchPaths.AddRange(vfs.SitePackagesPaths);
searchPaths.AddRange(vfs.SourceTreeRoots);
searchPaths.AddRange(vfs.EditablePaths);
// Gather all search paths from VFS (ordered by intended precedence).
// Later paths overwrite earlier ones on equal confidence.
searchPaths.Add(string.Empty); // workspace root (pyproject/locks/etc.)
searchPaths.AddRange(vfs.SourceTreeRoots.OrderBy(static path => path, StringComparer.Ordinal));
searchPaths.AddRange(vfs.EditablePaths.OrderBy(static path => path, StringComparer.Ordinal));
searchPaths.AddRange(vfs.SitePackagesPaths.OrderBy(static path => path, StringComparer.Ordinal));
searchPaths.AddRange(vfs.ZipArchivePaths.OrderBy(static path => path, StringComparer.Ordinal));
foreach (var path in searchPaths.Distinct())
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var path in searchPaths.Where(p => seen.Add(p)))
{
cancellationToken.ThrowIfCancellationRequested();

View File

@@ -0,0 +1,937 @@
using System.Buffers;
using System.Globalization;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem;
using Packaging = StellaOps.Scanner.Analyzers.Lang.Python.Internal.Packaging;
namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal;
internal static class PythonDistributionVfsLoader
{
/// <summary>
/// Builds a <see cref="PythonDistribution"/> (name, version, purl, metadata,
/// evidence) for an installed package by reading its dist-info/egg-info
/// directory through the VFS. Returns null when the metadata directory or the
/// METADATA/PKG-INFO file is missing, or name/version cannot be determined.
/// </summary>
public static async Task<PythonDistribution?> LoadAsync(
    LanguageAnalyzerContext context,
    PythonVirtualFileSystem vfs,
    Packaging.PythonPackageInfo package,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(context);
    ArgumentNullException.ThrowIfNull(vfs);
    cancellationToken.ThrowIfCancellationRequested();
    if (string.IsNullOrWhiteSpace(package.MetadataPath))
    {
        return null;
    }
    // Egg-info layouts carry PKG-INFO + installed-files.txt; dist-info carries METADATA + RECORD.
    var isEggInfo = package.Kind == Packaging.PythonPackageKind.Egg;
    var metadataName = isEggInfo ? "PKG-INFO" : "METADATA";
    var recordName = isEggInfo ? "installed-files.txt" : "RECORD";
    var metadataVirtualPath = $"{package.MetadataPath}/{metadataName}";
    if (!vfs.FileExists(metadataVirtualPath))
    {
        return null;
    }
    var metadataDocument = await PythonMetadataDocumentVfs.LoadAsync(vfs, metadataVirtualPath, cancellationToken).ConfigureAwait(false);
    // Metadata headers take precedence over the discovery-time package info.
    var name = (metadataDocument.GetFirst("Name") ?? package.Name)?.Trim();
    var version = (metadataDocument.GetFirst("Version") ?? package.Version)?.Trim();
    if (string.IsNullOrWhiteSpace(name) || string.IsNullOrWhiteSpace(version))
    {
        return null;
    }
    var normalizedName = PythonPathHelper.NormalizePackageName(name);
    var purl = $"pkg:pypi/{normalizedName}@{version}";
    var metadataEntries = new List<KeyValuePair<string, string?>>();
    var evidenceEntries = new List<LanguageComponentEvidence>();
    // Copy core metadata headers into component metadata.
    AppendMetadata(metadataEntries, "distInfoPath", NormalizeVfsPath(package.MetadataPath));
    AppendMetadata(metadataEntries, "name", name);
    AppendMetadata(metadataEntries, "version", version);
    AppendMetadata(metadataEntries, "normalizedName", normalizedName);
    AppendMetadata(metadataEntries, "summary", metadataDocument.GetFirst("Summary"));
    AppendMetadata(metadataEntries, "license", metadataDocument.GetFirst("License"));
    AppendMetadata(metadataEntries, "licenseExpression", metadataDocument.GetFirst("License-Expression"));
    AppendMetadata(metadataEntries, "homePage", metadataDocument.GetFirst("Home-page"));
    AppendMetadata(metadataEntries, "author", metadataDocument.GetFirst("Author"));
    AppendMetadata(metadataEntries, "authorEmail", metadataDocument.GetFirst("Author-email"));
    AppendMetadata(metadataEntries, "projectUrl", metadataDocument.GetFirst("Project-URL"));
    AppendMetadata(metadataEntries, "requiresPython", metadataDocument.GetFirst("Requires-Python"));
    AppendClassifiers(metadataEntries, metadataDocument);
    var requiresDist = metadataDocument.GetAll("Requires-Dist");
    if (requiresDist.Count > 0)
    {
        // All declared dependencies joined with ';' in document order.
        AppendMetadata(metadataEntries, "requiresDist", string.Join(';', requiresDist));
    }
    await AppendEntryPointsAsync(vfs, metadataEntries, $"{package.MetadataPath}/entry_points.txt", cancellationToken)
        .ConfigureAwait(false);
    if (!isEggInfo)
    {
        // The WHEEL file only exists in dist-info installs.
        await AppendWheelMetadataAsync(vfs, metadataEntries, $"{package.MetadataPath}/WHEEL", cancellationToken)
            .ConfigureAwait(false);
    }
    var installer = await ReadSingleLineAsync(vfs, $"{package.MetadataPath}/INSTALLER", cancellationToken).ConfigureAwait(false);
    if (!string.IsNullOrWhiteSpace(installer))
    {
        AppendMetadata(metadataEntries, "installer", installer);
    }
    await AppendDirectUrlAsync(context, vfs, metadataEntries, evidenceEntries, $"{package.MetadataPath}/direct_url.json", cancellationToken)
        .ConfigureAwait(false);
    // File-level evidence for each metadata artifact that exists.
    AddOptionalFileEvidence(context, vfs, evidenceEntries, metadataVirtualPath, metadataName);
    AddOptionalFileEvidence(context, vfs, evidenceEntries, $"{package.MetadataPath}/WHEEL", "WHEEL");
    AddOptionalFileEvidence(context, vfs, evidenceEntries, $"{package.MetadataPath}/entry_points.txt", "entry_points.txt");
    AddOptionalFileEvidence(context, vfs, evidenceEntries, $"{package.MetadataPath}/INSTALLER", "INSTALLER");
    AddOptionalFileEvidence(context, vfs, evidenceEntries, $"{package.MetadataPath}/{recordName}", recordName);
    AddOptionalFileEvidence(context, vfs, evidenceEntries, $"{package.MetadataPath}/direct_url.json", "direct_url.json");
    // Parse and verify the install record, surfacing verification counters.
    var recordVirtualPath = $"{package.MetadataPath}/{recordName}";
    var recordEntries = await ReadRecordAsync(vfs, recordVirtualPath, cancellationToken).ConfigureAwait(false);
    var recordVerification = await VerifyRecordAsync(vfs, package.MetadataPath, recordEntries, cancellationToken).ConfigureAwait(false);
    metadataEntries.Add(new KeyValuePair<string, string?>("record.totalEntries", recordVerification.TotalEntries.ToString(CultureInfo.InvariantCulture)));
    metadataEntries.Add(new KeyValuePair<string, string?>("record.hashedEntries", recordVerification.HashedEntries.ToString(CultureInfo.InvariantCulture)));
    metadataEntries.Add(new KeyValuePair<string, string?>("record.missingFiles", recordVerification.MissingFiles.ToString(CultureInfo.InvariantCulture)));
    metadataEntries.Add(new KeyValuePair<string, string?>("record.hashMismatches", recordVerification.HashMismatches.ToString(CultureInfo.InvariantCulture)));
    metadataEntries.Add(new KeyValuePair<string, string?>("record.ioErrors", recordVerification.IoErrors.ToString(CultureInfo.InvariantCulture)));
    if (recordVerification.UnsupportedAlgorithms.Count > 0)
    {
        AppendMetadata(
            metadataEntries,
            "record.unsupportedAlgorithms",
            string.Join(';', recordVerification.UnsupportedAlgorithms.OrderBy(static a => a, StringComparer.OrdinalIgnoreCase)));
    }
    evidenceEntries.AddRange(recordVerification.Evidence);
    AppendMetadata(metadataEntries, "provenance", isEggInfo ? "egg-info" : "dist-info");
    // Entrypoint usage is not resolved here; always reported as false by this loader.
    var usedByEntrypoint = false;
    return new PythonDistribution(
        name,
        version,
        purl,
        metadataEntries,
        evidenceEntries,
        usedByEntrypoint);
}
/// <summary>
/// Adds file evidence for <paramref name="virtualPath"/> when it exists in the
/// VFS. For archive-backed files the locator is the archive path and the
/// absolute path is recorded as the value; otherwise the absolute path is the
/// locator and the value is null.
/// </summary>
private static void AddOptionalFileEvidence(
    LanguageAnalyzerContext context,
    PythonVirtualFileSystem vfs,
    ICollection<LanguageComponentEvidence> evidence,
    string virtualPath,
    string source)
{
    var file = vfs.GetFile(virtualPath);
    if (file is null)
    {
        return;
    }

    var fromArchive = file.IsFromArchive && file.ArchivePath is not null;
    var locator = PythonPathHelper.NormalizeRelative(
        context,
        fromArchive ? file.ArchivePath! : file.AbsolutePath);

    evidence.Add(new LanguageComponentEvidence(
        LanguageEvidenceKind.File,
        source,
        locator,
        Value: fromArchive ? file.AbsolutePath : null,
        Sha256: null));
}
/// <summary>
/// Copies trove classifiers into metadata: a joined "classifiers" entry, one
/// "classifier[i]" entry per value (ordinal-sorted for determinism), and a
/// parallel "license.classifier[j]" series for values starting "License ::".
/// </summary>
private static void AppendClassifiers(
    ICollection<KeyValuePair<string, string?>> metadata,
    PythonMetadataDocumentVfs metadataDocument)
{
    var raw = metadataDocument.GetAll("Classifier");
    if (raw.Count == 0)
    {
        return;
    }

    // Trim, drop empties, then order deterministically.
    var ordered = new List<string>(raw.Count);
    foreach (var classifier in raw)
    {
        var trimmed = classifier.Trim();
        if (trimmed.Length > 0)
        {
            ordered.Add(trimmed);
        }
    }

    if (ordered.Count == 0)
    {
        return;
    }

    ordered.Sort(StringComparer.Ordinal);
    AppendMetadata(metadata, "classifiers", string.Join(';', ordered));

    var licenseIndex = 0;
    for (var index = 0; index < ordered.Count; index++)
    {
        var classifier = ordered[index];
        AppendMetadata(metadata, $"classifier[{index}]", classifier);

        if (classifier.StartsWith("License ::", StringComparison.OrdinalIgnoreCase))
        {
            AppendMetadata(metadata, $"license.classifier[{licenseIndex}]", classifier);
            licenseIndex++;
        }
    }
}
/// <summary>
/// Parses entry_points.txt (INI-like: "[group]" headers, "name = target"
/// lines) and records one metadata entry per group:
/// "entryPoints.&lt;group&gt;" => "name=target;name=target;...". Comment lines,
/// malformed pairs, and pairs outside any group are skipped; unreadable files
/// are silently ignored.
/// </summary>
private static async Task AppendEntryPointsAsync(
    PythonVirtualFileSystem vfs,
    ICollection<KeyValuePair<string, string?>> metadata,
    string entryPointsVirtualPath,
    CancellationToken cancellationToken)
{
    if (!vfs.FileExists(entryPointsVirtualPath))
    {
        return;
    }
    string? content;
    try
    {
        await using var stream = await vfs.OpenReadAsync(entryPointsVirtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return;
        }
        using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
        content = await reader.ReadToEndAsync(cancellationToken).ConfigureAwait(false);
    }
    catch (IOException)
    {
        // Entry points are supplementary metadata; read failures are tolerated.
        return;
    }
    var groups = new Dictionary<string, List<(string Name, string Target)>>(StringComparer.OrdinalIgnoreCase);
    string? currentGroup = null;
    foreach (var rawLine in content.Split('\n'))
    {
        // Trim also strips the trailing '\r' on CRLF files.
        var line = rawLine.Trim();
        if (line.Length == 0 || line.StartsWith('#'))
        {
            continue;
        }
        if (line.StartsWith('[') && line.EndsWith(']'))
        {
            currentGroup = line[1..^1].Trim();
            if (currentGroup.Length == 0)
            {
                // "[]" — suspend collection until a real header appears.
                currentGroup = null;
            }
            continue;
        }
        if (currentGroup is null)
        {
            // name=target outside a [group] has no meaning.
            continue;
        }
        var separator = line.IndexOf('=');
        if (separator <= 0)
        {
            continue;
        }
        var name = line[..separator].Trim();
        var target = line[(separator + 1)..].Trim();
        if (name.Length == 0 || target.Length == 0)
        {
            continue;
        }
        if (!groups.TryGetValue(currentGroup, out var list))
        {
            list = new List<(string Name, string Target)>();
            groups[currentGroup] = list;
        }
        list.Add((name, target));
    }
    // Deterministic output: groups ordered case-insensitively; entries keep file order.
    foreach (var group in groups.OrderBy(static g => g.Key, StringComparer.OrdinalIgnoreCase))
    {
        AppendMetadata(metadata, $"entryPoints.{group.Key}", string.Join(';', group.Value.Select(static ep => $"{ep.Name}={ep.Target}")));
    }
}
/// <summary>
/// Reads the WHEEL file ("Key: Value" lines) and copies a fixed set of keys
/// into metadata: wheel.version, wheel.tags, wheel.rootIsPurelib,
/// wheel.generator. Duplicate keys keep the last value; unreadable files are
/// silently ignored.
/// </summary>
private static async Task AppendWheelMetadataAsync(
    PythonVirtualFileSystem vfs,
    ICollection<KeyValuePair<string, string?>> metadata,
    string wheelVirtualPath,
    CancellationToken cancellationToken)
{
    if (!vfs.FileExists(wheelVirtualPath))
    {
        return;
    }
    var values = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    try
    {
        await using var stream = await vfs.OpenReadAsync(wheelVirtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return;
        }
        using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
        while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }
            var separator = line.IndexOf(':');
            if (separator <= 0)
            {
                // No key, or line starts with ':' — not a header line.
                continue;
            }
            var key = line[..separator].Trim();
            var value = line[(separator + 1)..].Trim();
            if (key.Length == 0 || value.Length == 0)
            {
                continue;
            }
            // Last occurrence wins for repeated keys.
            values[key] = value;
        }
    }
    catch (IOException)
    {
        return;
    }
    if (values.TryGetValue("Wheel-Version", out var wheelVersion))
    {
        AppendMetadata(metadata, "wheel.version", wheelVersion);
    }
    if (values.TryGetValue("Tag", out var tags))
    {
        AppendMetadata(metadata, "wheel.tags", tags);
    }
    if (values.TryGetValue("Root-Is-Purelib", out var purelib))
    {
        AppendMetadata(metadata, "wheel.rootIsPurelib", purelib);
    }
    if (values.TryGetValue("Generator", out var generator))
    {
        AppendMetadata(metadata, "wheel.generator", generator);
    }
}
/// <summary>
/// Reads the first line of a VFS file (UTF-8, BOM-aware). Returns null when the
/// file is absent, the stream cannot be opened, or an I/O error occurs.
/// </summary>
private static async Task<string?> ReadSingleLineAsync(
    PythonVirtualFileSystem vfs,
    string virtualPath,
    CancellationToken cancellationToken)
{
    if (!vfs.FileExists(virtualPath))
    {
        return null;
    }

    try
    {
        var stream = await vfs.OpenReadAsync(virtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return null;
        }

        await using (stream)
        {
            using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
            return await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
        }
    }
    catch (IOException)
    {
        return null;
    }
}
/// <summary>
/// Reads direct_url.json (pip's direct-URL provenance record, PEP 610) and
/// surfaces sourceUrl/sourceSubdirectory/sourceVcs/sourceCommit metadata plus
/// "editable"="true" for editable dir installs. Adds one Metadata evidence
/// entry when a URL is present. Invalid JSON and I/O errors are ignored —
/// the file is optional.
/// </summary>
private static async Task AppendDirectUrlAsync(
    LanguageAnalyzerContext context,
    PythonVirtualFileSystem vfs,
    ICollection<KeyValuePair<string, string?>> metadata,
    ICollection<LanguageComponentEvidence> evidence,
    string directUrlVirtualPath,
    CancellationToken cancellationToken)
{
    var file = vfs.GetFile(directUrlVirtualPath);
    if (file is null)
    {
        return;
    }
    try
    {
        await using var stream = await vfs.OpenReadAsync(directUrlVirtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return;
        }
        using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);
        var root = document.RootElement;
        var url = root.TryGetProperty("url", out var urlElement) ? urlElement.GetString() : null;
        var isEditable = root.TryGetProperty("dir_info", out var dirInfo) &&
            dirInfo.TryGetProperty("editable", out var editableValue) &&
            editableValue.GetBoolean();
        // dir_info is looked up again (reusing the same local) for the optional subdirectory.
        var subdir = root.TryGetProperty("dir_info", out dirInfo) &&
            dirInfo.TryGetProperty("subdirectory", out var subdirElement)
            ? subdirElement.GetString()
            : null;
        string? vcs = null;
        string? commit = null;
        if (root.TryGetProperty("vcs_info", out var vcsInfo))
        {
            vcs = vcsInfo.TryGetProperty("vcs", out var vcsElement) ? vcsElement.GetString() : null;
            commit = vcsInfo.TryGetProperty("commit_id", out var commitElement) ? commitElement.GetString() : null;
        }
        if (isEditable)
        {
            AppendMetadata(metadata, "editable", "true");
        }
        AppendMetadata(metadata, "sourceUrl", url);
        AppendMetadata(metadata, "sourceSubdirectory", subdir);
        AppendMetadata(metadata, "sourceVcs", vcs);
        AppendMetadata(metadata, "sourceCommit", commit);
        if (!string.IsNullOrWhiteSpace(url))
        {
            // Prefer the archive-relative locator when the file came out of an archive.
            var locator = file.IsFromArchive && file.ArchivePath is not null
                ? PythonPathHelper.NormalizeRelative(context, file.ArchivePath)
                : PythonPathHelper.NormalizeRelative(context, file.AbsolutePath);
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Metadata,
                "direct_url.json",
                locator,
                url,
                Sha256: null));
        }
    }
    catch (JsonException)
    {
        // Ignore invalid JSON
    }
    catch (IOException)
    {
        // Ignore read errors
    }
}
/// <summary>
/// Parses a dist-info RECORD file (CSV rows: path, "algorithm=hash", size).
/// Egg-info "installed-files.txt" records are delegated to
/// <see cref="ReadInstalledFilesAsync"/> (path-only format). Missing or
/// unreadable files yield an empty list.
/// </summary>
private static async Task<IReadOnlyList<PythonRecordEntry>> ReadRecordAsync(
    PythonVirtualFileSystem vfs,
    string recordVirtualPath,
    CancellationToken cancellationToken)
{
    if (!vfs.FileExists(recordVirtualPath))
    {
        return Array.Empty<PythonRecordEntry>();
    }
    var fileName = Path.GetFileName(recordVirtualPath);
    if (!string.IsNullOrWhiteSpace(fileName) &&
        fileName.EndsWith("installed-files.txt", StringComparison.OrdinalIgnoreCase))
    {
        return await ReadInstalledFilesAsync(vfs, recordVirtualPath, cancellationToken).ConfigureAwait(false);
    }
    try
    {
        await using var stream = await vfs.OpenReadAsync(recordVirtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return Array.Empty<PythonRecordEntry>();
        }
        using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
        var entries = new List<PythonRecordEntry>();
        while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (line.Length == 0)
            {
                continue;
            }
            var fields = ParseCsvLine(line);
            if (fields.Count < 1)
            {
                continue;
            }
            var entryPath = fields[0];
            string? algorithm = null;
            string? hashValue = null;
            // Second field, when present, is "<algorithm>=<hash>".
            if (fields.Count > 1 && !string.IsNullOrWhiteSpace(fields[1]))
            {
                var hashField = fields[1].Trim();
                var separator = hashField.IndexOf('=');
                if (separator > 0 && separator < hashField.Length - 1)
                {
                    algorithm = hashField[..separator];
                    hashValue = hashField[(separator + 1)..];
                }
            }
            long? size = null;
            // Third field, when present, is the file size; unparsable values are dropped.
            if (fields.Count > 2 &&
                long.TryParse(fields[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedSize))
            {
                size = parsedSize;
            }
            entries.Add(new PythonRecordEntry(entryPath, algorithm, hashValue, size));
        }
        return entries;
    }
    catch (IOException)
    {
        return Array.Empty<PythonRecordEntry>();
    }
}
/// <summary>
/// Reads a legacy egg-info "installed-files.txt" manifest: one relative path per
/// line, no hash or size information. Returns an empty list on read failure.
/// </summary>
private static async Task<IReadOnlyList<PythonRecordEntry>> ReadInstalledFilesAsync(
    PythonVirtualFileSystem vfs,
    string recordVirtualPath,
    CancellationToken cancellationToken)
{
    try
    {
        await using var stream = await vfs.OpenReadAsync(recordVirtualPath, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return Array.Empty<PythonRecordEntry>();
        }

        using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
        var result = new List<PythonRecordEntry>();

        string? line;
        while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var path = line.Trim();

            // Skip blanks and the "." self-reference some installers emit.
            if (path.Length == 0 || path == ".")
            {
                continue;
            }

            result.Add(new PythonRecordEntry(path, null, null, null));
        }

        return result;
    }
    catch (IOException)
    {
        return Array.Empty<PythonRecordEntry>();
    }
}
/// <summary>
/// Verifies RECORD entries against the virtual filesystem rooted at the parent of
/// the dist-info directory: counts totals, flags missing or out-of-root entries,
/// recomputes sha256 digests for hashed entries, and records derived evidence for
/// every anomaly. Algorithms other than sha256 are collected as "unsupported"
/// rather than verified.
/// </summary>
/// <param name="vfs">Virtual filesystem containing the installed files.</param>
/// <param name="distInfoVirtualPath">Virtual path of the dist-info directory the RECORD belongs to.</param>
/// <param name="entries">Parsed RECORD entries (paths relative to the dist-info parent).</param>
/// <param name="cancellationToken">Cancellation token checked once per entry.</param>
/// <returns>Aggregated counters plus the evidence produced during verification.</returns>
private static async Task<PythonRecordVerificationResult> VerifyRecordAsync(
    PythonVirtualFileSystem vfs,
    string distInfoVirtualPath,
    IReadOnlyList<PythonRecordEntry> entries,
    CancellationToken cancellationToken)
{
    // Nothing to verify: return an all-zero result without allocating evidence lists.
    if (entries.Count == 0)
    {
        return new PythonRecordVerificationResult(0, 0, 0, 0, 0, usedByEntrypoint: false, Array.Empty<string>(), Array.Empty<LanguageComponentEvidence>());
    }
    var evidence = new List<LanguageComponentEvidence>();
    var unsupported = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    // RECORD paths are relative to the directory containing the dist-info folder.
    var root = GetParentDirectory(distInfoVirtualPath);
    var total = 0;
    var hashed = 0;      // entries that declare any hash (including unsupported algorithms)
    var missing = 0;     // entries not found, or rejected as escaping the root
    var mismatched = 0;  // sha256 entries whose recomputed digest differs
    var ioErrors = 0;    // entries whose content could not be read for hashing
    foreach (var entry in entries)
    {
        cancellationToken.ThrowIfCancellationRequested();
        total++;
        var normalizedEntryPath = NormalizeRecordPath(entry.Path);
        if (normalizedEntryPath is null)
        {
            // Empty path or ".." traversal outside the root: counted as missing.
            missing++;
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Derived,
                "RECORD",
                NormalizeVfsPath(entry.Path),
                "outside-root",
                Sha256: null));
            continue;
        }
        var virtualPath = $"{root}/{normalizedEntryPath}";
        if (!vfs.FileExists(virtualPath))
        {
            missing++;
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Derived,
                "RECORD",
                NormalizeVfsPath(virtualPath),
                "missing",
                Sha256: null));
            continue;
        }
        // Entries without a hash (e.g. RECORD itself, *.pyc) are counted but not verified.
        if (string.IsNullOrWhiteSpace(entry.HashAlgorithm) || string.IsNullOrWhiteSpace(entry.HashValue))
        {
            continue;
        }
        hashed++;
        if (!string.Equals(entry.HashAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase))
        {
            // Only sha256 is verified; remember other algorithms for reporting.
            unsupported.Add(entry.HashAlgorithm);
            continue;
        }
        string? actualHash;
        try
        {
            actualHash = await ComputeSha256Base64Async(vfs, virtualPath, cancellationToken).ConfigureAwait(false);
        }
        catch (IOException)
        {
            // File exists but could not be read; record and keep verifying the rest.
            ioErrors++;
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Derived,
                "RECORD",
                NormalizeVfsPath(virtualPath),
                "io-error",
                Sha256: null));
            continue;
        }
        if (!string.Equals(actualHash, entry.HashValue, StringComparison.Ordinal))
        {
            mismatched++;
            evidence.Add(new LanguageComponentEvidence(
                LanguageEvidenceKind.Derived,
                "RECORD",
                NormalizeVfsPath(virtualPath),
                $"sha256 mismatch expected={entry.HashValue} actual={actualHash}",
                Sha256: actualHash));
        }
    }
    return new PythonRecordVerificationResult(
        total,
        hashed,
        missing,
        mismatched,
        ioErrors,
        usedByEntrypoint: false,
        unsupported.ToArray(),
        evidence);
}
/// <summary>
/// Computes the SHA-256 digest of a file in the virtual filesystem, encoded the way
/// RECORD files store it: urlsafe base64 ('+' -> '-', '/' -> '_') with the trailing
/// '=' padding stripped (wheel / PEP 376 binary distribution format).
/// The previous implementation returned standard padded base64, which can never
/// match a RECORD digest and made every hashed entry report a mismatch.
/// </summary>
/// <param name="vfs">Virtual filesystem to read from.</param>
/// <param name="virtualPath">Virtual path of the file to hash.</param>
/// <param name="cancellationToken">Cancellation token observed while streaming.</param>
/// <returns>The urlsafe, unpadded base64 SHA-256 digest.</returns>
/// <exception cref="IOException">Thrown when the file cannot be opened.</exception>
private static async Task<string> ComputeSha256Base64Async(
    PythonVirtualFileSystem vfs,
    string virtualPath,
    CancellationToken cancellationToken)
{
    await using var stream = await vfs.OpenReadAsync(virtualPath, cancellationToken).ConfigureAwait(false);
    if (stream is null)
    {
        throw new IOException("Unable to open file for hashing.");
    }

    // SHA256.HashDataAsync (.NET 7+) streams the content internally, replacing the
    // manual TransformBlock/TransformFinalBlock + ArrayPool plumbing.
    var digest = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);

    // RECORD uses urlsafe_b64encode(digest).rstrip('=') per the wheel spec.
    return Convert.ToBase64String(digest)
        .TrimEnd('=')
        .Replace('+', '-')
        .Replace('/', '_');
}
/// <summary>
/// Splits a single RECORD line into fields using minimal CSV rules: commas separate
/// fields, double quotes delimit quoted fields, and a doubled quote inside a quoted
/// field is an escaped literal quote. Always returns at least one field.
/// </summary>
private static List<string> ParseCsvLine(string line)
{
    var fields = new List<string>();
    var current = new StringBuilder();
    var quoted = false;

    for (var index = 0; index < line.Length; index++)
    {
        var ch = line[index];

        if (!quoted)
        {
            switch (ch)
            {
                case ',':
                    // Field boundary: flush the accumulated text.
                    fields.Add(current.ToString());
                    current.Clear();
                    break;
                case '"':
                    quoted = true;
                    break;
                default:
                    current.Append(ch);
                    break;
            }
            continue;
        }

        if (ch != '"')
        {
            current.Append(ch);
            continue;
        }

        // Inside quotes: "" is an escaped quote; a lone " closes the quoted run.
        if (index + 1 < line.Length && line[index + 1] == '"')
        {
            current.Append('"');
            index++;
        }
        else
        {
            quoted = false;
        }
    }

    fields.Add(current.ToString());
    return fields;
}
/// <summary>
/// Converts a RECORD entry path to forward slashes, strips any leading slash, and
/// rejects paths that could escape the package root via "..". Returns <c>null</c>
/// when the path is blank, empty after trimming, or unsafe.
/// </summary>
private static string? NormalizeRecordPath(string path)
{
    if (string.IsNullOrWhiteSpace(path))
    {
        return null;
    }

    var candidate = path.Replace('\\', '/').TrimStart('/');
    if (candidate.Length == 0)
    {
        return null;
    }

    var escapesRoot = candidate == ".." ||
        candidate.StartsWith("../", StringComparison.Ordinal) ||
        candidate.EndsWith("/..", StringComparison.Ordinal) ||
        candidate.Contains("/../", StringComparison.Ordinal);

    return escapesRoot ? null : candidate;
}
/// <summary>Normalizes a virtual path to forward slashes with no leading or trailing slash.</summary>
private static string NormalizeVfsPath(string path)
{
    return path.Replace('\\', '/').Trim('/');
}
/// <summary>
/// Returns the parent portion of a normalized virtual path, or the empty string when
/// the path has no parent segment (single-segment or empty paths).
/// </summary>
private static string GetParentDirectory(string path)
{
    var normalized = NormalizeVfsPath(path);
    var separator = normalized.LastIndexOf('/');
    return separator > 0 ? normalized[..separator] : string.Empty;
}
/// <summary>
/// Appends a metadata pair with the value trimmed; pairs with a blank key or a
/// blank/null value are silently dropped.
/// </summary>
private static void AppendMetadata(ICollection<KeyValuePair<string, string?>> metadata, string key, string? value)
{
    if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value))
    {
        return;
    }

    metadata.Add(new KeyValuePair<string, string?>(key, value.Trim()));
}
/// <summary>
/// Minimal RFC 822-style parser for Python core metadata files (METADATA / PKG-INFO)
/// read through the virtual filesystem. Continuation lines (leading space or tab)
/// are folded into the current header value; lookups are case-insensitive.
/// Parsing stops at the first blank line after the headers: the previous version
/// kept scanning, so long-description body lines containing ':' were misread as
/// additional headers.
/// </summary>
private sealed class PythonMetadataDocumentVfs
{
    // Header name -> values in document order (case-insensitive keys).
    private readonly Dictionary<string, List<string>> _values;

    private PythonMetadataDocumentVfs(Dictionary<string, List<string>> values)
    {
        _values = values;
    }

    /// <summary>
    /// Loads and parses a metadata document. Returns an empty document when the file
    /// is absent, its stream cannot be opened, or reading fails.
    /// </summary>
    public static async Task<PythonMetadataDocumentVfs> LoadAsync(
        PythonVirtualFileSystem vfs,
        string virtualPath,
        CancellationToken cancellationToken)
    {
        if (!vfs.FileExists(virtualPath))
        {
            return Empty();
        }
        try
        {
            await using var stream = await vfs.OpenReadAsync(virtualPath, cancellationToken).ConfigureAwait(false);
            if (stream is null)
            {
                return Empty();
            }
            using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true);
            var values = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);
            string? currentKey = null;
            var builder = new StringBuilder();
            while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line)
            {
                cancellationToken.ThrowIfCancellationRequested();
                if (line.Length == 0)
                {
                    Commit();
                    // The first blank line after the headers starts the long-description
                    // body; stop so body text is not parsed as further headers.
                    if (values.Count > 0)
                    {
                        break;
                    }
                    continue;
                }
                if (line.StartsWith(' ') || line.StartsWith('\t'))
                {
                    // Continuation line: fold into the in-progress header value.
                    if (currentKey is not null)
                    {
                        if (builder.Length > 0)
                        {
                            builder.Append(' ');
                        }
                        builder.Append(line.Trim());
                    }
                    continue;
                }
                Commit();
                var separator = line.IndexOf(':');
                if (separator <= 0)
                {
                    // Not a "Key: value" line; ignore it.
                    continue;
                }
                currentKey = line[..separator].Trim();
                builder.Clear();
                builder.Append(line[(separator + 1)..].Trim());
            }
            Commit();
            return new PythonMetadataDocumentVfs(values);

            // Flushes the in-progress header (if any) into the value map.
            void Commit()
            {
                if (string.IsNullOrWhiteSpace(currentKey))
                {
                    return;
                }
                if (!values.TryGetValue(currentKey, out var list))
                {
                    list = new List<string>();
                    values[currentKey] = list;
                }
                var value = builder.ToString().Trim();
                if (value.Length > 0)
                {
                    list.Add(value);
                }
                currentKey = null;
                builder.Clear();
            }
        }
        catch (IOException)
        {
            return Empty();
        }

        static PythonMetadataDocumentVfs Empty()
            => new(new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase));
    }

    /// <summary>Returns the first value recorded for <paramref name="key"/>, or <c>null</c>.</summary>
    public string? GetFirst(string key)
    {
        if (key is null)
        {
            return null;
        }
        return _values.TryGetValue(key, out var list) && list.Count > 0
            ? list[0]
            : null;
    }

    /// <summary>Returns all values recorded for <paramref name="key"/>, possibly empty.</summary>
    public IReadOnlyList<string> GetAll(string key)
    {
        if (key is null)
        {
            return Array.Empty<string>();
        }
        return _values.TryGetValue(key, out var list)
            ? list.AsReadOnly()
            : Array.Empty<string>();
    }
}
}

View File

@@ -1,6 +1,7 @@
using System.Text.Json;
using System.Text.RegularExpressions;
using System.Linq;
using StellaOps.Scanner.Analyzers.Lang.Python.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem;
@@ -81,9 +82,11 @@ internal sealed partial class PythonInputNormalizer
await DetectLayoutAsync(cancellationToken).ConfigureAwait(false);
await DetectVersionTargetsAsync(cancellationToken).ConfigureAwait(false);
DetectSitePackages();
DetectLayerSitePackages();
DetectWheels();
DetectZipapps();
await DetectEditablesAsync(cancellationToken).ConfigureAwait(false);
NormalizeDetectedInputs();
return this;
}
@@ -94,6 +97,11 @@ internal sealed partial class PythonInputNormalizer
{
var builder = PythonVirtualFileSystem.CreateBuilder();
AddProjectFiles(builder);
var condaMeta = Path.Combine(_rootPath, "conda-meta");
builder.AddDirectory(condaMeta, "conda-meta", PythonFileSource.ProjectConfig, includeHiddenFiles: false);
// Add site-packages in order (later takes precedence)
foreach (var sitePackagesPath in _sitePackagesPaths)
{
@@ -582,7 +590,8 @@ internal sealed partial class PythonInputNormalizer
{
try
{
foreach (var pythonDir in Directory.EnumerateDirectories(libDir, "python*", SafeEnumeration))
foreach (var pythonDir in Directory.EnumerateDirectories(libDir, "python*", SafeEnumeration)
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
searchPaths.Add(Path.Combine(pythonDir, "site-packages"));
}
@@ -611,6 +620,25 @@ internal sealed partial class PythonInputNormalizer
searchPaths.Add(Path.Combine(_rootPath, "usr", "local", "lib", "python3.12", "site-packages"));
searchPaths.Add(Path.Combine(_rootPath, "usr", "lib", "python3", "dist-packages"));
// System-style lib/pythonX.Y/site-packages under the workspace root
var rootLibDir = Path.Combine(_rootPath, "lib");
if (Directory.Exists(rootLibDir))
{
try
{
foreach (var pythonDir in Directory.EnumerateDirectories(rootLibDir, "python*", SafeEnumeration)
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
searchPaths.Add(Path.Combine(pythonDir, "site-packages"));
searchPaths.Add(Path.Combine(pythonDir, "dist-packages"));
}
}
catch (UnauthorizedAccessException)
{
// Ignore
}
}
// Root site-packages (common for some Docker images)
searchPaths.Add(Path.Combine(_rootPath, "site-packages"));
@@ -623,6 +651,17 @@ internal sealed partial class PythonInputNormalizer
}
}
private void DetectLayerSitePackages()
{
foreach (var sitePackagesPath in PythonContainerAdapter.DiscoverLayerSitePackages(_rootPath))
{
if (!_sitePackagesPaths.Contains(sitePackagesPath, StringComparer.OrdinalIgnoreCase))
{
_sitePackagesPaths.Add(sitePackagesPath);
}
}
}
private void DetectWheels()
{
// Look for wheels in common locations
@@ -643,7 +682,8 @@ internal sealed partial class PythonInputNormalizer
try
{
foreach (var wheel in Directory.EnumerateFiles(searchPath, "*.whl", SafeEnumeration))
foreach (var wheel in Directory.EnumerateFiles(searchPath, "*.whl", SafeEnumeration)
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
if (!_wheelPaths.Contains(wheel, StringComparer.OrdinalIgnoreCase))
{
@@ -700,37 +740,24 @@ internal sealed partial class PythonInputNormalizer
private void DetectZipapps()
{
if (!Directory.Exists(_rootPath))
foreach (var zipappPath in PythonZipappAdapter.DiscoverZipapps(_rootPath))
{
return;
}
try
{
foreach (var pyz in Directory.EnumerateFiles(_rootPath, "*.pyz", SafeEnumeration))
if (!_zipappPaths.Contains(zipappPath, StringComparer.OrdinalIgnoreCase))
{
_zipappPaths.Add(pyz);
_zipappPaths.Add(zipappPath);
}
foreach (var pyzw in Directory.EnumerateFiles(_rootPath, "*.pyzw", SafeEnumeration))
{
_zipappPaths.Add(pyzw);
}
}
catch (UnauthorizedAccessException)
{
// Ignore
}
}
private async Task DetectEditablesAsync(CancellationToken cancellationToken)
{
// Look for .egg-link files in site-packages
foreach (var sitePackagesPath in _sitePackagesPaths)
foreach (var sitePackagesPath in _sitePackagesPaths.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
try
{
foreach (var eggLink in Directory.EnumerateFiles(sitePackagesPath, "*.egg-link", SafeEnumeration))
foreach (var eggLink in Directory.EnumerateFiles(sitePackagesPath, "*.egg-link", SafeEnumeration)
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
var content = await File.ReadAllTextAsync(eggLink, cancellationToken).ConfigureAwait(false);
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
@@ -761,11 +788,12 @@ internal sealed partial class PythonInputNormalizer
}
// Look for direct_url.json with editable flag in dist-info directories
foreach (var sitePackagesPath in _sitePackagesPaths)
foreach (var sitePackagesPath in _sitePackagesPaths.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
try
{
foreach (var distInfo in Directory.EnumerateDirectories(sitePackagesPath, "*.dist-info", SafeEnumeration))
foreach (var distInfo in Directory.EnumerateDirectories(sitePackagesPath, "*.dist-info", SafeEnumeration)
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase))
{
var directUrlPath = Path.Combine(distInfo, "direct_url.json");
if (!File.Exists(directUrlPath))
@@ -815,6 +843,84 @@ internal sealed partial class PythonInputNormalizer
}
}
private void NormalizeDetectedInputs()
{
NormalizePathList(_sitePackagesPaths);
NormalizePathList(_wheelPaths);
NormalizePathList(_zipappPaths);
NormalizeEditableList(_editablePaths);
}
private static void NormalizePathList(List<string> paths)
{
var normalized = paths
.Where(static p => !string.IsNullOrWhiteSpace(p))
.Select(static p => Path.GetFullPath(p.Trim()))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static p => p, StringComparer.Ordinal)
.ToList();
paths.Clear();
paths.AddRange(normalized);
}
private static void NormalizeEditableList(List<(string Path, string? PackageName)> editables)
{
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var normalized = new List<(string Path, string? PackageName)>();
foreach (var (path, packageName) in editables)
{
if (string.IsNullOrWhiteSpace(path))
{
continue;
}
var fullPath = Path.GetFullPath(path.Trim());
var name = string.IsNullOrWhiteSpace(packageName) ? null : packageName.Trim();
var key = $"{name ?? string.Empty}|{fullPath}";
if (!seen.Add(key))
{
continue;
}
normalized.Add((fullPath, name));
}
editables.Clear();
editables.AddRange(normalized
.OrderBy(static e => e.PackageName ?? string.Empty, StringComparer.OrdinalIgnoreCase)
.ThenBy(static e => e.Path, StringComparer.Ordinal));
}
private void AddProjectFiles(PythonVirtualFileSystem.Builder builder)
{
AddProjectFile(builder, "pyproject.toml", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "setup.py", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "setup.cfg", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "runtime.txt", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "Dockerfile", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "tox.ini", PythonFileSource.ProjectConfig);
AddProjectFile(builder, "requirements.txt", PythonFileSource.LockFile);
AddProjectFile(builder, "requirements-dev.txt", PythonFileSource.LockFile);
AddProjectFile(builder, "requirements.prod.txt", PythonFileSource.LockFile);
AddProjectFile(builder, "Pipfile.lock", PythonFileSource.LockFile);
AddProjectFile(builder, "poetry.lock", PythonFileSource.LockFile);
}
private void AddProjectFile(PythonVirtualFileSystem.Builder builder, string relativePath, PythonFileSource source)
{
var absolutePath = Path.Combine(_rootPath, relativePath);
if (!File.Exists(absolutePath))
{
return;
}
builder.AddFile(relativePath, absolutePath, source);
}
[GeneratedRegex(@"requires-python\s*=\s*[""']?(?<version>[^""'\n]+)", RegexOptions.IgnoreCase)]
private static partial Regex RequiresPythonPattern();

View File

@@ -54,9 +54,9 @@ internal sealed partial class PythonVirtualFileSystem
public int FileCount => _files.Count;
/// <summary>
/// Gets all files in the virtual filesystem.
/// Gets all files in the virtual filesystem, ordered deterministically by virtual path.
/// </summary>
public IEnumerable<PythonVirtualFile> Files => _files.Values;
public IEnumerable<PythonVirtualFile> Files => Paths.Select(path => _files[path]);
/// <summary>
/// Gets all virtual paths in sorted order.
@@ -230,17 +230,17 @@ internal sealed partial class PythonVirtualFileSystem
var normalized = NormalizePath(virtualPath);
var prefix = normalized.Length == 0 ? string.Empty : normalized + "/";
foreach (var kvp in _files)
foreach (var key in _files.Keys.OrderBy(static path => path, StringComparer.Ordinal))
{
if (!kvp.Key.StartsWith(prefix, StringComparison.Ordinal))
if (!key.StartsWith(prefix, StringComparison.Ordinal))
{
continue;
}
var relative = kvp.Key[prefix.Length..];
var relative = key[prefix.Length..];
if (regex.IsMatch(relative))
{
yield return kvp.Value;
yield return _files[key];
}
}
}
@@ -291,11 +291,32 @@ internal sealed partial class PythonVirtualFileSystem
{
private readonly Dictionary<string, PythonVirtualFile> _files = new(StringComparer.Ordinal);
private readonly HashSet<string> _processedArchives = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, int> _archiveAliasCounters = new(StringComparer.OrdinalIgnoreCase);
private readonly HashSet<string> _sourceTreeRoots = new(StringComparer.Ordinal);
private readonly HashSet<string> _sitePackagesPaths = new(StringComparer.Ordinal);
private readonly HashSet<string> _editablePaths = new(StringComparer.Ordinal);
private readonly HashSet<string> _zipArchivePaths = new(StringComparer.Ordinal);
/// <summary>
/// Adds files from an arbitrary directory at a specific virtual prefix.
/// </summary>
public Builder AddDirectory(
string directoryPath,
string virtualPrefix,
PythonFileSource source,
string? layerDigest = null,
bool includeHiddenFiles = false)
{
if (!Directory.Exists(directoryPath))
{
return this;
}
var basePath = Path.GetFullPath(directoryPath);
AddDirectoryRecursive(basePath, NormalizePath(virtualPrefix), source, layerDigest, includeHiddenFiles);
return this;
}
/// <summary>
/// Adds files from a site-packages directory.
/// </summary>
@@ -308,7 +329,7 @@ internal sealed partial class PythonVirtualFileSystem
var basePath = Path.GetFullPath(sitePackagesPath);
_sitePackagesPaths.Add(string.Empty); // Root of the VFS
AddDirectoryRecursive(basePath, string.Empty, PythonFileSource.SitePackages, layerDigest);
AddDirectoryRecursive(basePath, string.Empty, PythonFileSource.SitePackages, layerDigest, includeHiddenFiles: false);
return this;
}
@@ -322,12 +343,13 @@ internal sealed partial class PythonVirtualFileSystem
return this;
}
_zipArchivePaths.Add(wheelPath);
var virtualRoot = CreateArchiveVirtualRoot("wheel", wheelPath);
_zipArchivePaths.Add(virtualRoot);
try
{
using var archive = ZipFile.OpenRead(wheelPath);
AddArchiveEntries(archive, wheelPath, PythonFileSource.Wheel);
AddArchiveEntries(archive, wheelPath, virtualRoot, PythonFileSource.Wheel);
}
catch (InvalidDataException)
{
@@ -351,7 +373,8 @@ internal sealed partial class PythonVirtualFileSystem
return this;
}
_zipArchivePaths.Add(zipappPath);
var virtualRoot = CreateArchiveVirtualRoot("zipapp", zipappPath);
_zipArchivePaths.Add(virtualRoot);
try
{
@@ -366,7 +389,7 @@ internal sealed partial class PythonVirtualFileSystem
stream.Position = offset;
using var archive = new ZipArchive(stream, ZipArchiveMode.Read);
AddArchiveEntries(archive, zipappPath, PythonFileSource.Zipapp);
AddArchiveEntries(archive, zipappPath, virtualRoot, PythonFileSource.Zipapp);
}
catch (InvalidDataException)
{
@@ -390,14 +413,15 @@ internal sealed partial class PythonVirtualFileSystem
return this;
}
_zipArchivePaths.Add(sdistPath);
var virtualRoot = CreateArchiveVirtualRoot("sdist", sdistPath);
_zipArchivePaths.Add(virtualRoot);
try
{
if (sdistPath.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
using var archive = ZipFile.OpenRead(sdistPath);
AddArchiveEntries(archive, sdistPath, PythonFileSource.Sdist);
AddArchiveEntries(archive, sdistPath, virtualRoot, PythonFileSource.Sdist);
}
// Note: .tar.gz support would require TarReader from System.Formats.Tar
// For now, we handle the common .zip case
@@ -427,7 +451,7 @@ internal sealed partial class PythonVirtualFileSystem
var basePath = Path.GetFullPath(editablePath);
var prefix = string.IsNullOrEmpty(packageName) ? string.Empty : packageName + "/";
_editablePaths.Add(prefix.TrimEnd('/'));
AddDirectoryRecursive(basePath, prefix.TrimEnd('/'), PythonFileSource.Editable, layerDigest: null);
AddDirectoryRecursive(basePath, prefix.TrimEnd('/'), PythonFileSource.Editable, layerDigest: null, includeHiddenFiles: false);
return this;
}
@@ -443,7 +467,7 @@ internal sealed partial class PythonVirtualFileSystem
var basePath = Path.GetFullPath(sourcePath);
_sourceTreeRoots.Add(string.Empty); // Root of the VFS
AddDirectoryRecursive(basePath, string.Empty, PythonFileSource.SourceTree, layerDigest: null);
AddDirectoryRecursive(basePath, string.Empty, PythonFileSource.SourceTree, layerDigest: null, includeHiddenFiles: false);
return this;
}
@@ -522,7 +546,8 @@ internal sealed partial class PythonVirtualFileSystem
string basePath,
string virtualPrefix,
PythonFileSource source,
string? layerDigest)
string? layerDigest,
bool includeHiddenFiles)
{
try
{
@@ -537,7 +562,7 @@ internal sealed partial class PythonVirtualFileSystem
// Skip __pycache__ and hidden files
if (normalizedRelative.Contains("/__pycache__/", StringComparison.Ordinal) ||
normalizedRelative.StartsWith("__pycache__/", StringComparison.Ordinal) ||
Path.GetFileName(file).StartsWith('.'))
(!includeHiddenFiles && Path.GetFileName(file).StartsWith('.')))
{
continue;
}
@@ -566,7 +591,7 @@ internal sealed partial class PythonVirtualFileSystem
}
}
private void AddArchiveEntries(ZipArchive archive, string archivePath, PythonFileSource source)
private void AddArchiveEntries(ZipArchive archive, string archivePath, string virtualRoot, PythonFileSource source)
{
foreach (var entry in archive.Entries)
{
@@ -576,7 +601,8 @@ internal sealed partial class PythonVirtualFileSystem
continue;
}
var virtualPath = entry.FullName.Replace('\\', '/');
var entryPath = entry.FullName.Replace('\\', '/').TrimStart('/');
var virtualPath = $"{virtualRoot}/{entryPath}";
// Skip __pycache__ in archives too
if (virtualPath.Contains("/__pycache__/", StringComparison.Ordinal) ||
@@ -587,7 +613,7 @@ internal sealed partial class PythonVirtualFileSystem
AddFile(
virtualPath,
entry.FullName,
entryPath,
source,
layerDigest: null,
archivePath: archivePath,
@@ -595,6 +621,22 @@ internal sealed partial class PythonVirtualFileSystem
}
}
private string CreateArchiveVirtualRoot(string kind, string archivePath)
{
var baseName = Path.GetFileName(archivePath);
var key = $"{kind}/{baseName}";
if (!_archiveAliasCounters.TryGetValue(key, out var count))
{
_archiveAliasCounters[key] = 1;
return $"archives/{kind}/{baseName}";
}
count++;
_archiveAliasCounters[key] = count;
return $"archives/{kind}/{baseName}~{count}";
}
private static long FindZipOffset(Stream stream)
{
// ZIP files start with PK\x03\x04 signature

View File

@@ -1,18 +1,13 @@
using System.Linq;
using System.Text.Json;
using StellaOps.Scanner.Analyzers.Lang.Python.Internal;
using StellaOps.Scanner.Analyzers.Lang.Python.Internal.Packaging;
using StellaOps.Scanner.Analyzers.Lang.Python.Internal.VirtualFileSystem;
namespace StellaOps.Scanner.Analyzers.Lang.Python;
public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
{
private static readonly EnumerationOptions Enumeration = new()
{
RecurseSubdirectories = true,
IgnoreInaccessible = true,
AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint
};
public string Id => "python";
public string DisplayName => "Python Analyzer";
@@ -43,73 +38,33 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
// Analyze zipapps in workspace and container layers
var zipappAnalysis = PythonZipappAdapter.AnalyzeAll(context.RootPath);
// Collect dist-info directories from both root and container layers
var distInfoDirectories = CollectDistInfoDirectories(context.RootPath);
var projectAnalysis = await PythonProjectAnalysis.AnalyzeAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
var vfs = projectAnalysis.VirtualFileSystem;
foreach (var distInfoPath in distInfoDirectories)
var packageDiscovery = new PythonPackageDiscovery();
var discoveryResult = await packageDiscovery.DiscoverAsync(vfs, cancellationToken).ConfigureAwait(false);
foreach (var package in discoveryResult.Packages
.Where(static p => !string.IsNullOrWhiteSpace(p.Version))
.OrderBy(static p => p.NormalizedName, StringComparer.Ordinal)
.ThenBy(static p => p.Version, StringComparer.Ordinal))
{
cancellationToken.ThrowIfCancellationRequested();
PythonDistribution? distribution;
try
{
distribution = await PythonDistributionLoader.LoadAsync(context, distInfoPath, cancellationToken).ConfigureAwait(false);
}
catch (IOException)
{
continue;
}
catch (JsonException)
{
continue;
}
catch (UnauthorizedAccessException)
{
continue;
}
if (distribution is null)
{
continue;
}
var metadata = distribution.SortedMetadata.ToList();
if (lockData.TryGet(distribution.Name, distribution.Version, out var lockEntry))
{
matchedLocks.Add(lockEntry!.DeclarationKey);
AppendLockMetadata(metadata, lockEntry);
}
else if (hasLockEntries)
{
metadata.Add(new KeyValuePair<string, string?>("lockMissing", "true"));
}
// Append runtime information
AppendRuntimeMetadata(metadata, runtimeInfo);
// Append environment variables (PYTHONPATH/PYTHONHOME)
AppendEnvironmentMetadata(metadata, environment);
// Append startup hooks warnings
AppendStartupHooksMetadata(metadata, startupHooks);
// Append zipapp analysis
AppendZipappMetadata(metadata, zipappAnalysis);
// Collect evidence including startup hooks
var evidence = distribution.SortedEvidence.ToList();
evidence.AddRange(startupHooks.ToEvidence(context));
writer.AddFromPurl(
analyzerId: "python",
purl: distribution.Purl,
name: distribution.Name,
version: distribution.Version,
type: "pypi",
metadata: metadata,
evidence: evidence,
usedByEntrypoint: distribution.UsedByEntrypoint);
await EmitDiscoveredPackageAsync(
context,
writer,
vfs,
package,
lockData,
matchedLocks,
hasLockEntries,
runtimeInfo,
environment,
startupHooks,
zipappAnalysis,
cancellationToken)
.ConfigureAwait(false);
}
if (lockData.Entries.Count > 0)
@@ -121,18 +76,18 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
continue;
}
var normalizedName = PythonPathHelper.NormalizePackageName(entry.Name);
var declaredMetadata = new List<KeyValuePair<string, string?>>
{
new("declaredOnly", "true"),
new("pkg.kind", "DeclaredOnly"),
new("pkg.confidence", PythonPackageConfidence.Medium.ToString()),
new("pkg.location", entry.Locator),
new("lockSource", entry.Source),
new("lockLocator", entry.Locator)
};
AppendCommonLockFields(declaredMetadata, entry);
var version = string.IsNullOrWhiteSpace(entry.Version) ? "editable" : entry.Version!;
var purl = $"pkg:pypi/{PythonPathHelper.NormalizePackageName(entry.Name)}@{version}";
var evidence = new[]
{
new LanguageComponentEvidence(
@@ -143,6 +98,49 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
Sha256: null)
};
if (string.IsNullOrWhiteSpace(entry.Version))
{
var editableSpec = NormalizeEditableSpec(context, entry.EditablePath, out var specRedacted);
declaredMetadata.Add(new KeyValuePair<string, string?>("declared.source", entry.Source));
declaredMetadata.Add(new KeyValuePair<string, string?>("declared.locator", entry.Locator));
declaredMetadata.Add(new KeyValuePair<string, string?>("declared.versionSpec", editableSpec));
declaredMetadata.Add(new KeyValuePair<string, string?>("declared.scope", "unknown"));
declaredMetadata.Add(new KeyValuePair<string, string?>("declared.sourceType", "editable"));
if (!string.IsNullOrWhiteSpace(editableSpec))
{
declaredMetadata.Add(new KeyValuePair<string, string?>("lockEditablePath", editableSpec));
}
if (specRedacted)
{
declaredMetadata.Add(new KeyValuePair<string, string?>("lockEditablePathRedacted", "true"));
}
var componentKey = LanguageExplicitKey.Create("python", "pypi", normalizedName, editableSpec, entry.Locator);
writer.AddFromExplicitKey(
analyzerId: "python",
componentKey: componentKey,
purl: null,
name: entry.Name,
version: null,
type: "pypi",
metadata: declaredMetadata,
evidence: evidence,
usedByEntrypoint: false);
continue;
}
AppendCommonLockFields(declaredMetadata, entry);
var version = entry.Version!.Trim();
if (version.Length == 0)
{
continue;
}
var purl = $"pkg:pypi/{normalizedName}@{version}";
writer.AddFromPurl(
analyzerId: "python",
purl: purl,
@@ -156,6 +154,284 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
}
}
private static string NormalizeEditableSpec(LanguageAnalyzerContext context, string? editablePath, out bool redacted)
{
redacted = false;
if (string.IsNullOrWhiteSpace(editablePath))
{
return string.Empty;
}
var trimmed = editablePath.Trim().Trim('"', '\'');
if (trimmed.Length == 0)
{
return string.Empty;
}
var normalized = trimmed.Replace('\\', '/');
var hasDrivePrefix = trimmed.Length >= 2 && char.IsLetter(trimmed[0]) && trimmed[1] == ':';
var isAbsolute = Path.IsPathRooted(trimmed) ||
hasDrivePrefix ||
normalized.StartsWith("/", StringComparison.Ordinal) ||
normalized.StartsWith("//", StringComparison.Ordinal);
if (!isAbsolute)
{
return normalized;
}
try
{
var relative = context.GetRelativePath(trimmed);
if (!string.IsNullOrWhiteSpace(relative) &&
relative != "." &&
!relative.StartsWith("..", StringComparison.Ordinal) &&
relative.IndexOf(':') < 0)
{
return relative.Replace('\\', '/');
}
}
catch
{
}
redacted = true;
normalized = normalized.TrimEnd('/');
var lastSlash = normalized.LastIndexOf('/');
var fileName = lastSlash >= 0 && lastSlash < normalized.Length - 1 ? normalized[(lastSlash + 1)..] : normalized;
return string.IsNullOrWhiteSpace(fileName) ? "editable" : fileName;
}
/// <summary>
/// Emits a single discovered Python package to <paramref name="writer"/>.
/// Resolution order: (1) a physical dist-info/egg-info metadata directory on disk,
/// (2) archive-backed metadata loaded through the VFS, (3) a minimal PURL-only
/// fallback built from the package name/version. Packages without a concrete
/// version are skipped silently.
/// </summary>
private static async Task EmitDiscoveredPackageAsync(
LanguageAnalyzerContext context,
LanguageComponentWriter writer,
PythonVirtualFileSystem vfs,
PythonPackageInfo package,
PythonLockData lockData,
ISet<string> matchedLocks,
bool hasLockEntries,
PythonRuntimeInfo? runtimeInfo,
PythonEnvironment environment,
PythonStartupHooks startupHooks,
PythonZipappAnalysis zipappAnalysis,
CancellationToken cancellationToken)
{
// NOTE(review): null-forgiving — assumes callers only pass packages with a non-null Version; confirm upstream filter.
var version = package.Version!.Trim();
if (version.Length == 0)
{
// A concrete version is required to emit any component.
return;
}
var metadata = new List<KeyValuePair<string, string?>>();
metadata.AddRange(BuildPackageMetadata(context, vfs, package));
// Correlate with lock data; record the match key so unmatched lock entries can be reported separately.
if (lockData.TryGet(package.Name, version, out var lockEntry))
{
matchedLocks.Add(lockEntry!.DeclarationKey);
AppendLockMetadata(metadata, lockEntry);
}
else if (hasLockEntries)
{
// Locks exist but this package is not in them — flag the gap rather than guessing.
metadata.Add(new KeyValuePair<string, string?>("lockMissing", "true"));
}
var metadataDirectory = TryResolvePhysicalMetadataDirectory(vfs, package, out var metadataFile);
if (metadataDirectory is not null)
{
// Preferred path: rich metadata loaded from the on-disk dist-info/egg-info directory.
PythonDistribution? distribution;
try
{
distribution = await PythonDistributionLoader.LoadAsync(context, metadataDirectory, cancellationToken).ConfigureAwait(false);
}
catch (IOException)
{
// NOTE(review): an unreadable metadata directory suppresses the package entirely
// (no PURL fallback below) — confirm this is intended rather than falling through.
return;
}
catch (JsonException)
{
return;
}
catch (UnauthorizedAccessException)
{
return;
}
if (distribution is null)
{
return;
}
// Distribution metadata comes first, then the package/lock metadata gathered above,
// then runtime/environment/startup/zipapp enrichments.
var fullMetadata = distribution.SortedMetadata.ToList();
fullMetadata.AddRange(metadata);
AppendRuntimeMetadata(fullMetadata, runtimeInfo);
AppendEnvironmentMetadata(fullMetadata, environment);
AppendStartupHooksMetadata(fullMetadata, startupHooks);
AppendZipappMetadata(fullMetadata, zipappAnalysis);
var evidence = distribution.SortedEvidence.ToList();
evidence.AddRange(startupHooks.ToEvidence(context));
writer.AddFromPurl(
analyzerId: "python",
purl: distribution.Purl,
name: distribution.Name,
version: distribution.Version,
type: "pypi",
metadata: fullMetadata,
evidence: evidence,
usedByEntrypoint: distribution.UsedByEntrypoint);
return;
}
if (metadataFile is not null && metadataFile.IsFromArchive)
{
// Second path: metadata lives inside an archive (wheel/zip layer); load via the VFS.
var archiveDistribution = await PythonDistributionVfsLoader
.LoadAsync(context, vfs, package, cancellationToken)
.ConfigureAwait(false);
if (archiveDistribution is not null)
{
// NOTE(review): unlike the physical-directory path, runtime/environment/startup/zipapp
// metadata is not appended here — confirm the asymmetry is deliberate.
var fullMetadata = archiveDistribution.SortedMetadata.ToList();
fullMetadata.AddRange(metadata);
writer.AddFromPurl(
analyzerId: "python",
purl: archiveDistribution.Purl,
name: archiveDistribution.Name,
version: archiveDistribution.Version,
type: "pypi",
metadata: fullMetadata,
evidence: archiveDistribution.SortedEvidence,
usedByEntrypoint: archiveDistribution.UsedByEntrypoint);
return;
}
}
// Fallback path: no usable metadata — synthesize the PURL from the normalized name/version.
var purl = $"pkg:pypi/{PythonPathHelper.NormalizePackageName(package.Name)}@{version}";
var evidenceFallback = BuildPackageEvidence(context, vfs, package, metadataFile);
writer.AddFromPurl(
analyzerId: "python",
purl: purl,
name: package.Name,
version: version,
type: "pypi",
metadata: metadata,
evidence: evidenceFallback,
usedByEntrypoint: false);
}
/// <summary>
/// Resolves the on-disk directory that holds the package's metadata file
/// (METADATA for dist-info, PKG-INFO for eggs). Returns null when the metadata
/// is missing or is archive-backed; note that <paramref name="metadataFile"/>
/// is still populated for archive entries so callers can detect that case.
/// </summary>
private static string? TryResolvePhysicalMetadataDirectory(
    PythonVirtualFileSystem vfs,
    PythonPackageInfo package,
    out PythonVirtualFile? metadataFile)
{
    metadataFile = null;

    var metadataPath = package.MetadataPath;
    if (string.IsNullOrWhiteSpace(metadataPath))
    {
        return null;
    }

    var fileName = package.Kind == PythonPackageKind.Egg ? "PKG-INFO" : "METADATA";
    metadataFile = vfs.GetFile($"{metadataPath}/{fileName}");

    // Archive-backed entries have no physical directory; the out parameter stays set
    // so the caller can fall back to the VFS loader.
    return metadataFile is null || metadataFile.IsFromArchive
        ? null
        : Path.GetDirectoryName(metadataFile.AbsolutePath);
}
/// <summary>
/// Yields the baseline metadata pairs for a package: kind, confidence, and location.
/// When the package carries no explicit location, one is derived from its metadata
/// file in the VFS (archive path for archive-backed entries, parent directory otherwise);
/// "." is emitted when no location can be determined.
/// </summary>
private static IEnumerable<KeyValuePair<string, string?>> BuildPackageMetadata(
    LanguageAnalyzerContext context,
    PythonVirtualFileSystem vfs,
    PythonPackageInfo package)
{
    var resolvedLocation = package.Location;

    if (string.IsNullOrWhiteSpace(resolvedLocation) && !string.IsNullOrWhiteSpace(package.MetadataPath))
    {
        var metadataFileName = package.Kind == PythonPackageKind.Egg ? "PKG-INFO" : "METADATA";
        var metadataEntry = vfs.GetFile($"{package.MetadataPath}/{metadataFileName}");
        if (metadataEntry is not null)
        {
            if (metadataEntry.IsFromArchive && metadataEntry.ArchivePath is not null)
            {
                resolvedLocation = PythonPathHelper.NormalizeRelative(context, metadataEntry.ArchivePath);
            }
            else
            {
                // Prefer the containing directory; fall back to the file itself when the
                // directory cannot be derived (e.g. root-level paths).
                resolvedLocation = Path.GetDirectoryName(metadataEntry.AbsolutePath) is { Length: > 0 } parentDirectory
                    ? PythonPathHelper.NormalizeRelative(context, parentDirectory)
                    : PythonPathHelper.NormalizeRelative(context, metadataEntry.AbsolutePath);
            }
        }
    }

    yield return new KeyValuePair<string, string?>("pkg.kind", package.Kind.ToString());
    yield return new KeyValuePair<string, string?>("pkg.confidence", package.Confidence.ToString());
    yield return new KeyValuePair<string, string?>(
        "pkg.location",
        string.IsNullOrWhiteSpace(resolvedLocation) ? "." : resolvedLocation.Replace('\\', '/'));
}
/// <summary>
/// Builds file evidence for a package's metadata file (METADATA or PKG-INFO).
/// Uses the caller-supplied <paramref name="metadataFile"/> when available,
/// otherwise looks it up in the VFS via the package's metadata path; returns an
/// empty collection when no metadata file can be found.
/// The two original branches duplicated identical evidence construction; they are
/// unified here into a single path with unchanged output.
/// </summary>
private static IReadOnlyCollection<LanguageComponentEvidence> BuildPackageEvidence(
    LanguageAnalyzerContext context,
    PythonVirtualFileSystem vfs,
    PythonPackageInfo package,
    PythonVirtualFile? metadataFile)
{
    var metadataName = package.Kind == PythonPackageKind.Egg ? "PKG-INFO" : "METADATA";

    var file = metadataFile;
    if (file is null && !string.IsNullOrWhiteSpace(package.MetadataPath))
    {
        file = vfs.GetFile($"{package.MetadataPath}/{metadataName}");
    }

    if (file is null)
    {
        return Array.Empty<LanguageComponentEvidence>();
    }

    // Archive-backed files are located by their archive path; physical files by their
    // absolute path. For archive entries the virtual absolute path is kept as the value.
    var locator = file.IsFromArchive && file.ArchivePath is not null
        ? PythonPathHelper.NormalizeRelative(context, file.ArchivePath)
        : PythonPathHelper.NormalizeRelative(context, file.AbsolutePath);

    return new[]
    {
        new LanguageComponentEvidence(
            LanguageEvidenceKind.File,
            metadataName,
            locator,
            Value: file.IsFromArchive ? file.AbsolutePath : null,
            Sha256: null)
    };
}
private static void AppendLockMetadata(List<KeyValuePair<string, string?>> metadata, PythonLockEntry entry)
{
metadata.Add(new KeyValuePair<string, string?>("lockSource", entry.Source));
@@ -286,41 +562,4 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
}
}
}
/// <summary>
/// Discovers dist-info and egg-info metadata directories under <paramref name="rootPath"/>,
/// including any surfaced from OCI container layers, deduplicated case-insensitively
/// and returned in a deterministic ordinal order.
/// </summary>
private static IReadOnlyCollection<string> CollectDistInfoDirectories(string rootPath)
{
    var discovered = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

    CollectByPattern(rootPath, "*.dist-info", discovered);
    CollectByPattern(rootPath, "*.egg-info", discovered);

    // Layer-aware discovery for OCI container filesystems.
    foreach (var layerDirectory in PythonContainerAdapter.DiscoverDistInfoDirectories(rootPath))
    {
        discovered.Add(layerDirectory);
    }

    // Ordinal sort keeps output deterministic across platforms.
    return discovered
        .OrderBy(static path => path, StringComparer.Ordinal)
        .ToArray();

    static void CollectByPattern(string basePath, string pattern, ISet<string> accumulator)
    {
        try
        {
            foreach (var directory in Directory.EnumerateDirectories(basePath, pattern, Enumeration))
            {
                accumulator.Add(directory);
            }
        }
        catch (IOException)
        {
            // Best-effort: skip unreadable paths.
        }
        catch (UnauthorizedAccessException)
        {
            // Best-effort: skip inaccessible paths.
        }
    }
}
}

View File

@@ -0,0 +1,14 @@
# Python Analyzer Tasks
## Python Detection Gaps (Sprint 0405)
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| SCAN-PY-405-001 | DONE | Wire layout-aware VFS/discovery into `PythonLanguageAnalyzer`. | 2025-12-13 |
| SCAN-PY-405-002 | BLOCKED | Preserve dist-info/egg-info evidence; emit explicit-key components where needed (incl. editable lock entries; no `@editable` PURLs). | 2025-12-13 |
| SCAN-PY-405-003 | BLOCKED | Blocked on Action 2: lock/requirements precedence + supported formats scope. | 2025-12-13 |
| SCAN-PY-405-004 | BLOCKED | Blocked on Action 3: container overlay contract (whiteouts + ordering semantics). | 2025-12-13 |
| SCAN-PY-405-005 | BLOCKED | Blocked on Action 4: vendored deps representation contract (identity/scope vs metadata-only). | 2025-12-13 |
| SCAN-PY-405-006 | BLOCKED | Blocked on Interlock 4: "used-by-entrypoint" semantics (avoid turning heuristics into truth). | 2025-12-13 |
| SCAN-PY-405-007 | BLOCKED | Blocked on Actions 2-4: fixtures for includes/editables, overlay/whiteouts, vendoring. | 2025-12-13 |
| SCAN-PY-405-008 | DONE | Docs + deterministic offline bench for Python analyzer contract. | 2025-12-13 |

View File

@@ -185,11 +185,11 @@ internal static class RubyObservationBuilder
string? bundledWith,
RubyContainerInfo containerInfo)
{
var bundlePaths = bundlerConfig.BundlePaths
var bundlePaths = bundlerConfig.BundlePathsRelative
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
var gemfiles = bundlerConfig.Gemfiles
var gemfiles = bundlerConfig.GemfilesRelative
.Select(static p => p.Replace('\\', '/'))
.OrderBy(static p => p, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();

View File

@@ -2,17 +2,31 @@ namespace StellaOps.Scanner.Analyzers.Lang.Ruby.Internal;
internal sealed class RubyBundlerConfig
{
private RubyBundlerConfig(IReadOnlyList<string> gemfiles, IReadOnlyList<string> bundlePaths)
private RubyBundlerConfig(
IReadOnlyList<string> gemfiles,
IReadOnlyList<string> bundlePaths,
IReadOnlyList<string> gemfilesRelative,
IReadOnlyList<string> bundlePathsRelative)
{
Gemfiles = gemfiles;
BundlePaths = bundlePaths;
GemfilesRelative = gemfilesRelative;
BundlePathsRelative = bundlePathsRelative;
}
public IReadOnlyList<string> Gemfiles { get; }
public IReadOnlyList<string> BundlePaths { get; }
public static RubyBundlerConfig Empty { get; } = new(Array.Empty<string>(), Array.Empty<string>());
public IReadOnlyList<string> GemfilesRelative { get; }
public IReadOnlyList<string> BundlePathsRelative { get; }
public static RubyBundlerConfig Empty { get; } = new(
Array.Empty<string>(),
Array.Empty<string>(),
Array.Empty<string>(),
Array.Empty<string>());
public static RubyBundlerConfig Load(string rootPath)
{
@@ -29,6 +43,9 @@ internal sealed class RubyBundlerConfig
var gemfiles = new List<string>();
var bundlePaths = new List<string>();
var gemfilesRelative = new List<string>();
var bundlePathsRelative = new List<string>();
var rootFullPath = Path.GetFullPath(rootPath);
try
{
@@ -59,11 +76,11 @@ internal sealed class RubyBundlerConfig
if (key.Equals("BUNDLE_GEMFILE", StringComparison.OrdinalIgnoreCase))
{
AddPath(gemfiles, rootPath, value);
AddPath(gemfiles, gemfilesRelative, rootFullPath, value);
}
else if (key.Equals("BUNDLE_PATH", StringComparison.OrdinalIgnoreCase))
{
AddPath(bundlePaths, rootPath, value);
AddPath(bundlePaths, bundlePathsRelative, rootFullPath, value);
}
}
}
@@ -77,25 +94,46 @@ internal sealed class RubyBundlerConfig
}
return new RubyBundlerConfig(
DistinctNormalized(gemfiles),
DistinctNormalized(bundlePaths));
DistinctNormalizedFullPaths(gemfiles),
DistinctNormalizedFullPaths(bundlePaths),
DistinctNormalizedRelativePaths(gemfilesRelative),
DistinctNormalizedRelativePaths(bundlePathsRelative));
}
private static void AddPath(List<string> target, string rootPath, string value)
private static void AddPath(
List<string> absoluteTarget,
List<string> relativeTarget,
string rootFullPath,
string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return;
}
var path = Path.IsPathRooted(value)
var resolved = Path.IsPathRooted(value)
? value
: Path.Combine(rootPath, value);
: Path.Combine(rootFullPath, value);
target.Add(Path.GetFullPath(path));
var fullPath = Path.GetFullPath(resolved);
absoluteTarget.Add(fullPath);
var relative = Path.GetRelativePath(rootFullPath, fullPath);
if (string.IsNullOrWhiteSpace(relative) || relative == "." || Path.IsPathRooted(relative))
{
return;
}
var normalized = relative.Replace('\\', '/');
if (normalized.StartsWith("../", StringComparison.Ordinal) || normalized.Equals("..", StringComparison.Ordinal))
{
return;
}
relativeTarget.Add(normalized);
}
private static IReadOnlyList<string> DistinctNormalized(IEnumerable<string> values)
private static IReadOnlyList<string> DistinctNormalizedFullPaths(IEnumerable<string> values)
{
return values
.Where(static value => !string.IsNullOrWhiteSpace(value))
@@ -104,4 +142,15 @@ internal sealed class RubyBundlerConfig
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private static IReadOnlyList<string> DistinctNormalizedRelativePaths(IEnumerable<string> values)
{
return values
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Replace('\\', '/'))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
}

View File

@@ -47,7 +47,7 @@ internal static class RubyCapabilityDetector
CreateRegex(@"\bsystem\s*\("),
CreateRegex(@"\bKernel\.spawn\s*\("),
CreateRegex(@"\bspawn\s*\("),
CreateRegex(@"\bOpen3\.[a-zA-Z_]+\b"),
CreateRegex(@"\bOpen3\.[a-zA-Z0-9_]+\b"),
CreateRegex(@"`[^`]+`"),
CreateRegex(@"%x\[[^\]]+\]"),
CreateRegex(@"%x\([^)]*\)")
@@ -317,4 +317,3 @@ internal static class RubyCapabilityDetector
private static Regex CreateRegex(string pattern) => new(pattern, PatternOptions);
}

View File

@@ -184,7 +184,7 @@ internal static partial class RubyContainerScanner
continue;
}
ScanGemDirectory(context, fullPath, installedGems, nativeExtensions, cancellationToken);
ScanGemInstallPath(context, fullPath, installedGems, nativeExtensions, cancellationToken);
}
// Also scan vendor paths
@@ -471,6 +471,47 @@ internal static partial class RubyContainerScanner
}
}
private static void ScanGemInstallPath(
LanguageAnalyzerContext context,
string rootPath,
List<RubyInstalledGem> installedGems,
List<RubyNativeExtension> nativeExtensions,
CancellationToken cancellationToken)
{
ScanGemDirectory(context, rootPath, installedGems, nativeExtensions, cancellationToken);
var gemsPath = Path.Combine(rootPath, "gems");
if (Directory.Exists(gemsPath))
{
ScanGemDirectory(context, gemsPath, installedGems, nativeExtensions, cancellationToken);
}
IEnumerable<string>? versionDirectories;
try
{
versionDirectories = Directory.EnumerateDirectories(rootPath);
}
catch (IOException)
{
return;
}
catch (UnauthorizedAccessException)
{
return;
}
foreach (var versionDirectory in versionDirectories)
{
cancellationToken.ThrowIfCancellationRequested();
var versionedGems = Path.Combine(versionDirectory, "gems");
if (Directory.Exists(versionedGems))
{
ScanGemDirectory(context, versionedGems, installedGems, nativeExtensions, cancellationToken);
}
}
}
private static void ScanVendorPaths(
LanguageAnalyzerContext context,
string rootPath,

View File

@@ -46,11 +46,6 @@ internal static class RubyRuntimeGraphBuilder
ArgumentNullException.ThrowIfNull(context);
ArgumentNullException.ThrowIfNull(packages);
if (packages.Count == 0)
{
return RubyRuntimeGraph.Empty;
}
var usageBuilders = new Dictionary<string, RubyRuntimeUsageBuilder>(StringComparer.OrdinalIgnoreCase);
foreach (var file in EnumerateRubyFiles(context.RootPath))

View File

@@ -25,7 +25,7 @@ public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
var lockData = await RubyLockData.LoadAsync(context, cancellationToken).ConfigureAwait(false);
var packages = RubyPackageCollector.CollectPackages(lockData, context, cancellationToken);
if (packages.Count == 0)
if (packages.Count == 0 && !LooksLikeRubyWorkspace(context.RootPath))
{
return;
}
@@ -58,10 +58,36 @@ public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
usedByEntrypoint: runtimeUsage?.UsedByEntrypoint ?? false);
}
if (packages.Count > 0)
EmitObservation(context, writer, packages, lockData, runtimeGraph, capabilities, bundlerConfig, lockData.BundledWith, containerInfo, runtimeEvidence, policyContext);
}
private static bool LooksLikeRubyWorkspace(string rootPath)
{
if (string.IsNullOrWhiteSpace(rootPath) || !Directory.Exists(rootPath))
{
EmitObservation(context, writer, packages, lockData, runtimeGraph, capabilities, bundlerConfig, lockData.BundledWith, containerInfo, runtimeEvidence, policyContext);
return false;
}
foreach (var fileName in new[] { "Gemfile", "gems.rb", "Rakefile", "config.ru" })
{
if (File.Exists(Path.Combine(rootPath, fileName)))
{
return true;
}
}
var options = new EnumerationOptions
{
RecurseSubdirectories = true,
IgnoreInaccessible = true,
MaxRecursionDepth = 3,
AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint,
};
return Directory.EnumerateFiles(rootPath, "*.rb", options).Any()
|| Directory.EnumerateFiles(rootPath, "*.rake", options).Any()
|| Directory.EnumerateFiles(rootPath, "*.ru", options).Any()
|| Directory.EnumerateFiles(rootPath, "*.thor", options).Any();
}
private static async ValueTask EnsureSurfaceValidationAsync(LanguageAnalyzerContext context, CancellationToken cancellationToken)
@@ -126,6 +152,7 @@ public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
var observationMetadata = BuildObservationMetadata(
packages.Count,
observationDocument.Entrypoints.Length,
observationDocument.DependencyEdges.Length,
observationDocument.RuntimeEdges.Length,
observationDocument.Capabilities,
@@ -158,6 +185,7 @@ public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
private static IEnumerable<KeyValuePair<string, string?>> BuildObservationMetadata(
int packageCount,
int entrypointCount,
int dependencyEdgeCount,
int runtimeEdgeCount,
RubyObservationCapabilitySummary capabilities,
@@ -166,6 +194,7 @@ public sealed class RubyLanguageAnalyzer : ILanguageAnalyzer
RubyObservationRuntimeEvidence? runtimeEvidence)
{
yield return new KeyValuePair<string, string?>("ruby.observation.packages", packageCount.ToString(CultureInfo.InvariantCulture));
yield return new KeyValuePair<string, string?>("ruby.observation.entrypoints", entrypointCount.ToString(CultureInfo.InvariantCulture));
yield return new KeyValuePair<string, string?>("ruby.observation.dependency_edges", dependencyEdgeCount.ToString(CultureInfo.InvariantCulture));
yield return new KeyValuePair<string, string?>("ruby.observation.runtime_edges", runtimeEdgeCount.ToString(CultureInfo.InvariantCulture));
yield return new KeyValuePair<string, string?>("ruby.observation.capability.exec", capabilities.UsesExec ? "true" : "false");

View File

@@ -18,3 +18,4 @@
| `SCANNER-ANALYZERS-RUBY-28-010` | DONE (2025-11-27) | Optional runtime evidence integration with path hashing: created Internal/Runtime/ types (RubyRuntimeEvidence.cs, RubyRuntimeEvidenceCollector.cs, RubyRuntimePathHasher.cs, RubyRuntimeEvidenceIntegrator.cs). Added RubyObservationRuntimeEvidence and RubyObservationRuntimeError to observation document. Collector reads ruby-runtime.ndjson from multiple paths, parses require/load/method.call/error events, builds path hash map (SHA-256) for secure correlation. Integrator correlates package evidence, enhances runtime edges with "runtime-verified" flag, adds supplementary "runtime-only" edges without altering static precedence. Updated builder/serializer to include optional runtimeEvidence section. All 8 determinism tests pass. |
| `SCANNER-ANALYZERS-RUBY-28-011` | DONE (2025-11-27) | Package analyzer plug-in, CLI, and Offline Kit docs: verified existing manifest.json (schemaVersion 1.0, capabilities: language-analyzer/ruby/rubygems/bundler, runtime-capture:optional), verified RubyAnalyzerPlugin.cs entrypoint. CLI `stella ruby inspect` and `stella ruby resolve` commands already implemented in CommandFactory.cs/CommandHandlers.cs. Updated docs/24_OFFLINE_KIT.md with comprehensive Ruby analyzer feature list covering OCI container layers, dependency edges, Ruby version detection, native extensions, web server configs, AOC-compliant observations, runtime evidence with path hashing, and CLI usage. |
| `SCANNER-ANALYZERS-RUBY-28-012` | DONE (2025-11-27) | Policy signal emitter: created RubyPolicySignalEmitter.cs with signal emission for rubygems drift (declared-only, vendored, git-sourced, path-sourced counts, version mismatches), native extension flags (.so/.bundle/.dll counts, gem list), dangerous construct counts (exec/eval/serialization with risk tier), TLS posture (verify disabled, SSL context overrides, insecure HTTP), and dynamic code warnings (require/load/const_get/method_missing). Created RubyPolicyContextBuilder.cs with regex-based source scanning for dangerous patterns. Integrated into RubyLanguageAnalyzer via EmitPolicySignals. Added ScanAnalysisKeys.RubyPolicySignals key. Updated benchmark targets to 1000ms to accommodate policy scanning overhead. All 8 determinism tests pass. |
| `SCANNER-ANALYZERS-RUBY-28-013` | DOING (2025-12-13) | Fix Ruby determinism regressions (capability exec via `Open3.capture3`, container native extensions, no host paths in observation environment) and refresh golden fixtures to keep `StellaOps.Scanner.sln` green. |

View File

@@ -0,0 +1,26 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Scanner.Analyzers.Lang;
public static class LanguageExplicitKey
{
public static string Create(string analyzerId, string ecosystem, string name, string spec, string originLocator)
{
ArgumentException.ThrowIfNullOrWhiteSpace(analyzerId);
ArgumentException.ThrowIfNullOrWhiteSpace(ecosystem);
ArgumentException.ThrowIfNullOrWhiteSpace(name);
analyzerId = analyzerId.Trim();
ecosystem = ecosystem.Trim();
name = name.Trim();
spec = spec?.Trim() ?? string.Empty;
originLocator = originLocator?.Trim() ?? string.Empty;
var canonical = string.Join('\n', ecosystem, name, spec, originLocator);
var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
var hex = Convert.ToHexString(digest).ToLowerInvariant();
return $"explicit::{analyzerId}::{ecosystem}::{name}::sha256:{hex}";
}
}

View File

@@ -1,7 +1,8 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Cache.Abstractions;
@@ -14,7 +15,7 @@ public interface IRichGraphPublisher
}
/// <summary>
/// Packages richgraph-v1 JSON + meta into a deterministic zip and stores it in CAS.
/// Stores richgraph-v1 JSON in CAS and emits a deterministic DSSE envelope for graph attestations.
/// CAS paths follow the richgraph-v1 contract: cas://reachability/graphs/{blake3}
/// </summary>
public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
@@ -42,43 +43,91 @@ public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
var writeResult = await _writer.WriteAsync(graph, workRoot, analysisId, cancellationToken).ConfigureAwait(false);
var folder = Path.GetDirectoryName(writeResult.GraphPath)!;
var zipPath = Path.Combine(folder, "richgraph.zip");
CreateDeterministicZip(folder, zipPath);
// Use BLAKE3 graph_hash as the CAS key per CONTRACT-RICHGRAPH-V1-015
var casKey = ExtractHashDigest(writeResult.GraphHash);
await using var stream = File.OpenRead(zipPath);
var casEntry = await cas.PutAsync(new FileCasPutRequest(casKey, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
await using var graphStream = File.OpenRead(writeResult.GraphPath);
var casEntry = await cas.PutAsync(new FileCasPutRequest(casKey, graphStream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
// Build CAS URI per contract: cas://reachability/graphs/{blake3}
var casUri = $"cas://reachability/graphs/{casKey}";
var dsse = BuildDeterministicGraphDsse(writeResult, casUri, analysisId);
await using var dsseStream = new MemoryStream(dsse.EnvelopeJson, writable: false);
var dsseKey = $"{casKey}.dsse";
var dsseEntry = await cas.PutAsync(new FileCasPutRequest(dsseKey, dsseStream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
var dsseCasUri = $"cas://reachability/graphs/{dsseKey}";
return new RichGraphPublishResult(
writeResult.GraphHash,
casEntry.RelativePath,
casUri,
dsseEntry.RelativePath,
dsseCasUri,
dsse.Digest,
writeResult.NodeCount,
writeResult.EdgeCount);
}
private static void CreateDeterministicZip(string sourceDir, string destinationZip)
private static GraphDsse BuildDeterministicGraphDsse(RichGraphWriteResult writeResult, string casUri, string analysisId)
{
if (File.Exists(destinationZip))
{
File.Delete(destinationZip);
}
var graphHash = writeResult.GraphHash;
var files = Directory.EnumerateFiles(sourceDir, "*", SearchOption.TopDirectoryOnly)
.OrderBy(f => f, StringComparer.Ordinal)
.ToList();
using var zip = ZipFile.Open(destinationZip, ZipArchiveMode.Create);
foreach (var file in files)
var predicate = new
{
var entryName = Path.GetFileName(file);
zip.CreateEntryFromFile(file, entryName, CompressionLevel.Optimal);
}
version = "1.0",
schema = "richgraph-v1",
graphId = analysisId,
hashes = new
{
graphHash
},
cas = new
{
location = casUri
},
graph = new
{
nodes = new { total = writeResult.NodeCount },
edges = new { total = writeResult.EdgeCount }
}
};
var payloadType = "application/vnd.stellaops.graph.predicate+json";
var payloadBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(predicate, new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
WriteIndented = false
}));
var signatureHex = ComputeSha256Hex(payloadBytes);
var envelope = new
{
payloadType,
payload = Base64UrlEncode(payloadBytes),
signatures = new[]
{
new { keyid = "scanner-deterministic", sig = Base64UrlEncode(Encoding.UTF8.GetBytes(signatureHex)) }
}
};
var envelopeJson = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(envelope, new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
WriteIndented = false
}));
return new GraphDsse(envelopeJson, $"sha256:{signatureHex}");
}
private static string ComputeSha256Hex(ReadOnlySpan<byte> data)
{
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(data, hash);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static string Base64UrlEncode(ReadOnlySpan<byte> data)
{
var base64 = Convert.ToBase64String(data);
return base64.Replace("+", "-").Replace("/", "_").TrimEnd('=');
}
/// <summary>
@@ -95,5 +144,10 @@ public sealed record RichGraphPublishResult(
string GraphHash,
string RelativePath,
string CasUri,
string DsseRelativePath,
string DsseCasUri,
string DsseDigest,
int NodeCount,
int EdgeCount);
internal sealed record GraphDsse(byte[] EnvelopeJson, string Digest);

View File

@@ -196,6 +196,25 @@ public sealed class BunLanguageAnalyzerTests
cancellationToken);
}
[Fact]
public async Task PatchedMultiVersionIsParsedAsync()
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "bun", "patched-multi-version");
var goldenPath = Path.Combine(fixturePath, "expected.json");
var analyzers = new ILanguageAnalyzer[]
{
new BunLanguageAnalyzer()
};
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
fixturePath,
goldenPath,
analyzers,
cancellationToken);
}
[Fact]
public async Task DeepDependencyTreeIsParsedAsync()
{
@@ -252,4 +271,80 @@ public sealed class BunLanguageAnalyzerTests
analyzers,
cancellationToken);
}
[Fact]
public async Task ContainerLayersAreDiscoveredAsync()
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "bun", "container-layers");
var goldenPath = Path.Combine(fixturePath, "expected.json");
var analyzers = new ILanguageAnalyzer[]
{
new BunLanguageAnalyzer()
};
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
fixturePath,
goldenPath,
analyzers,
cancellationToken);
}
[Fact]
public async Task BunfigOnlyEmitsDeclaredOnlyAsync()
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "bun", "bunfig-only");
var goldenPath = Path.Combine(fixturePath, "expected.json");
var analyzers = new ILanguageAnalyzer[]
{
new BunLanguageAnalyzer()
};
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
fixturePath,
goldenPath,
analyzers,
cancellationToken);
}
[Fact]
public async Task LockfileDevClassificationIsDeterministicAsync()
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "bun", "lockfile-dev-classification");
var goldenPath = Path.Combine(fixturePath, "expected.json");
var analyzers = new ILanguageAnalyzer[]
{
new BunLanguageAnalyzer()
};
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
fixturePath,
goldenPath,
analyzers,
cancellationToken);
}
[Fact]
public async Task NonConcreteVersionsUseExplicitKeyAsync()
{
var cancellationToken = TestContext.Current.CancellationToken;
var fixturePath = TestPaths.ResolveFixture("lang", "bun", "non-concrete-versions");
var goldenPath = Path.Combine(fixturePath, "expected.json");
var analyzers = new ILanguageAnalyzer[]
{
new BunLanguageAnalyzer()
};
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
fixturePath,
goldenPath,
analyzers,
cancellationToken);
}
}

View File

@@ -0,0 +1,74 @@
[
{
"analyzerId": "bun",
"componentKey": "explicit::bun::npm::left-pad::sha256:8ad9c18ee1a619ce3a224346fe984c4ced211ac443ebf7d709a93f1343ef8ba2",
"name": "left-pad",
"type": "npm",
"usedByEntrypoint": false,
"metadata": {
"declared.locator": "package.json#dependencies",
"declared.scope": "prod",
"declared.source": "package.json",
"declared.sourceType": "range",
"declared.versionSpec": "^1.3.0",
"declaredOnly": "true",
"packageManager": "bun"
},
"evidence": [
{
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
},
{
"analyzerId": "bun",
"componentKey": "explicit::bun::npm::local-file::sha256:61b6ef7b8e24fe3a1e1080296c61f2ca4ad8839f453e24cb8adf874678521caa",
"name": "local-file",
"type": "npm",
"usedByEntrypoint": false,
"metadata": {
"declared.locator": "package.json#dependencies",
"declared.scope": "prod",
"declared.source": "package.json",
"declared.sourceType": "file",
"declared.versionSpec": "file:../local-file",
"declaredOnly": "true",
"packageManager": "bun"
},
"evidence": [
{
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
},
{
"analyzerId": "bun",
"componentKey": "explicit::bun::npm::typescript::sha256:5a0a88f051ea20b8875334dadc5bce3c0861d146b151ab7bab95654541b7a168",
"name": "typescript",
"type": "npm",
"usedByEntrypoint": false,
"metadata": {
"declared.locator": "package.json#devDependencies",
"declared.scope": "dev",
"declared.source": "package.json",
"declared.sourceType": "range",
"declared.versionSpec": "~5.3.0",
"declaredOnly": "true",
"packageManager": "bun"
},
"evidence": [
{
"kind": "file",
"source": "package.json",
"locator": "package.json",
"sha256": "465919e1195aa0b066f473c55341df77abff6a6b7d62e25d63ccfb7c13e3287b"
}
]
}
]

View File

@@ -0,0 +1,11 @@
{
"name": "bunfig-only-fixture",
"private": true,
"dependencies": {
"left-pad": "^1.3.0",
"local-file": "file:../local-file"
},
"devDependencies": {
"typescript": "~5.3.0"
}
}

View File

@@ -0,0 +1,6 @@
{
"lockfileVersion": 1,
"packages": {
"ms@2.1.3": ["https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="]
}
}

View File

@@ -0,0 +1,7 @@
{
"name": "bun-container-layers-fixture",
"version": "1.0.0",
"dependencies": {
"ms": "^2.1.3"
}
}

View File

@@ -0,0 +1,34 @@
[
{
"analyzerId": "bun",
"componentKey": "purl::pkg:npm/ms@2.1.3",
"purl": "pkg:npm/ms@2.1.3",
"name": "ms",
"version": "2.1.3",
"type": "npm",
"usedByEntrypoint": false,
"metadata": {
"direct": "true",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"packageManager": "bun",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"source": "bun.lock"
},
"evidence": [
{
"kind": "metadata",
"source": "integrity",
"locator": ".layers/layer0/app/bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
},
{
"kind": "metadata",
"source": "resolved",
"locator": ".layers/layer0/app/bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "4a384b14aba7740bd500cdf0da7329a41a2940662e9b1fcab1fbc71c6c8389e7"
}
]
}
]

View File

@@ -22,20 +22,23 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/@company/internal-pkg/package.json"
"locator": "node_modules/@company/internal-pkg/package.json",
"sha256": "f5311f43a95bd76e1912dbd7d0a5b3611baa9e82bcf72d5dc7f34c5f71f0ddf4"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-customhash123=="
"locator": "bun.lock:packages[@company/internal-pkg@1.0.0]",
"value": "sha512-customhash123==",
"sha256": "eb3bacf736d4a1b3cf9e02357afc1add9f20323916ce62cf8748c9ad9a80f195"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://npm.company.com/@company/internal-pkg/-/internal-pkg-1.0.0.tgz"
"locator": "bun.lock:packages[@company/internal-pkg@1.0.0]",
"value": "https://npm.company.com/@company/internal-pkg/-/internal-pkg-1.0.0.tgz",
"sha256": "eb3bacf736d4a1b3cf9e02357afc1add9f20323916ce62cf8748c9ad9a80f195"
}
]
}
]
]

View File

@@ -19,19 +19,22 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/debug/package.json"
"locator": "node_modules/debug/package.json",
"sha256": "2258b5b4d7e5ed711aeef1a86d5d9e5abf2a04410e05bd89ea806e423417e493"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX\u002B7G/vCNNhehwxfkQ=="
"locator": "bun.lock:packages[debug@4.3.4]",
"value": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX\u002B7G/vCNNhehwxfkQ==",
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz"
"locator": "bun.lock:packages[debug@4.3.4]",
"value": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
}
]
},
@@ -54,20 +57,23 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/ms/package.json"
"locator": "node_modules/ms/package.json",
"sha256": "ae11c4ce44027a95893e8c890aed0c582f04e8cf1b8022931eddcb613cd9d3f7"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz"
"locator": "bun.lock:packages[ms@2.1.3]",
"value": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"sha256": "33d4886c0591242ffb78b5e739c5248c81559312586d59d543d48387e4bb6a2b"
}
]
}
]
]

View File

@@ -21,14 +21,16 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/my-git-pkg/package.json"
"locator": "node_modules/my-git-pkg/package.json",
"sha256": "45687abed9d301c361987ca877da135e830c80dc3ce37f9ea1c74c7df96b8bf2"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "git\u002Bhttps://github.com/user/my-git-pkg.git#abc123def456"
"locator": "bun.lock:packages[my-git-pkg@1.0.0]",
"value": "git\u002Bhttps://github.com/user/my-git-pkg.git#abc123def456",
"sha256": "819a7efc185bd1314d21aa7fdc0e5b2134a0c9b758ecd9daa62cb6cba2feddd0"
}
]
}
]
]

View File

@@ -18,19 +18,22 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/.bun/is-number@6.0.0/package.json"
"locator": "node_modules/.bun/is-number@6.0.0/package.json",
"sha256": "0324c895ec4aa4049c77371f08e937eed97a58e442595a8834ba21afd8e100b3"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-Wu1VZAVuL1snqOnHLxJ0l2p3pjlzLnMcJ8gJhaTZVfP7VFKN7fSJ8X/gR0qFCLwfFJ0Rqd3IxfS\u002BTY/Lc1Q7Pw=="
"locator": "bun.lock:packages[is-number@6.0.0]",
"value": "sha512-Wu1VZAVuL1snqOnHLxJ0l2p3pjlzLnMcJ8gJhaTZVfP7VFKN7fSJ8X/gR0qFCLwfFJ0Rqd3IxfS\u002BTY/Lc1Q7Pw==",
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://registry.npmjs.org/is-number/-/is-number-6.0.0.tgz"
"locator": "bun.lock:packages[is-number@6.0.0]",
"value": "https://registry.npmjs.org/is-number/-/is-number-6.0.0.tgz",
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
}
]
},
@@ -54,20 +57,23 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/.bun/is-odd@3.0.1/package.json"
"locator": "node_modules/.bun/is-odd@3.0.1/package.json",
"sha256": "beb18158821ecb86f3bb2a6be3ef817c0b8dcdc3e05a53e0b9a1c62d74a595ac"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-CQpnWPrDwmP1\u002BSMHXvTXAoSEu2mCPgMU0VKt1WcA7D8VXCo4HfVNlUbD1k8Tg0BVDX/LhyRaZqKqiS4vI6tTHg=="
"locator": "bun.lock:packages[is-odd@3.0.1]",
"value": "sha512-CQpnWPrDwmP1\u002BSMHXvTXAoSEu2mCPgMU0VKt1WcA7D8VXCo4HfVNlUbD1k8Tg0BVDX/LhyRaZqKqiS4vI6tTHg==",
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://registry.npmjs.org/is-odd/-/is-odd-3.0.1.tgz"
"locator": "bun.lock:packages[is-odd@3.0.1]",
"value": "https://registry.npmjs.org/is-odd/-/is-odd-3.0.1.tgz",
"sha256": "746b6c809e50ee2d7bdb27a0ee43046d48fa5f21d7597bbadd3bd44269798812"
}
]
}
]
]

View File

@@ -19,20 +19,23 @@
{
"kind": "file",
"source": "node_modules",
"locator": "node_modules/lodash/package.json"
"locator": "node_modules/lodash/package.json",
"sha256": "82145cd4bdc9a690c14843b405179c60aeda1a958029f6ae62776c1b26e42169"
},
{
"kind": "metadata",
"source": "integrity",
"locator": "bun.lock",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q=="
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vz1kAmtILi\u002B8fm9nJMg7b0GN8sMEJz2mxG/S7mNxhWQ7\u002BD9bF8Q==",
"sha256": "7b34fdbdf0cb3e0d07e25f7d7f452491dcfad421138449217a1c20b2f66a6475"
},
{
"kind": "metadata",
"source": "resolved",
"locator": "bun.lock",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"
"locator": "bun.lock:packages[lodash@4.17.21]",
"value": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"sha256": "7b34fdbdf0cb3e0d07e25f7d7f452491dcfad421138449217a1c20b2f66a6475"
}
]
}
]
]

View File

@@ -0,0 +1,10 @@
{
"lockfileVersion": 1,
"packages": {
"prod-pkg@1.0.0": ["https://registry.npmjs.org/prod-pkg/-/prod-pkg-1.0.0.tgz", null, {"shared": "^1.0.0"}],
"dev-pkg@1.0.0": ["https://registry.npmjs.org/dev-pkg/-/dev-pkg-1.0.0.tgz", null, {"dev-only": "^1.0.0"}],
"shared@1.0.0": ["https://registry.npmjs.org/shared/-/shared-1.0.0.tgz", null],
"dev-only@1.0.0": ["https://registry.npmjs.org/dev-only/-/dev-only-1.0.0.tgz", null]
}
}

Some files were not shown because too many files have changed in this diff Show More