up
This commit is contained in:
@@ -0,0 +1,119 @@
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
internal static class NodeEnvironmentScanner
{
    // Matches Dockerfile "ENV NODE_OPTIONS=value" / "ARG NODE_OPTIONS value" assignments.
    // Verbatim string literal: the original regular literal ("^\s*...") contained invalid
    // C# escape sequences (\s → CS1009) and would not compile.
    private static readonly Regex EnvAssign = new(@"^\s*(ENV|ARG)\s+NODE_OPTIONS\s*(=|\s)(?<value>.+)$", RegexOptions.IgnoreCase | RegexOptions.Compiled);

    /// <summary>
    /// Scans each source root's Dockerfile and .env file for NODE_OPTIONS assignments and
    /// returns one warning record per hit, ordered deterministically by component key.
    /// </summary>
    public static IReadOnlyList<LanguageComponentRecord> Scan(LanguageAnalyzerContext context, IReadOnlyList<string> sourceRoots, CancellationToken cancellationToken)
    {
        var warnings = new List<LanguageComponentRecord>();

        foreach (var root in sourceRoots)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var dockerfile = Path.Combine(root, "Dockerfile");
            if (File.Exists(dockerfile))
            {
                warnings.AddRange(ScanDockerfile(context, dockerfile));
            }

            var envFile = Path.Combine(root, ".env");
            if (File.Exists(envFile))
            {
                warnings.AddRange(ScanEnvFile(context, envFile));
            }
        }

        // Ordinal sort keeps output stable across runs and platforms.
        return warnings
            .OrderBy(static r => r.ComponentKey, StringComparer.Ordinal)
            .ToArray();
    }

    /// <summary>Emits one warning per Dockerfile line that assigns NODE_OPTIONS via ENV/ARG.</summary>
    private static IEnumerable<LanguageComponentRecord> ScanDockerfile(LanguageAnalyzerContext context, string dockerfile)
    {
        // Read eagerly before yielding: C# forbids "yield return" inside a try block that
        // has a catch clause (CS1626), so the original try/catch wrapping the yield would
        // not compile. Unreadable files degrade to "no warnings", preserving the intent.
        var lines = ReadLinesSafe(dockerfile);
        for (var i = 0; i < lines.Length; i++)
        {
            var match = EnvAssign.Match(lines[i]);
            if (!match.Success)
            {
                continue;
            }

            // Strip surrounding quotes so 'value' and "value" normalize identically.
            var value = match.Groups["value"].Value.Trim().Trim('"', '\'');
            yield return BuildWarning(context, dockerfile, i + 1, value, source: "Dockerfile", reason: "NODE_OPTIONS");
        }
    }

    /// <summary>Emits one warning per .env line that assigns NODE_OPTIONS.</summary>
    private static IEnumerable<LanguageComponentRecord> ScanEnvFile(LanguageAnalyzerContext context, string envFile)
    {
        // Same CS1626 restructuring as ScanDockerfile: I/O happens before iteration.
        var lines = ReadLinesSafe(envFile);
        for (var i = 0; i < lines.Length; i++)
        {
            var line = lines[i];
            if (!line.Contains("NODE_OPTIONS", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            // Split on the first '=' only; values may themselves contain '='.
            var parts = line.Split('=', 2);
            if (parts.Length != 2)
            {
                continue;
            }

            var value = parts[1].Trim().Trim('"', '\'');
            yield return BuildWarning(context, envFile, i + 1, value, source: ".env", reason: "NODE_OPTIONS");
        }
    }

    // Best-effort read: I/O failure is treated as an empty file, matching the original
    // intent of swallowing IOException.
    private static string[] ReadLinesSafe(string path)
    {
        try
        {
            return File.ReadAllLines(path);
        }
        catch (IOException)
        {
            return Array.Empty<string>();
        }
    }

    /// <summary>
    /// Builds a synthetic "warning" component record pointing at the exact file:line
    /// where NODE_OPTIONS is set, with the offending value captured in metadata and evidence.
    /// </summary>
    private static LanguageComponentRecord BuildWarning(LanguageAnalyzerContext context, string filePath, int lineNumber, string value, string source, string reason)
    {
        // Locator uses forward slashes so keys are identical on Windows and Unix.
        var locator = context.GetRelativePath(filePath).Replace(Path.DirectorySeparatorChar, '/');
        var metadata = new List<KeyValuePair<string, string?>>
        {
            new("source", source),
            new("locator", string.Concat(locator, "#", lineNumber.ToString(CultureInfo.InvariantCulture))),
            new("reason", reason),
            new("value", value)
        };

        var evidence = new[]
        {
            new LanguageComponentEvidence(
                LanguageEvidenceKind.Metadata,
                "node.env",
                string.Concat(locator, "#", lineNumber.ToString(CultureInfo.InvariantCulture)),
                value,
                null)
        };

        // Component key embeds locator + line so multiple assignments stay distinct.
        return LanguageComponentRecord.FromExplicitKey(
            analyzerId: "node",
            componentKey: string.Concat("warning:node-options:", locator, "#", lineNumber.ToString(CultureInfo.InvariantCulture)),
            purl: null,
            name: "NODE_OPTIONS warning",
            version: null,
            type: "node:warning",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }
}
|
||||
@@ -4,7 +4,8 @@ internal sealed record NodeImportEdge(
|
||||
string SourceFile,
|
||||
string TargetSpecifier,
|
||||
string Kind,
|
||||
string Evidence)
|
||||
string Evidence,
|
||||
string Confidence)
|
||||
{
|
||||
public string ComparisonKey => string.Concat(SourceFile, "|", TargetSpecifier, "|", Kind);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
/// Outcome of resolving a single import specifier: the importing file, the raw
/// specifier, the path it resolved to, how it was resolved (ResolutionType), and a
/// confidence level ("high"/"medium"/"low") carried through from import extraction.
/// </summary>
internal sealed record NodeImportResolution(
    string SourceFile,
    string Specifier,
    string ResolvedPath,
    string ResolutionType,
    string Confidence);
|
||||
@@ -1,3 +1,7 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
using Esprima;
|
||||
using Esprima.Ast;
|
||||
using EsprimaNode = Esprima.Ast.Node;
|
||||
@@ -6,7 +10,9 @@ namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
internal static class NodeImportWalker
|
||||
{
|
||||
public static IReadOnlyList<NodeImportEdge> AnalyzeImports(string sourcePath, string content)
|
||||
private const int MaxSourceMapBytes = 1_048_576; // 1 MiB safety cap
|
||||
|
||||
public static IReadOnlyList<NodeImportEdge> AnalyzeImports(string rootPath, string sourcePath, string content)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sourcePath);
|
||||
if (content is null)
|
||||
@@ -14,36 +20,60 @@ internal static class NodeImportWalker
|
||||
return Array.Empty<NodeImportEdge>();
|
||||
}
|
||||
|
||||
var edges = new List<NodeImportEdge>();
|
||||
AnalyzeInternal(rootPath, sourcePath, content, allowSourceMap: true, edges);
|
||||
return edges.Count == 0
|
||||
? Array.Empty<NodeImportEdge>()
|
||||
: edges.OrderBy(static e => e.ComparisonKey, StringComparer.Ordinal).ToArray();
|
||||
}
|
||||
|
||||
private static void AnalyzeInternal(string rootPath, string sourcePath, string content, bool allowSourceMap, List<NodeImportEdge> edges)
|
||||
{
|
||||
Script script;
|
||||
try
|
||||
{
|
||||
var parser = new JavaScriptParser();
|
||||
script = parser.ParseScript(content, sourcePath, true);
|
||||
var parser = new JavaScriptParser(new ParserOptions { Tolerant = true });
|
||||
script = parser.ParseScript(content, sourcePath, strict: false);
|
||||
}
|
||||
catch (ParserException)
|
||||
{
|
||||
return Array.Empty<NodeImportEdge>();
|
||||
script = null!;
|
||||
}
|
||||
|
||||
var edges = new List<NodeImportEdge>();
|
||||
Walk(script, sourcePath, edges);
|
||||
return edges.Count == 0
|
||||
? Array.Empty<NodeImportEdge>()
|
||||
: edges.OrderBy(e => e.ComparisonKey, StringComparer.Ordinal).ToArray();
|
||||
if (script is not null)
|
||||
{
|
||||
Walk(script, sourcePath, edges);
|
||||
}
|
||||
|
||||
if (allowSourceMap)
|
||||
{
|
||||
TryAnalyzeSourceMap(rootPath, sourcePath, content, edges);
|
||||
}
|
||||
}
|
||||
|
||||
private static void Walk(EsprimaNode node, string sourcePath, List<NodeImportEdge> edges)
|
||||
{
|
||||
switch (node)
|
||||
{
|
||||
case ImportDeclaration importDecl when !string.IsNullOrWhiteSpace(importDecl.Source?.StringValue):
|
||||
edges.Add(new NodeImportEdge(sourcePath, importDecl.Source.StringValue!, "import", string.Empty));
|
||||
case ImportDeclaration importDecl:
|
||||
if (TryGetLiteral(importDecl.Source, out var importTarget, out var importConfidence, out var importEvidence))
|
||||
{
|
||||
AddEdge(edges, sourcePath, importTarget!, "import", importEvidence, importConfidence);
|
||||
}
|
||||
break;
|
||||
case CallExpression call when IsRequire(call) && call.Arguments.FirstOrDefault() is Literal { Value: string target }:
|
||||
edges.Add(new NodeImportEdge(sourcePath, target, "require", string.Empty));
|
||||
|
||||
case CallExpression call when IsRequire(call):
|
||||
if (call.Arguments.FirstOrDefault() is Expression requireArg && TryRenderSpecifier(requireArg, out var requireTarget, out var requireConfidence, out var requireEvidence))
|
||||
{
|
||||
AddEdge(edges, sourcePath, requireTarget!, "require", requireEvidence, requireConfidence);
|
||||
}
|
||||
break;
|
||||
case ImportExpression importExp when importExp.Source is Literal { Value: string importTarget }:
|
||||
edges.Add(new NodeImportEdge(sourcePath, importTarget, "import()", string.Empty));
|
||||
|
||||
case ImportExpression importExp:
|
||||
if (importExp.Source is Expression expr && TryRenderSpecifier(expr, out var importExprTarget, out var importExprConfidence, out var importExprEvidence))
|
||||
{
|
||||
AddEdge(edges, sourcePath, importExprTarget!, "import()", importExprEvidence, importExprConfidence);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -53,9 +83,199 @@ internal static class NodeImportWalker
|
||||
}
|
||||
}
|
||||
|
||||
// Attempts to render an import/require argument expression into a specifier string.
// Handles plain string literals, template literals (interpolated parts become a
// "${*}" placeholder), and "+" string concatenation; anything else is rejected.
private static bool TryRenderSpecifier(Expression expression, out string? specifier, out string confidence, out string evidence)
{
    specifier = null;
    evidence = string.Empty;
    confidence = "low";

    // Plain string literal: exact specifier, highest confidence.
    if (TryGetLiteral(expression, out var literalValue, out confidence, out evidence))
    {
        specifier = literalValue;
        return true;
    }

    if (expression is TemplateLiteral template)
    {
        // Concatenate the static text chunks (quasis) of the template.
        var raw = string.Concat(template.Quasis.Select(static q => q.Value?.Cooked ?? string.Empty));
        if (template.Expressions.Count == 0)
        {
            // No interpolations: the template behaves like a literal.
            specifier = raw;
            confidence = "high";
            evidence = "template-static";
            return true;
        }

        // NOTE(review): the wildcard is appended at the end regardless of where the
        // interpolations actually occur inside the template — confirm acceptable.
        specifier = raw + "${*}";
        confidence = "medium";
        evidence = "template-dynamic";
        return true;
    }

    if (expression is BinaryExpression binary && binary.Operator == BinaryOperator.Plus)
    {
        var leftOk = TryRenderSpecifier(binary.Left, out var left, out var leftConf, out var leftEvidence);
        var rightOk = TryRenderSpecifier(binary.Right, out var right, out var rightConf, out var rightEvidence);

        if (leftOk || rightOk)
        {
            // NOTE(review): an operand that could not be rendered contributes nothing to
            // the concatenation (its text is dropped silently) — confirm a placeholder
            // such as "${*}" is not wanted here instead.
            specifier = string.Concat(left ?? string.Empty, right ?? string.Empty);
            // When one side failed, its confidence is replaced with the other side's,
            // so the combination degenerates to the successful side's confidence.
            var combinedLeft = leftOk ? leftConf : rightConf;
            var combinedRight = rightOk ? rightConf : leftConf;
            confidence = CombineConfidence(combinedLeft, combinedRight);
            evidence = string.Join(';', new[] { leftEvidence, rightEvidence }.Where(static e => !string.IsNullOrWhiteSpace(e)));
            return true;
        }
    }

    return false;
}
|
||||
|
||||
// Recognizes a plain string-literal AST node; all other node shapes are rejected.
// On success the literal text is returned with "high" confidence and "literal" evidence.
private static bool TryGetLiteral(EsprimaNode? node, out string? value, out string confidence, out string evidence)
{
    if (node is Literal { Value: string text })
    {
        value = text;
        confidence = "high";
        evidence = "literal";
        return true;
    }

    value = null;
    confidence = "low";
    evidence = string.Empty;
    return false;
}
|
||||
|
||||
// Appends an import edge unless an edge with the same ComparisonKey
// (source|specifier|kind) has already been recorded.
private static void AddEdge(List<NodeImportEdge> edges, string sourcePath, string target, string kind, string evidence, string confidence)
{
    var candidate = new NodeImportEdge(sourcePath, target, kind, evidence, confidence);
    var alreadyPresent = edges.Any(existing => string.Equals(existing.ComparisonKey, candidate.ComparisonKey, StringComparison.Ordinal));
    if (!alreadyPresent)
    {
        edges.Add(candidate);
    }
}
|
||||
|
||||
// Follows an external sourceMappingURL reference (if present) and re-runs import
// analysis over each original source embedded in the map's "sourcesContent" array.
// Nested analysis runs with allowSourceMap: false, so recursion is one level deep.
// Data: URIs, missing files, and maps larger than MaxSourceMapBytes are skipped.
private static void TryAnalyzeSourceMap(string rootPath, string sourcePath, string content, List<NodeImportEdge> edges)
{
    var mapPath = ExtractSourceMapPath(content);
    if (string.IsNullOrWhiteSpace(mapPath) || mapPath.StartsWith("data:", StringComparison.OrdinalIgnoreCase))
    {
        return;
    }

    // Resolve the map relative to the directory of the referencing source file.
    // NOTE(review): mapPath may contain ".." and resolve outside rootPath — confirm
    // whether escaping the scan root should be rejected here.
    var sourceDirectory = Path.GetDirectoryName(Path.Combine(rootPath, sourcePath.Replace('/', Path.DirectorySeparatorChar))) ?? rootPath;
    var absoluteMapPath = Path.GetFullPath(Path.Combine(sourceDirectory, mapPath));

    if (!File.Exists(absoluteMapPath))
    {
        return;
    }

    try
    {
        // Size cap guards against pathological maps before any parsing happens.
        var info = new FileInfo(absoluteMapPath);
        if (info.Length > MaxSourceMapBytes)
        {
            return;
        }

        using var stream = File.OpenRead(absoluteMapPath);
        using var document = JsonDocument.Parse(stream);
        if (!document.RootElement.TryGetProperty("sources", out var sourcesElement) || sourcesElement.ValueKind != JsonValueKind.Array)
        {
            return;
        }

        // "sourcesContent" is optional; default (Undefined) means no embedded sources.
        var contents = document.RootElement.TryGetProperty("sourcesContent", out var sourcesContent) && sourcesContent.ValueKind == JsonValueKind.Array
            ? sourcesContent
            : default;

        for (var i = 0; i < sourcesElement.GetArrayLength(); i++)
        {
            var sourceEntry = sourcesElement[i];
            if (sourceEntry.ValueKind != JsonValueKind.String)
            {
                continue;
            }

            var mapSourcePath = sourceEntry.GetString();
            if (string.IsNullOrWhiteSpace(mapSourcePath))
            {
                continue;
            }

            // sources[i] and sourcesContent[i] are parallel arrays per the map format.
            var combinedSourcePath = NormalizeRelative(rootPath, Path.GetFullPath(Path.Combine(sourceDirectory, mapSourcePath)));
            var sourceContent = contents.ValueKind == JsonValueKind.Array && contents.GetArrayLength() > i && contents[i].ValueKind == JsonValueKind.String
                ? contents[i].GetString()
                : null;

            // Only sources whose full text is embedded can be analyzed offline.
            if (string.IsNullOrEmpty(sourceContent))
            {
                continue;
            }

            AnalyzeInternal(rootPath, combinedSourcePath, sourceContent!, allowSourceMap: false, edges);
        }
    }
    catch (IOException)
    {
        // ignore unreadable source maps
    }
    catch (JsonException)
    {
        // ignore malformed source maps
    }
}
|
||||
|
||||
// Returns a forward-slash path: relative to rootPath when absolutePath lies inside
// the root, otherwise the normalized absolute path itself.
private static string NormalizeRelative(string rootPath, string absolutePath)
{
    var normalizedRoot = Path.GetFullPath(rootPath);
    var normalizedPath = Path.GetFullPath(absolutePath);

    // Decide containment from the relative form instead of a raw StartsWith prefix
    // test: "/repo" must not be treated as the parent of "/repository/x", which the
    // plain string-prefix comparison would accept.
    var relative = Path.GetRelativePath(normalizedRoot, normalizedPath);
    var escapesRoot = relative == ".."
        || relative.StartsWith(".." + Path.DirectorySeparatorChar, StringComparison.Ordinal)
        || Path.IsPathRooted(relative);

    return escapesRoot
        ? normalizedPath.Replace('\\', '/')
        : relative.Replace('\\', '/');
}
|
||||
|
||||
// Extracts the sourceMappingURL target from bundled JS content, or null when absent.
private static string? ExtractSourceMapPath(string content)
{
    if (string.IsNullOrWhiteSpace(content))
    {
        return null;
    }

    // Scan right-to-left so the LAST occurrence wins: the source-map convention puts
    // the authoritative directive on the final line, and a bundle may contain decoy
    // "sourceMappingURL=" text earlier (e.g. inside an embedded string). The original
    // first-match behavior would pick the decoy. ([^\s]+ already excludes \n.)
    var match = Regex.Match(content, "sourceMappingURL=([^\\s]+)", RegexOptions.RightToLeft);
    return match.Success ? match.Groups[1].Value.Trim() : null;
}
|
||||
|
||||
private static bool IsRequire(CallExpression call)
|
||||
{
|
||||
return call.Callee is Identifier id && string.Equals(id.Name, "require", StringComparison.Ordinal)
|
||||
&& call.Arguments.Count == 1 && call.Arguments[0] is Literal { Value: string };
|
||||
&& call.Arguments.Count == 1;
|
||||
}
|
||||
|
||||
// Combines two confidence labels by taking the weaker (minimum) of the pair.
// Anything that is neither "high" nor "low" is ranked as medium.
private static string CombineConfidence(string left, string right)
{
    static int Rank(string level)
    {
        if (string.Equals(level, "high", StringComparison.Ordinal))
        {
            return 3;
        }

        if (string.Equals(level, "low", StringComparison.Ordinal))
        {
            return 1;
        }

        return 2;
    }

    var weakest = Math.Min(Rank(left), Rank(right));
    if (weakest == 3)
    {
        return "high";
    }

    return weakest == 2 ? "medium" : "low";
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,149 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
/// Deterministic, normalized view of a Node.js project's scan inputs: discovered
/// source roots, node_modules directories, packed tarballs (*.tgz), Yarn cache
/// directories, and whether Yarn Plug'n'Play markers are present.
/// </summary>
internal sealed record NodeProjectInput(
    IReadOnlyList<string> SourceRoots,
    IReadOnlyList<string> NodeModuleRoots,
    IReadOnlyList<string> Tarballs,
    IReadOnlyList<string> YarnCacheRoots,
    bool YarnPnpPresent);
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes scanner inputs for Node.js projects, layering workspace roots, container layers,
|
||||
/// pnpm stores, Yarn Plug'n'Play caches, and tarball sources into a deterministic view.
|
||||
/// </summary>
|
||||
internal static class NodeInputNormalizer
{
    // Directory names that may hold extracted container layers beneath the scan root.
    private static readonly string[] LayerRootCandidates = { "layers", ".layers", "layer" };

    /// <summary>
    /// Builds a <see cref="NodeProjectInput"/> for the analyzer context. All output
    /// collections are de-duplicated and ordinally sorted so the result is independent
    /// of filesystem enumeration order.
    /// </summary>
    public static NodeProjectInput Normalize(LanguageAnalyzerContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var sourceRoots = DiscoverSourceRoots(context.RootPath);
        // Case-insensitive sets de-duplicate paths that differ only by casing (Windows).
        var nodeModuleRoots = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var tarballs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var yarnCacheRoots = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var root in sourceRoots)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var nodeModules = Path.Combine(root, "node_modules");
            if (Directory.Exists(nodeModules))
            {
                nodeModuleRoots.Add(Path.GetFullPath(nodeModules));
            }

            foreach (var candidate in EnumerateTarballs(root))
            {
                tarballs.Add(candidate);
            }

            var yarnCache = Path.Combine(root, ".yarn", "cache");
            if (Directory.Exists(yarnCache))
            {
                yarnCacheRoots.Add(Path.GetFullPath(yarnCache));
            }
        }

        // Workspace members declared in the root manifest may carry their own node_modules.
        var workspaceIndex = NodeWorkspaceIndex.Create(context.RootPath);
        foreach (var workspace in workspaceIndex.GetMembers())
        {
            var absolute = Path.GetFullPath(Path.Combine(context.RootPath, workspace.Replace('/', Path.DirectorySeparatorChar)));
            var workspaceNodeModules = Path.Combine(absolute, "node_modules");
            if (Directory.Exists(workspaceNodeModules))
            {
                nodeModuleRoots.Add(workspaceNodeModules);
            }
        }

        var yarnPnpPresent = sourceRoots.Any(static root => HasYarnPnpMarkers(root));

        // Ordinal sorting here is what makes the record's contents deterministic.
        return new NodeProjectInput(
            SourceRoots: sourceRoots.OrderBy(static p => p, StringComparer.Ordinal).ToArray(),
            NodeModuleRoots: nodeModuleRoots.OrderBy(static p => p, StringComparer.Ordinal).ToArray(),
            Tarballs: tarballs.OrderBy(static p => p, StringComparer.Ordinal).ToArray(),
            YarnCacheRoots: yarnCacheRoots.OrderBy(static p => p, StringComparer.Ordinal).ToArray(),
            YarnPnpPresent: yarnPnpPresent);
    }

    // Collects candidate source roots: the scan root itself, children of known layer
    // directories, and any top-level directory whose name starts with "layer".
    private static IReadOnlyList<string> DiscoverSourceRoots(string rootPath)
    {
        var roots = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            Path.GetFullPath(rootPath)
        };

        foreach (var candidateRoot in LayerRootCandidates)
        {
            var path = Path.Combine(rootPath, candidateRoot);
            if (!Directory.Exists(path))
            {
                continue;
            }

            foreach (var child in SafeEnumerateDirectories(path))
            {
                roots.Add(Path.GetFullPath(child));
            }
        }

        foreach (var child in SafeEnumerateDirectories(rootPath))
        {
            var name = Path.GetFileName(child);
            if (name.StartsWith("layer", StringComparison.OrdinalIgnoreCase))
            {
                roots.Add(Path.GetFullPath(child));
            }
        }

        // NOTE(review): set enumeration order is unspecified; Normalize sorts before
        // exposing results — confirm no other caller relies on this order directly.
        return roots.ToImmutableArray();
    }

    // Enumerates immediate subdirectories, treating any I/O failure as "no children".
    private static IEnumerable<string> SafeEnumerateDirectories(string path)
    {
        try
        {
            return Directory.EnumerateDirectories(path);
        }
        catch
        {
            return Array.Empty<string>();
        }
    }

    // Recursively finds npm pack tarballs (*.tgz) under rootPath. Reparse points and
    // device files are skipped; enumeration failures yield an empty result.
    private static IEnumerable<string> EnumerateTarballs(string rootPath)
    {
        var options = new EnumerationOptions
        {
            RecurseSubdirectories = true,
            IgnoreInaccessible = true,
            AttributesToSkip = FileAttributes.ReparsePoint | FileAttributes.Device
        };

        try
        {
            return Directory
                .EnumerateFiles(rootPath, "*.tgz", options)
                .Select(Path.GetFullPath)
                .ToArray();
        }
        catch
        {
            return Array.Empty<string>();
        }
    }

    // Yarn Plug'n'Play is inferred from .pnp.cjs / .pnp.data.json files or a
    // .yarn/cache directory at the root.
    private static bool HasYarnPnpMarkers(string rootPath)
    {
        var pnpCjs = Path.Combine(rootPath, ".pnp.cjs");
        var pnpData = Path.Combine(rootPath, ".pnp.data.json");
        var yarnCache = Path.Combine(rootPath, ".yarn", "cache");

        return File.Exists(pnpCjs) || File.Exists(pnpData) || Directory.Exists(yarnCache);
    }
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
@@ -84,9 +86,11 @@ internal sealed class NodePackage
|
||||
|
||||
private readonly List<NodeEntrypoint> _entrypoints = new();
|
||||
private readonly List<NodeImportEdge> _imports = new();
|
||||
private readonly List<NodeImportResolution> _resolvedImports = new();
|
||||
|
||||
public IReadOnlyList<NodeEntrypoint> Entrypoints => _entrypoints;
|
||||
public IReadOnlyList<NodeImportEdge> Imports => _imports;
|
||||
public IReadOnlyList<NodeImportResolution> ResolvedImports => _resolvedImports;
|
||||
|
||||
public string RelativePathNormalized => string.IsNullOrEmpty(RelativePath) ? string.Empty : RelativePath.Replace(Path.DirectorySeparatorChar, '/');
|
||||
|
||||
@@ -150,11 +154,30 @@ internal sealed class NodePackage
|
||||
|
||||
foreach (var importEdge in _imports.OrderBy(static e => e.ComparisonKey, StringComparer.Ordinal))
|
||||
{
|
||||
var value = string.IsNullOrWhiteSpace(importEdge.Evidence)
|
||||
? $"{importEdge.TargetSpecifier} (conf:{importEdge.Confidence})"
|
||||
: $"{importEdge.TargetSpecifier} (conf:{importEdge.Confidence};{importEdge.Evidence})";
|
||||
|
||||
evidence.Add(new LanguageComponentEvidence(
|
||||
LanguageEvidenceKind.File,
|
||||
"node.import",
|
||||
importEdge.SourceFile,
|
||||
importEdge.TargetSpecifier,
|
||||
value,
|
||||
null));
|
||||
}
|
||||
|
||||
foreach (var resolution in _resolvedImports.OrderBy(static r => r.SourceFile, StringComparer.Ordinal)
|
||||
.ThenBy(static r => r.Specifier, StringComparer.Ordinal))
|
||||
{
|
||||
var locator = string.IsNullOrWhiteSpace(PackageJsonLocator)
|
||||
? "package.json"
|
||||
: PackageJsonLocator;
|
||||
|
||||
evidence.Add(new LanguageComponentEvidence(
|
||||
LanguageEvidenceKind.Metadata,
|
||||
"node.resolve",
|
||||
locator,
|
||||
$"{resolution.SourceFile}:{resolution.Specifier}->{resolution.ResolvedPath} ({resolution.ResolutionType};{resolution.Confidence})",
|
||||
null));
|
||||
}
|
||||
|
||||
@@ -322,7 +345,7 @@ internal sealed class NodePackage
|
||||
return;
|
||||
}
|
||||
|
||||
var edge = new NodeImportEdge(sourceFile.Replace(Path.DirectorySeparatorChar, '/'), targetSpecifier.Trim(), kind.Trim(), evidence);
|
||||
var edge = new NodeImportEdge(sourceFile.Replace(Path.DirectorySeparatorChar, '/'), targetSpecifier.Trim(), kind.Trim(), evidence, "high");
|
||||
if (_imports.Any(e => string.Equals(e.ComparisonKey, edge.ComparisonKey, StringComparison.Ordinal)))
|
||||
{
|
||||
return;
|
||||
@@ -331,6 +354,41 @@ internal sealed class NodePackage
|
||||
_imports.Add(edge);
|
||||
}
|
||||
|
||||
// Records an import edge for this package, ignoring nulls and edges whose
// ComparisonKey (source|specifier|kind) is already present.
public void AddImport(NodeImportEdge edge)
{
    if (edge is null)
    {
        return;
    }

    var alreadyRecorded = _imports.Any(existing => string.Equals(existing.ComparisonKey, edge.ComparisonKey, StringComparison.Ordinal));
    if (!alreadyRecorded)
    {
        _imports.Add(edge);
    }
}
|
||||
|
||||
// Replaces the package's resolved-import set wholesale. A null argument clears the
// set; null entries within the sequence are dropped.
public void SetResolvedImports(IEnumerable<NodeImportResolution> resolutions)
{
    _resolvedImports.Clear();

    if (resolutions is null)
    {
        return;
    }

    _resolvedImports.AddRange(resolutions.Where(static resolution => resolution is not null));
}
|
||||
|
||||
private static IEnumerable<string> ParseConditionSet(string conditionSet)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(conditionSet))
|
||||
|
||||
@@ -13,15 +13,15 @@ internal static class NodePackageCollector
|
||||
"__pycache__"
|
||||
};
|
||||
|
||||
public static IReadOnlyList<NodePackage> CollectPackages(LanguageAnalyzerContext context, NodeLockData lockData, CancellationToken cancellationToken)
|
||||
public static IReadOnlyList<NodePackage> CollectPackages(LanguageAnalyzerContext context, NodeLockData lockData, NodeProjectInput projectInput, CancellationToken cancellationToken)
|
||||
{
|
||||
var packages = new List<NodePackage>();
|
||||
var visited = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var pendingNodeModuleRoots = new List<string>();
|
||||
var nodeModuleRoots = new HashSet<string>(projectInput.NodeModuleRoots, StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
var rootPackageJson = Path.Combine(context.RootPath, "package.json");
|
||||
var workspaceIndex = NodeWorkspaceIndex.Create(context.RootPath);
|
||||
var yarnPnpPresent = HasYarnPnp(context.RootPath);
|
||||
var yarnPnpPresent = projectInput.YarnPnpPresent;
|
||||
|
||||
if (File.Exists(rootPackageJson))
|
||||
{
|
||||
@@ -46,25 +46,28 @@ internal static class NodePackageCollector
|
||||
var workspaceNodeModules = Path.Combine(workspaceAbsolute, "node_modules");
|
||||
if (Directory.Exists(workspaceNodeModules))
|
||||
{
|
||||
pendingNodeModuleRoots.Add(workspaceNodeModules);
|
||||
nodeModuleRoots.Add(workspaceNodeModules);
|
||||
}
|
||||
}
|
||||
|
||||
var nodeModules = Path.Combine(context.RootPath, "node_modules");
|
||||
TraverseDirectory(context, nodeModules, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
|
||||
foreach (var pendingRoot in pendingNodeModuleRoots.OrderBy(static path => path, StringComparer.Ordinal))
|
||||
foreach (var nodeModules in nodeModuleRoots.OrderBy(static path => path, StringComparer.Ordinal))
|
||||
{
|
||||
TraverseDirectory(context, pendingRoot, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
TraverseDirectory(context, nodeModules, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
}
|
||||
|
||||
TraverseTarballs(context, lockData, workspaceIndex, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
TraverseYarnPnpCache(context, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
TraverseTarballs(context, projectInput.Tarballs, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
TraverseYarnPnpCache(context, projectInput.YarnCacheRoots, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
|
||||
AppendDeclaredPackages(packages, lockData);
|
||||
|
||||
AttachImports(context, packages, cancellationToken);
|
||||
|
||||
var resolutions = NodeResolver.Resolve(context, projectInput, packages, cancellationToken);
|
||||
foreach (var (package, resolvedImports) in resolutions)
|
||||
{
|
||||
package.SetResolvedImports(resolvedImports);
|
||||
}
|
||||
|
||||
return packages;
|
||||
}
|
||||
|
||||
@@ -97,10 +100,11 @@ internal static class NodePackageCollector
|
||||
continue;
|
||||
}
|
||||
|
||||
var imports = NodeImportWalker.AnalyzeImports(context.GetRelativePath(file).Replace(Path.DirectorySeparatorChar, '/'), content);
|
||||
var relativeSource = context.GetRelativePath(file).Replace(Path.DirectorySeparatorChar, '/');
|
||||
var imports = NodeImportWalker.AnalyzeImports(context.RootPath, relativeSource, content);
|
||||
foreach (var edge in imports)
|
||||
{
|
||||
package.AddImport(edge.SourceFile, edge.TargetSpecifier, edge.Kind, edge.Evidence);
|
||||
package.AddImport(edge);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -248,21 +252,13 @@ internal static class NodePackageCollector
|
||||
|
||||
private static void TraverseTarballs(
|
||||
LanguageAnalyzerContext context,
|
||||
NodeLockData lockData,
|
||||
NodeWorkspaceIndex workspaceIndex,
|
||||
IEnumerable<string> tarballPaths,
|
||||
List<NodePackage> packages,
|
||||
HashSet<string> visited,
|
||||
bool yarnPnpPresent,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var enumerationOptions = new EnumerationOptions
|
||||
{
|
||||
RecurseSubdirectories = true,
|
||||
IgnoreInaccessible = true,
|
||||
AttributesToSkip = FileAttributes.ReparsePoint | FileAttributes.Device
|
||||
};
|
||||
|
||||
foreach (var tgzPath in Directory.EnumerateFiles(context.RootPath, "*.tgz", enumerationOptions))
|
||||
foreach (var tgzPath in tarballPaths)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
TryProcessTarball(context, tgzPath, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
@@ -353,6 +349,7 @@ internal static class NodePackageCollector
|
||||
|
||||
private static void TraverseYarnPnpCache(
|
||||
LanguageAnalyzerContext context,
|
||||
IEnumerable<string> cacheRoots,
|
||||
List<NodePackage> packages,
|
||||
HashSet<string> visited,
|
||||
bool yarnPnpPresent,
|
||||
@@ -363,12 +360,6 @@ internal static class NodePackageCollector
|
||||
return;
|
||||
}
|
||||
|
||||
var cacheDirectory = Path.Combine(context.RootPath, ".yarn", "cache");
|
||||
if (!Directory.Exists(cacheDirectory))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var enumerationOptions = new EnumerationOptions
|
||||
{
|
||||
RecurseSubdirectories = true,
|
||||
@@ -376,10 +367,18 @@ internal static class NodePackageCollector
|
||||
AttributesToSkip = FileAttributes.ReparsePoint | FileAttributes.Device
|
||||
};
|
||||
|
||||
foreach (var zipPath in Directory.EnumerateFiles(cacheDirectory, "*.zip", enumerationOptions))
|
||||
foreach (var cacheDirectory in cacheRoots)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
TryProcessZipball(context, zipPath, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
if (!Directory.Exists(cacheDirectory))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var zipPath in Directory.EnumerateFiles(cacheDirectory, "*.zip", enumerationOptions))
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
TryProcessZipball(context, zipPath, packages, visited, yarnPnpPresent, cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -730,22 +729,6 @@ internal static class NodePackageCollector
|
||||
return IgnoredDirectories.Any(ignored => string.Equals(name, ignored, StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
private static bool HasYarnPnp(string rootPath)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(rootPath))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var pnpCjs = Path.Combine(rootPath, ".pnp.cjs");
|
||||
var pnpData = Path.Combine(rootPath, ".pnp.data.json");
|
||||
var yarnCache = Path.Combine(rootPath, ".yarn", "cache");
|
||||
|
||||
return File.Exists(pnpCjs)
|
||||
|| File.Exists(pnpData)
|
||||
|| Directory.Exists(yarnCache);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<string> ExtractWorkspaceTargets(string relativeDirectory, JsonElement root, NodeWorkspaceIndex workspaceIndex)
|
||||
{
|
||||
var dependencies = workspaceIndex.ResolveWorkspaceTargets(relativeDirectory, TryGetProperty(root, "dependencies"));
|
||||
@@ -903,6 +886,24 @@ internal static class NodePackageCollector
|
||||
}
|
||||
}
|
||||
|
||||
if (root.TryGetProperty("imports", out var importsElement))
|
||||
{
|
||||
foreach (var importEntry in FlattenExports(importsElement, prefix: "imports"))
|
||||
{
|
||||
AddEntrypoint(importEntry.Path, importEntry.Conditions, binName: null, mainField: null, moduleField: null);
|
||||
}
|
||||
}
|
||||
|
||||
if (root.TryGetProperty("worker", out var workerElement) && workerElement.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
AddEntrypoint(workerElement.GetString(), "worker", binName: null, mainField: null, moduleField: null);
|
||||
}
|
||||
|
||||
if (HasElectronDependency(root) && root.TryGetProperty("main", out var electronMain) && electronMain.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
AddEntrypoint(electronMain.GetString(), "electron", binName: null, mainField: electronMain.GetString(), moduleField: null);
|
||||
}
|
||||
|
||||
DetectShebangEntrypoints(context, package, relativeDirectory);
|
||||
}
|
||||
|
||||
@@ -919,7 +920,7 @@ internal static class NodePackageCollector
|
||||
yield break;
|
||||
|
||||
case JsonValueKind.Object:
|
||||
foreach (var property in element.EnumerateObject())
|
||||
foreach (var property in element.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
|
||||
{
|
||||
var nextPrefix = string.IsNullOrWhiteSpace(prefix) ? property.Name : $"{prefix},{property.Name}";
|
||||
foreach (var nested in FlattenExports(property.Value, nextPrefix))
|
||||
@@ -934,6 +935,30 @@ internal static class NodePackageCollector
|
||||
}
|
||||
}
|
||||
|
||||
private static bool HasElectronDependency(JsonElement root)
|
||||
{
|
||||
static bool ContainsElectron(JsonElement? element)
|
||||
{
|
||||
if (element is null || element.Value.ValueKind != JsonValueKind.Object)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
foreach (var property in element.Value.EnumerateObject())
|
||||
{
|
||||
if (string.Equals(property.Name, "electron", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
return ContainsElectron(TryGetProperty(root, "dependencies"))
|
||||
|| ContainsElectron(TryGetProperty(root, "devDependencies"));
|
||||
}
|
||||
|
||||
private static void DetectShebangEntrypoints(LanguageAnalyzerContext context, NodePackage package, string relativeDirectory)
|
||||
{
|
||||
var baseDirectory = string.IsNullOrWhiteSpace(relativeDirectory)
|
||||
|
||||
@@ -0,0 +1,532 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
|
||||
/// Resolves Node.js import specifiers using a deterministic, offline-friendly subset of the
|
||||
/// Node resolution algorithm (CJS + ESM). Handles core modules, relative/absolute paths,
|
||||
/// self-references, exports/imports maps, and classic node_modules search with extension
|
||||
/// priorities.
|
||||
/// </summary>
|
||||
internal static class NodeResolver
|
||||
{
|
||||
private static readonly string[] ExtensionPriority = { string.Empty, ".js", ".mjs", ".cjs", ".ts", ".tsx", ".json", ".node" };
|
||||
private static readonly HashSet<string> CoreModules = new(
|
||||
new[]
|
||||
{
|
||||
"fs","path","http","https","net","tls","dns","os","stream","buffer","crypto","url",
|
||||
"util","events","child_process","cluster","readline","zlib","assert","querystring","perf_hooks",
|
||||
"inspector","module","timers","tty","vm","worker_threads","diagnostics_channel","process"
|
||||
}, StringComparer.Ordinal);
|
||||
|
||||
public static IReadOnlyList<(NodePackage Package, IReadOnlyList<NodeImportResolution> ResolvedImports)> Resolve(
|
||||
LanguageAnalyzerContext context,
|
||||
NodeProjectInput projectInput,
|
||||
IReadOnlyCollection<NodePackage> packages,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var results = new List<(NodePackage, IReadOnlyList<NodeImportResolution>)>();
|
||||
var packageJsonCache = new Dictionary<string, JsonDocument>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
foreach (var package in packages)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
var resolved = ResolvePackage(context, projectInput, package, packageJsonCache, cancellationToken);
|
||||
if (resolved.Count > 0)
|
||||
{
|
||||
results.Add((package, resolved));
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NodeImportResolution> ResolvePackage(
|
||||
LanguageAnalyzerContext context,
|
||||
NodeProjectInput projectInput,
|
||||
NodePackage package,
|
||||
Dictionary<string, JsonDocument> packageJsonCache,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (package.Imports.Count == 0)
|
||||
{
|
||||
return Array.Empty<NodeImportResolution>();
|
||||
}
|
||||
|
||||
var resolved = new List<NodeImportResolution>();
|
||||
var packageRoot = ResolvePackageRoot(context.RootPath, package.RelativePathNormalized);
|
||||
var searchOrder = BuildNodeModulesSearchOrder(packageRoot, projectInput.NodeModuleRoots);
|
||||
var packageJson = LoadPackageJson(packageRoot, packageJsonCache);
|
||||
|
||||
foreach (var edge in package.Imports)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var resolution = ResolveImport(context, packageRoot, searchOrder, packageJson, edge);
|
||||
resolved.Add(resolution);
|
||||
}
|
||||
|
||||
return resolved
|
||||
.OrderBy(static r => r.SourceFile, StringComparer.Ordinal)
|
||||
.ThenBy(static r => r.Specifier, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Resolves a single import edge by trying, in order: core modules, "#" imports-map
    /// entries, relative/absolute paths, package self-references, and finally bare
    /// specifiers against the node_modules search order. Falls back to an "unresolved"
    /// sentinel when every strategy fails. The branch order mirrors Node's own
    /// resolution precedence and must not be rearranged.
    /// </summary>
    private static NodeImportResolution ResolveImport(
        LanguageAnalyzerContext context,
        string packageRoot,
        IReadOnlyList<string> nodeModuleSearchOrder,
        JsonDocument? packageJson,
        NodeImportEdge edge)
    {
        // Core modules (fs, path, ...) resolve to themselves with unchanged confidence.
        if (IsCoreModule(edge.TargetSpecifier))
        {
            return new NodeImportResolution(edge.SourceFile, edge.TargetSpecifier, edge.TargetSpecifier, "core", edge.Confidence);
        }

        // "#foo" specifiers go through the package.json "imports" map; a mapped target
        // is then resolved like a normal path. Confidence is capped at "medium" because
        // the map lookup ignores conditional nuances. An unmapped "#" specifier falls
        // through to the remaining strategies.
        if (edge.TargetSpecifier.StartsWith('#'))
        {
            var mapped = ResolveImportsMap(packageRoot, packageJson, edge.TargetSpecifier);
            if (!string.IsNullOrWhiteSpace(mapped))
            {
                var resolvedPath = ResolvePath(context, packageRoot, mapped!, nodeModuleSearchOrder, packageJson, out var resolutionType);
                return resolvedPath is null
                    ? CreateUnresolved(edge)
                    : new NodeImportResolution(edge.SourceFile, edge.TargetSpecifier, resolvedPath, resolutionType, CombineConfidence(edge.Confidence, "medium"));
            }
        }

        // Relative ("./", "../") and root-absolute ("/") specifiers resolve directly.
        if (IsRelative(edge.TargetSpecifier) || edge.TargetSpecifier.StartsWith('/'))
        {
            var resolvedPath = ResolvePath(context, packageRoot, edge.TargetSpecifier, nodeModuleSearchOrder, packageJson, out var resolutionType);
            return resolvedPath is null
                ? CreateUnresolved(edge)
                : new NodeImportResolution(edge.SourceFile, edge.TargetSpecifier, resolvedPath, resolutionType, edge.Confidence);
        }

        // A package importing itself by its own name ("pkg/sub") resolves within its root.
        if (IsSelfReference(packageJson, edge.TargetSpecifier))
        {
            var selfTarget = TrimSelfPrefix(packageJson, edge.TargetSpecifier);
            var resolvedPath = ResolvePath(context, packageRoot, selfTarget, nodeModuleSearchOrder, packageJson, out var resolutionType);
            return resolvedPath is null
                ? CreateUnresolved(edge)
                : new NodeImportResolution(edge.SourceFile, edge.TargetSpecifier, resolvedPath, resolutionType, CombineConfidence(edge.Confidence, "medium"));
        }

        // Bare specifier: classic node_modules walk.
        var bareResolved = ResolveBareSpecifier(context, packageRoot, nodeModuleSearchOrder, edge.TargetSpecifier, out var bareType);
        if (bareResolved is not null)
        {
            return new NodeImportResolution(edge.SourceFile, edge.TargetSpecifier, bareResolved, bareType, edge.Confidence);
        }

        return CreateUnresolved(edge);
    }
|
||||
|
||||
    /// <summary>
    /// Resolves a bare specifier (e.g. "lodash" or "lodash/fp") by probing each
    /// node_modules root in search order: first as a package directory (delegating to
    /// the directory-entrypoint rules), then as a plain file with extension probing.
    /// <paramref name="resolutionType"/> reports how the match was made and stays
    /// "unresolved" when nothing is found (return value null).
    /// NOTE(review): <paramref name="packageRoot"/> is currently unused here — kept
    /// for signature symmetry with ResolvePath; confirm whether that is intentional.
    /// </summary>
    private static string? ResolveBareSpecifier(
        LanguageAnalyzerContext context,
        string packageRoot,
        IReadOnlyList<string> nodeModuleSearchOrder,
        string specifier,
        out string resolutionType)
    {
        resolutionType = "unresolved";

        foreach (var nodeModules in nodeModuleSearchOrder)
        {
            var candidate = Path.Combine(nodeModules, specifier);
            if (Directory.Exists(candidate))
            {
                var resolved = ResolveDirectoryEntrypoint(context, candidate, out resolutionType);
                if (resolved is not null)
                {
                    return resolved;
                }
            }

            // Not a package directory (or no entrypoint found): try it as a file.
            var fileResolved = ResolveFile(candidate);
            if (fileResolved is not null)
            {
                resolutionType = "file";
                return ToRelative(context, fileResolved);
            }
        }

        return null;
    }
|
||||
|
||||
    /// <summary>
    /// Resolves a path-like target: "/"-prefixed targets are anchored at the analysis
    /// root (not the machine's filesystem root), everything else at the package root.
    /// Tries an exact/extension-probed file first, then a directory entrypoint, and
    /// finally falls back to treating the target as a bare specifier against the
    /// node_modules search order. Returns a root-relative path or null;
    /// <paramref name="resolutionType"/> names the winning strategy
    /// ("file", "directory", "exports", "main", "module", ...).
    /// </summary>
    private static string? ResolvePath(
        LanguageAnalyzerContext context,
        string packageRoot,
        string target,
        IReadOnlyList<string> nodeModuleSearchOrder,
        JsonDocument? packageJson,
        out string resolutionType)
    {
        resolutionType = "unresolved";
        var normalized = target.Replace('/', Path.DirectorySeparatorChar);
        // Leading "/" is interpreted relative to the scanned root.
        var basePath = target.StartsWith('/')
            ? Path.Combine(context.RootPath, normalized.TrimStart(Path.DirectorySeparatorChar))
            : Path.GetFullPath(Path.Combine(packageRoot, normalized));

        var fileResolved = ResolveFile(basePath);
        if (fileResolved is not null)
        {
            resolutionType = "file";
            return ToRelative(context, fileResolved);
        }

        if (Directory.Exists(basePath))
        {
            var directoryResolved = ResolveDirectoryEntrypoint(context, basePath, out resolutionType, packageJson);
            if (directoryResolved is not null)
            {
                return directoryResolved;
            }
        }

        // Last resort: treat as bare specifier against node_modules search order
        var bare = ResolveBareSpecifier(context, packageRoot, nodeModuleSearchOrder, target, out resolutionType);
        if (bare is not null)
        {
            return bare;
        }

        return null;
    }
|
||||
|
||||
    /// <summary>
    /// Finds the entrypoint file for a package directory following Node's precedence:
    /// package.json "exports", then "main", then "module", and finally an index.*
    /// probe across the extension priority list. <paramref name="resolutionType"/>
    /// names the field that matched (it stays "directory" for the index fallback).
    /// NOTE(review): the <paramref name="packageJson"/> parameter is accepted but
    /// never read — the directory's own manifest is always loaded instead; confirm
    /// whether callers expect the passed manifest to take precedence.
    /// </summary>
    private static string? ResolveDirectoryEntrypoint(
        LanguageAnalyzerContext context,
        string directory,
        out string resolutionType,
        JsonDocument? packageJson = null)
    {
        resolutionType = "directory";

        var packageJsonPath = Path.Combine(directory, "package.json");
        JsonDocument? localPackage = null;

        if (File.Exists(packageJsonPath))
        {
            // NOTE(review): a throwaway cache is passed here, so this parse bypasses
            // the shared manifest cache and the resulting JsonDocument is never
            // disposed — likely a small leak worth confirming.
            localPackage = LoadPackageJson(packageJsonPath, new());
            var exportsTarget = ResolveExports(localPackage);
            if (!string.IsNullOrWhiteSpace(exportsTarget))
            {
                var resolved = ResolveFileOrDirectory(context, directory, exportsTarget!);
                if (resolved is not null)
                {
                    resolutionType = "exports";
                    return resolved;
                }
            }

            if (localPackage is not null && TryGetString(localPackage, "main", out var mainValue))
            {
                var resolved = ResolveFileOrDirectory(context, directory, mainValue!);
                if (resolved is not null)
                {
                    resolutionType = "main";
                    return resolved;
                }
            }

            if (localPackage is not null && TryGetString(localPackage, "module", out var moduleValue))
            {
                var resolved = ResolveFileOrDirectory(context, directory, moduleValue!);
                if (resolved is not null)
                {
                    resolutionType = "module";
                    return resolved;
                }
            }
        }

        // Fallback: index file with the highest-priority extension present.
        foreach (var ext in ExtensionPriority)
        {
            var indexCandidate = Path.Combine(directory, "index" + ext);
            if (File.Exists(indexCandidate))
            {
                return ToRelative(context, indexCandidate);
            }
        }

        return null;
    }
|
||||
|
||||
private static string? ResolveFileOrDirectory(LanguageAnalyzerContext context, string directory, string target)
|
||||
{
|
||||
var normalized = target.Replace('/', Path.DirectorySeparatorChar);
|
||||
var candidate = Path.GetFullPath(Path.Combine(directory, normalized));
|
||||
var file = ResolveFile(candidate);
|
||||
if (file is not null)
|
||||
{
|
||||
return ToRelative(context, file);
|
||||
}
|
||||
|
||||
if (Directory.Exists(candidate))
|
||||
{
|
||||
var index = ResolveDirectoryEntrypoint(context, candidate, out _);
|
||||
if (index is not null)
|
||||
{
|
||||
return index;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? ResolveExports(JsonDocument? packageJson)
|
||||
{
|
||||
if (packageJson is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!packageJson.RootElement.TryGetProperty("exports", out var exportsElement))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (exportsElement.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
return exportsElement.GetString();
|
||||
}
|
||||
|
||||
if (exportsElement.ValueKind != JsonValueKind.Object)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Prefer default > import > require > node > browser > worker for determinism
|
||||
var preferred = new[] { "default", "import", "require", "node", "browser", "worker" };
|
||||
foreach (var key in preferred)
|
||||
{
|
||||
if (exportsElement.TryGetProperty(key, out var prop) && prop.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
return prop.GetString();
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? ResolveImportsMap(string packageRoot, JsonDocument? packageJson, string specifier)
|
||||
{
|
||||
if (packageJson is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!packageJson.RootElement.TryGetProperty("imports", out var importsElement) || importsElement.ValueKind != JsonValueKind.Object)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!importsElement.TryGetProperty(specifier, out var mapped))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (mapped.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
return mapped.GetString();
|
||||
}
|
||||
|
||||
if (mapped.ValueKind == JsonValueKind.Object)
|
||||
{
|
||||
foreach (var property in mapped.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
|
||||
{
|
||||
if (property.Value.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
return property.Value.GetString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string? ResolveFile(string candidate)
|
||||
{
|
||||
if (File.Exists(candidate))
|
||||
{
|
||||
return candidate;
|
||||
}
|
||||
|
||||
foreach (var ext in ExtensionPriority.Skip(1))
|
||||
{
|
||||
var withExt = candidate + ext;
|
||||
if (File.Exists(withExt))
|
||||
{
|
||||
return withExt;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static JsonDocument? LoadPackageJson(string packageRoot, Dictionary<string, JsonDocument> cache)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(packageRoot))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var packageJsonPath = Directory.Exists(packageRoot)
|
||||
? Path.Combine(packageRoot, "package.json")
|
||||
: packageRoot;
|
||||
|
||||
if (!File.Exists(packageJsonPath))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (cache.TryGetValue(packageJsonPath, out var cached))
|
||||
{
|
||||
return cached;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var document = JsonDocument.Parse(File.ReadAllText(packageJsonPath));
|
||||
cache[packageJsonPath] = document;
|
||||
return document;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static IReadOnlyList<string> BuildNodeModulesSearchOrder(string packageRoot, IReadOnlyList<string> globalNodeModuleRoots)
|
||||
{
|
||||
var searchRoots = new List<string>();
|
||||
|
||||
var current = packageRoot;
|
||||
while (!string.IsNullOrWhiteSpace(current))
|
||||
{
|
||||
var nodeModules = Path.Combine(current, "node_modules");
|
||||
if (Directory.Exists(nodeModules))
|
||||
{
|
||||
searchRoots.Add(Path.GetFullPath(nodeModules));
|
||||
}
|
||||
|
||||
var parent = Directory.GetParent(current);
|
||||
if (parent is null || string.Equals(parent.FullName, current, StringComparison.Ordinal))
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
current = parent.FullName;
|
||||
}
|
||||
|
||||
foreach (var root in globalNodeModuleRoots)
|
||||
{
|
||||
if (Directory.Exists(root))
|
||||
{
|
||||
searchRoots.Add(Path.GetFullPath(root));
|
||||
}
|
||||
}
|
||||
|
||||
return searchRoots
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(static r => r, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static string ResolvePackageRoot(string rootPath, string relativeDirectory)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(relativeDirectory))
|
||||
{
|
||||
return rootPath;
|
||||
}
|
||||
|
||||
return Path.GetFullPath(Path.Combine(rootPath, relativeDirectory.Replace('/', Path.DirectorySeparatorChar)));
|
||||
}
|
||||
|
||||
private static bool IsCoreModule(string specifier) => CoreModules.Contains(specifier);
|
||||
|
||||
private static bool IsRelative(string specifier) => specifier.StartsWith("./") || specifier.StartsWith("../");
|
||||
|
||||
private static bool IsSelfReference(JsonDocument? packageJson, string specifier)
|
||||
{
|
||||
if (packageJson is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!packageJson.RootElement.TryGetProperty("name", out var nameElement))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var name = nameElement.GetString();
|
||||
if (string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return string.Equals(specifier, name, StringComparison.Ordinal)
|
||||
|| specifier.StartsWith(name + "/", StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static string TrimSelfPrefix(JsonDocument? packageJson, string specifier)
|
||||
{
|
||||
if (packageJson is null || !packageJson.RootElement.TryGetProperty("name", out var nameElement))
|
||||
{
|
||||
return specifier;
|
||||
}
|
||||
|
||||
var name = nameElement.GetString();
|
||||
if (string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
return specifier;
|
||||
}
|
||||
|
||||
return specifier.Length > name.Length + 1
|
||||
? specifier[(name.Length + 1)..]
|
||||
: "";
|
||||
}
|
||||
|
||||
private static bool TryGetString(JsonDocument document, string property, out string? value)
|
||||
{
|
||||
if (document.RootElement.TryGetProperty(property, out var element) && element.ValueKind == JsonValueKind.String)
|
||||
{
|
||||
value = element.GetString();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
private static string ToRelative(LanguageAnalyzerContext context, string absolutePath)
|
||||
{
|
||||
var relative = context.GetRelativePath(absolutePath);
|
||||
return string.IsNullOrWhiteSpace(relative) ? absolutePath.Replace('\\', '/') : relative.Replace('\\', '/');
|
||||
}
|
||||
|
||||
private static NodeImportResolution CreateUnresolved(NodeImportEdge edge)
|
||||
=> new(edge.SourceFile, edge.TargetSpecifier, "unresolved", "unresolved", "low");
|
||||
|
||||
private static string CombineConfidence(string left, string right)
|
||||
{
|
||||
static int Score(string value) => value switch
|
||||
{
|
||||
"high" => 3,
|
||||
"low" => 1,
|
||||
_ => 2
|
||||
};
|
||||
|
||||
var combined = Math.Min(Score(left), Score(right));
|
||||
return combined switch
|
||||
{
|
||||
3 => "high",
|
||||
2 => "medium",
|
||||
_ => "low"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,679 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal.Phase22;
|
||||
|
||||
internal static class NodePhase22Analyzer
|
||||
{
|
||||
private const long MaxMapBytes = 50L * 1024 * 1024; // 50 MB
|
||||
|
||||
public static NodePhase22Observation Analyze(LanguageAnalyzerContext context, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(context);
|
||||
|
||||
var records = new List<NodePhase22Record>();
|
||||
|
||||
AnalyzeBundles(context, records, cancellationToken);
|
||||
AnalyzeNativeAndWasm(context, records, cancellationToken);
|
||||
AnalyzeCapabilities(context, records, cancellationToken);
|
||||
|
||||
var ordered = records
|
||||
.Where(static r => r.Confidence is null || r.Confidence >= 0.4)
|
||||
.Where(static r => r.ResolverTrace.Count > 0)
|
||||
.Distinct(NodePhase22Record.Comparer)
|
||||
.OrderBy(static r => r.Type, StringComparer.Ordinal)
|
||||
.ThenBy(static r => r.Path ?? r.From ?? string.Empty, StringComparer.Ordinal)
|
||||
.ThenBy(static r => r.To ?? string.Empty, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
return new NodePhase22Observation(ordered);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Detects bundled JavaScript by scanning every .js/.mjs/.cjs file for a
    /// source-map reference. When a map with embedded sources parses successfully,
    /// one "component" record is emitted per recovered original source; an
    /// "entrypoint" record is always emitted for the bundle itself, at lower
    /// confidence when the map was missing or rejected. Unreadable scripts are
    /// skipped (best effort).
    /// NOTE(review): Format is hard-coded to "esm" for components and entrypoints,
    /// even for .cjs bundles — confirm whether that is intended.
    /// </summary>
    private static void AnalyzeBundles(LanguageAnalyzerContext context, List<NodePhase22Record> records, CancellationToken cancellationToken)
    {
        foreach (var scriptPath in EnumerateFiles(context.RootPath, [".js", ".mjs", ".cjs"], cancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();

            string content;
            try
            {
                content = File.ReadAllText(scriptPath);
            }
            catch (IOException)
            {
                continue;
            }

            // No sourceMappingURL: the file is not treated as a bundle at all.
            var mapRef = TryFindSourceMapReference(content);
            if (mapRef is null)
            {
                continue;
            }

            var bundlePath = NormalizePath(context, scriptPath);
            var resolverTrace = new List<string> { $"bundle:{bundlePath}" };

            var map = TryLoadSourceMap(context, scriptPath, mapRef, cancellationToken);
            if (map is not null)
            {
                // resolverTrace now holds [bundle, map]; index 1 below relies on this.
                resolverTrace.Add(map.MapTrace);

                // One component per original source recovered from the map.
                foreach (var source in map.Sources)
                {
                    var record = new NodePhase22Record(
                        Type: "component",
                        ComponentType: "pkg",
                        EdgeType: null,
                        Path: source,
                        From: null,
                        To: null,
                        Format: map.Format,
                        FromBundle: true,
                        Reason: "source-map",
                        Confidence: 0.87,
                        ResolverTrace: new[] { resolverTrace[0], resolverTrace[1], $"source:{source}" },
                        Exports: null,
                        Arch: null,
                        Platform: null);

                    records.Add(record);
                }
            }

            // Always emit entrypoint to signal bundle presence (even when map rejected)
            var entryTrace = new List<string>(resolverTrace);
            if (map is null && mapRef.Length > 0)
            {
                entryTrace.Add($"map:{mapRef}");
            }

            records.Add(new NodePhase22Record(
                Type: "entrypoint",
                ComponentType: null,
                EdgeType: null,
                Path: bundlePath,
                From: null,
                To: null,
                Format: "esm",
                FromBundle: null,
                Reason: "bundle-entrypoint",
                Confidence: map is null ? 0.51 : 0.88,
                ResolverTrace: entryTrace,
                Exports: null,
                Arch: null,
                Platform: null));
        }
    }
|
||||
|
||||
    /// <summary>
    /// Loads a source map from either an inline base64 data: URI or a file path
    /// resolved relative to the bundle script. External maps larger than
    /// <see cref="MaxMapBytes"/> are rejected, as are missing, unreadable, or
    /// malformed maps — null is returned in every failure case.
    /// NOTE(review): a relative mapReference such as "../../x.map" can resolve
    /// outside the scanned root — confirm whether traversal beyond context.RootPath
    /// is acceptable. The cancellationToken parameter is currently unused.
    /// </summary>
    private static SourceMapResult? TryLoadSourceMap(LanguageAnalyzerContext context, string scriptPath, string mapReference, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(mapReference))
        {
            return null;
        }

        try
        {
            // Inline maps: decode the base64 payload directly, no file I/O required.
            if (mapReference.StartsWith("data:application/json;base64,", StringComparison.OrdinalIgnoreCase))
            {
                var base64 = mapReference["data:application/json;base64,".Length..];
                var bytes = Convert.FromBase64String(base64);
                using var inlineDoc = JsonDocument.Parse(bytes);
                return BuildSourceMapResult(inlineDoc.RootElement, "map:inline", isInline: true);
            }

            // External maps resolve relative to the bundle's directory.
            var mapPath = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(scriptPath)!, mapReference));
            if (!File.Exists(mapPath))
            {
                return null;
            }

            // Size cap protects against pathological maps.
            var info = new FileInfo(mapPath);
            if (info.Length > MaxMapBytes)
            {
                return null;
            }

            using var stream = File.OpenRead(mapPath);
            using var mapDoc = JsonDocument.Parse(stream);
            return BuildSourceMapResult(mapDoc.RootElement, $"map:{NormalizePath(context, mapPath)}", isInline: false);
        }
        catch (FormatException)
        {
            // Bad base64 payload.
            return null;
        }
        catch (JsonException)
        {
            // Malformed JSON map.
            return null;
        }
        catch (IOException)
        {
            // Unreadable map file.
            return null;
        }
    }
|
||||
|
||||
private static SourceMapResult? BuildSourceMapResult(JsonElement root, string mapTrace, bool isInline)
|
||||
{
|
||||
if (!root.TryGetProperty("sources", out var sourcesElement) || sourcesElement.ValueKind != JsonValueKind.Array)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!root.TryGetProperty("sourcesContent", out var sourcesContent) || sourcesContent.ValueKind != JsonValueKind.Array)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var sources = new List<string>();
|
||||
foreach (var item in sourcesElement.EnumerateArray())
|
||||
{
|
||||
if (item.ValueKind != JsonValueKind.String)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalized = NormalizeSourceMapPath(item.GetString());
|
||||
if (!string.IsNullOrWhiteSpace(normalized))
|
||||
{
|
||||
sources.Add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
if (sources.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new SourceMapResult(sources, mapTrace, Format: "esm", isInline);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Emits one "component" record per discovered native addon (.node) and
    /// WebAssembly (.wasm) file under the analysis root. Native addons additionally
    /// get best-effort arch/platform metadata from header sniffing; wasm files carry
    /// no arch/platform.
    /// </summary>
    private static void AnalyzeNativeAndWasm(LanguageAnalyzerContext context, List<NodePhase22Record> records, CancellationToken cancellationToken)
    {
        var nativeFiles = EnumerateFiles(context.RootPath, [".node"], cancellationToken).ToArray();
        var wasmFiles = EnumerateFiles(context.RootPath, [".wasm"], cancellationToken).ToArray();

        foreach (var nativePath in nativeFiles)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var (arch, platform) = TryDetectNativeMetadata(nativePath);
            var normalized = NormalizePath(context, nativePath);

            records.Add(new NodePhase22Record(
                Type: "component",
                ComponentType: "native",
                EdgeType: null,
                Path: normalized,
                From: null,
                To: null,
                Format: null,
                FromBundle: null,
                Reason: "native-addon-file",
                Confidence: 0.82,
                ResolverTrace: new[] { $"file:{normalized}" },
                Exports: null,
                Arch: arch,
                Platform: platform));
        }

        foreach (var wasmPath in wasmFiles)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var normalized = NormalizePath(context, wasmPath);
            records.Add(new NodePhase22Record(
                Type: "component",
                ComponentType: "wasm",
                EdgeType: null,
                Path: normalized,
                From: null,
                To: null,
                Format: null,
                FromBundle: null,
                Reason: "wasm-file",
                Confidence: 0.80,
                ResolverTrace: new[] { $"file:{normalized}" },
                Exports: null,
                Arch: null,
                Platform: null));
        }
    }
|
||||
|
||||
private static void AnalyzeCapabilities(LanguageAnalyzerContext context, List<NodePhase22Record> records, CancellationToken cancellationToken)
|
||||
{
|
||||
foreach (var scriptPath in EnumerateFiles(context.RootPath, [".js", ".mjs", ".cjs", ".ts", ".tsx"], cancellationToken))
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
string content;
|
||||
try
|
||||
{
|
||||
content = File.ReadAllText(scriptPath);
|
||||
}
|
||||
catch (IOException)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalizedSource = NormalizePath(context, scriptPath);
|
||||
|
||||
foreach (var edge in ExtractNativeEdges(content, normalizedSource, scriptPath, context))
|
||||
{
|
||||
records.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var edge in ExtractWasmEdges(content, normalizedSource, scriptPath, context))
|
||||
{
|
||||
records.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var capability in ExtractCapabilityEdges(content, normalizedSource))
|
||||
{
|
||||
records.Add(capability);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Finds "process.dlopen" call sites in script content and yields a
    /// "native-addon" edge for each occurrence whose first argument is a string
    /// literal (non-literal arguments are skipped). Relative literal targets are
    /// normalized against the importing file's directory.
    /// NOTE(review): this is a plain substring scan — matches inside comments or
    /// strings are not excluded.
    /// </summary>
    private static IEnumerable<NodePhase22Record> ExtractNativeEdges(
        string content,
        string sourcePath,
        string sourceAbsolute,
        LanguageAnalyzerContext context)
    {
        var pattern = "process.dlopen";
        var index = content.IndexOf(pattern, StringComparison.Ordinal);
        while (index >= 0)
        {
            // Only emit an edge when a literal argument could be extracted.
            var argument = TryExtractLiteral(content, index + pattern.Length);
            if (!string.IsNullOrWhiteSpace(argument))
            {
                var target = NormalizeTarget(context, sourceAbsolute, argument!);
                yield return new NodePhase22Record(
                    Type: "edge",
                    ComponentType: null,
                    EdgeType: "native-addon",
                    Path: null,
                    From: sourcePath,
                    To: target,
                    Format: null,
                    FromBundle: null,
                    Reason: "native-dlopen-string",
                    Confidence: 0.76,
                    ResolverTrace: new[] { $"source:{sourcePath}", $"call:process.dlopen('{argument}')" },
                    Exports: null,
                    Arch: null,
                    Platform: null);
            }

            // Continue scanning after the current occurrence.
            index = content.IndexOf(pattern, index + pattern.Length, StringComparison.Ordinal);
        }
    }
|
||||
|
||||
    /// <summary>
    /// Finds "WebAssembly.instantiate" call sites in script content and yields a
    /// "wasm" edge for each occurrence whose first argument is a string literal.
    /// Relative literal targets are normalized against the importing file's
    /// directory.
    /// NOTE(review): structurally a near-duplicate of ExtractNativeEdges — a shared
    /// pattern-scanning helper would remove the duplication; same substring-scan
    /// caveats apply (comments/strings are not excluded).
    /// </summary>
    private static IEnumerable<NodePhase22Record> ExtractWasmEdges(
        string content,
        string sourcePath,
        string sourceAbsolute,
        LanguageAnalyzerContext context)
    {
        var pattern = "WebAssembly.instantiate";
        var index = content.IndexOf(pattern, StringComparison.Ordinal);
        while (index >= 0)
        {
            // Only emit an edge when a literal argument could be extracted.
            var argument = TryExtractLiteral(content, index + pattern.Length);
            if (!string.IsNullOrWhiteSpace(argument))
            {
                var target = NormalizeTarget(context, sourceAbsolute, argument!);
                yield return new NodePhase22Record(
                    Type: "edge",
                    ComponentType: null,
                    EdgeType: "wasm",
                    Path: null,
                    From: sourcePath,
                    To: target,
                    Format: null,
                    FromBundle: null,
                    Reason: "wasm-import",
                    Confidence: 0.74,
                    ResolverTrace: new[] { $"source:{sourcePath}", $"call:WebAssembly.instantiate('{argument}')" },
                    Exports: null,
                    Arch: null,
                    Platform: null);
            }

            // Continue scanning after the current occurrence.
            index = content.IndexOf(pattern, index + pattern.Length, StringComparison.Ordinal);
        }
    }
|
||||
|
||||
    /// <summary>
    /// Emits at most one capability edge per marker substring found anywhere in the
    /// file content (process spawning, worker threads, internal bindings, vm, fs).
    /// NOTE(review): these are naive substring checks — "vm." can false-positive on
    /// identifiers like "llvm." or "myVm.", and "child_process" in a comment also
    /// matches. The "child_process.execFile" target label asserts a specific API
    /// that was never actually verified; confirm the intended granularity.
    /// </summary>
    private static IEnumerable<NodePhase22Record> ExtractCapabilityEdges(string content, string sourcePath)
    {
        if (content.Contains("child_process", StringComparison.Ordinal))
        {
            yield return Capability(sourcePath, "child_process.execFile", "capability-child-process");
        }

        if (content.Contains("worker_threads", StringComparison.Ordinal))
        {
            yield return Capability(sourcePath, "worker_threads", "capability-worker");
        }

        if (content.Contains("process.binding", StringComparison.Ordinal))
        {
            yield return Capability(sourcePath, "process.binding", "capability-binding");
        }

        if (content.Contains("vm.", StringComparison.Ordinal))
        {
            yield return Capability(sourcePath, "vm", "capability-vm");
        }

        if (content.Contains("fs.promises", StringComparison.Ordinal))
        {
            yield return Capability(sourcePath, "fs.promises", "capability-fs-promises");
        }
    }
|
||||
|
||||
private static NodePhase22Record Capability(string sourcePath, string target, string reason)
|
||||
{
|
||||
return new NodePhase22Record(
|
||||
Type: "edge",
|
||||
ComponentType: null,
|
||||
EdgeType: "capability",
|
||||
Path: null,
|
||||
From: sourcePath,
|
||||
To: target,
|
||||
Format: null,
|
||||
FromBundle: null,
|
||||
Reason: reason,
|
||||
Confidence: 0.70,
|
||||
ResolverTrace: new[] { $"source:{sourcePath}", $"call:{target}" },
|
||||
Exports: null,
|
||||
Arch: null,
|
||||
Platform: null);
|
||||
}
|
||||
|
||||
// Normalizes a require/import argument: bare specifiers (package names,
// absolute ids) are only slash-normalized; relative specifiers ("./x",
// "../x") are resolved against the importing file's directory and mapped
// to a rooted, '/'-separated repo-relative path.
private static string NormalizeTarget(LanguageAnalyzerContext context, string sourceAbsolute, string argument)
{
    if (string.IsNullOrWhiteSpace(argument))
    {
        return argument;
    }

    if (!argument.StartsWith('.'))
    {
        // Not a relative specifier — never resolve against the filesystem.
        return argument.Replace('\\', '/');
    }

    var directory = Path.GetDirectoryName(sourceAbsolute);
    // Fall back to the scan root when the source path has no directory part.
    var anchor = string.IsNullOrWhiteSpace(directory) ? context.RootPath : directory!;
    var resolved = Path.GetFullPath(Path.Combine(anchor, argument));
    var repoRelative = context.GetRelativePath(resolved).Replace('\\', '/');

    return repoRelative.StartsWith('/') ? repoRelative : "/" + repoRelative;
}
|
||||
|
||||
// Sniffs the first bytes of a native artifact to classify architecture and
// platform. Recognizes ELF (linux), PE/MZ (windows) and Mach-O (macos).
// Returns (null, null) when the file is unreadable or the magic is unknown.
private static (string? Arch, string? Platform) TryDetectNativeMetadata(string path)
{
    try
    {
        Span<byte> header = stackalloc byte[64];
        using var stream = File.OpenRead(path);
        var read = stream.Read(header);

        // ELF: 0x7F 'E' 'L' 'F'; byte 4 = class (1 = 32-bit, 2 = 64-bit),
        // byte 5 = data encoding (1 = LE, 2 = BE), e_machine at offset 0x12.
        if (read >= 5 && header[0] == 0x7F && header[1] == (byte)'E' && header[2] == (byte)'L' && header[3] == (byte)'F')
        {
            var elfClass = header[4];
            ushort machine = 0;
            if (read > 0x13)
            {
                // Honor EI_DATA: big-endian ELF stores e_machine big-endian.
                machine = header[5] == 2
                    ? BinaryPrimitives.ReadUInt16BigEndian(header[0x12..])
                    : BinaryPrimitives.ReadUInt16LittleEndian(header[0x12..]);
            }

            // Map known e_machine values; otherwise keep the previous
            // class-based guess so existing behavior is preserved.
            var arch = machine switch
            {
                0x3E => "x86_64", // EM_X86_64
                0xB7 => "arm64",  // EM_AARCH64
                0x28 => "arm",    // EM_ARM
                0x03 => "x86",    // EM_386
                _ => elfClass == 2 ? "x86_64" : "x86"
            };

            return (arch, "linux");
        }

        // PE/COFF binaries start with the DOS "MZ" stub.
        if (read >= 2 && header[0] == 0x4D && header[1] == 0x5A)
        {
            return ("x86_64", "windows");
        }

        // Mach-O magics: FEEDFACE / FEEDFACF big-endian begin FE ED; their
        // little-endian forms are CE FA ED FE (32-bit) and CF FA ED FE
        // (64-bit). The previous check missed CF FA — the common 64-bit case.
        if (read >= 4
            && ((header[0] == 0xFE && header[1] == 0xED)
                || ((header[0] == 0xCE || header[0] == 0xCF) && header[1] == 0xFA)))
        {
            return ("x86_64", "macos");
        }
    }
    catch (IOException)
    {
        // ignore unreadable native file
    }
    catch (UnauthorizedAccessException)
    {
        // ignore files we lack permission to open
    }

    return (null, null);
}
|
||||
|
||||
// Converts an absolute path into a rooted, forward-slash relative path
// within the analysis root.
private static string NormalizePath(LanguageAnalyzerContext context, string absolutePath)
{
    var normalized = context.GetRelativePath(absolutePath).Replace('\\', '/');
    return normalized.StartsWith('/') ? normalized : "/" + normalized;
}
|
||||
|
||||
// Canonicalizes a path taken from a source map: drops webpack:// scheme
// prefixes, flips backslashes, strips every "../" segment, and guarantees
// a single leading '/'. Null/blank input maps to the empty string.
private static string NormalizeSourceMapPath(string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return string.Empty;
    }

    var cleaned = value
        .Replace("webpack://", string.Empty, StringComparison.OrdinalIgnoreCase)
        .Replace('\\', '/');

    // Remove parent-directory escapes entirely; bundler-relative maps must
    // not be able to point outside the virtual root.
    while (cleaned.Contains("../", StringComparison.Ordinal))
    {
        cleaned = cleaned.Replace("../", string.Empty, StringComparison.Ordinal);
    }

    return cleaned.StartsWith('/') ? cleaned : "/" + cleaned.TrimStart('/');
}
|
||||
|
||||
// Lazily yields files under root matching any of the given extensions,
// one extension at a time. Recurses, tolerates inaccessible entries, and
// skips reparse points/devices so symlink cycles cannot hang the scan.
private static IEnumerable<string> EnumerateFiles(string root, string[] extensions, CancellationToken cancellationToken)
{
    var enumeration = new EnumerationOptions
    {
        RecurseSubdirectories = true,
        IgnoreInaccessible = true,
        AttributesToSkip = FileAttributes.ReparsePoint | FileAttributes.Device
    };

    foreach (var extension in extensions)
    {
        var pattern = "*" + extension;
        foreach (var candidate in Directory.EnumerateFiles(root, pattern, enumeration))
        {
            cancellationToken.ThrowIfCancellationRequested();
            yield return candidate;
        }
    }
}
|
||||
|
||||
// Extracts the first single- or double-quoted literal at or after
// startIndex, whichever quote appears first. Returns the trimmed literal
// body, or null when no complete quoted span exists.
private static string? TryExtractLiteral(string content, int startIndex)
{
    var doubleQuote = content.IndexOf('"', startIndex);
    var singleQuote = content.IndexOf('\'', startIndex);

    // Prefer whichever quote character occurs first.
    var open = doubleQuote;
    if (open < 0 || (singleQuote >= 0 && singleQuote < open))
    {
        open = singleQuote;
    }

    if (open < 0 || open >= content.Length - 1)
    {
        return null;
    }

    // The closing quote must match the opening one; -1 (not found) fails
    // the close > open check below.
    var close = content.IndexOf(content[open], open + 1);
    if (close <= open)
    {
        return null;
    }

    return content.Substring(open + 1, close - open - 1).Trim();
}
|
||||
|
||||
// Finds the last sourceMappingURL= directive in the file and returns its
// value, or null when absent. Handles both //# line comments and
// /*# ... */ block comments.
private static string? TryFindSourceMapReference(string content)
{
    const string marker = "sourceMappingURL=";
    var index = content.LastIndexOf(marker, StringComparison.OrdinalIgnoreCase);
    if (index < 0)
    {
        return null;
    }

    var start = index + marker.Length;
    var end = content.IndexOf('\n', start);
    if (end < 0)
    {
        end = content.Length;
    }

    // Strip the block-comment terminator, then trim once more: TrimEnd('*','/')
    // alone leaves the whitespace that preceded "*/" (e.g. "app.js.map ").
    return content[start..end].Trim().TrimEnd('*', '/').TrimEnd();
}
|
||||
}
|
||||
|
||||
/// <summary>
/// A single Phase 22 observation row. <see cref="Type"/> selects the shape:
/// "entrypoint" and "component" rows populate <see cref="Path"/>, while
/// "edge" rows populate <see cref="From"/>/<see cref="To"/>. Unused fields
/// stay null so they are omitted from the NDJSON output.
/// </summary>
internal sealed record NodePhase22Record(
    string Type,
    string? ComponentType,
    string? EdgeType,
    string? Path,
    string? From,
    string? To,
    string? Format,
    bool? FromBundle,
    string? Reason,
    double? Confidence,
    IReadOnlyList<string> ResolverTrace,
    IReadOnlyList<string>? Exports,
    string? Arch,
    string? Platform)
{
    /// <summary>
    /// Structural comparer for de-duplication: compares the identity fields
    /// (type, kinds, path-or-from, target, reason, resolver trace) but not
    /// auxiliary metadata such as confidence, exports, or arch/platform.
    /// </summary>
    public static IEqualityComparer<NodePhase22Record> Comparer { get; } = new NodePhase22RecordComparer();

    private sealed class NodePhase22RecordComparer : IEqualityComparer<NodePhase22Record>
    {
        public bool Equals(NodePhase22Record? x, NodePhase22Record? y)
        {
            if (ReferenceEquals(x, y))
            {
                return true;
            }

            if (x is null || y is null)
            {
                return false;
            }

            // Path ?? From collapses component rows (Path) and edge rows (From)
            // onto a single location key; GetHashCode mirrors this coalescing
            // so the comparer stays hash-consistent.
            return string.Equals(x.Type, y.Type, StringComparison.Ordinal)
                && string.Equals(x.ComponentType, y.ComponentType, StringComparison.Ordinal)
                && string.Equals(x.EdgeType, y.EdgeType, StringComparison.Ordinal)
                && string.Equals(x.Path ?? x.From, y.Path ?? y.From, StringComparison.Ordinal)
                && string.Equals(x.To, y.To, StringComparison.Ordinal)
                && string.Equals(x.Reason, y.Reason, StringComparison.Ordinal)
                && x.ResolverTrace.SequenceEqual(y.ResolverTrace);
        }

        public int GetHashCode(NodePhase22Record obj)
        {
            var hash = new HashCode();
            hash.Add(obj.Type, StringComparer.Ordinal);
            hash.Add(obj.ComponentType, StringComparer.Ordinal);
            hash.Add(obj.EdgeType, StringComparer.Ordinal);
            // Must match the Path ?? From coalescing used in Equals above.
            hash.Add(obj.Path ?? obj.From, StringComparer.Ordinal);
            hash.Add(obj.To, StringComparer.Ordinal);
            hash.Add(obj.Reason, StringComparer.Ordinal);
            foreach (var step in obj.ResolverTrace)
            {
                hash.Add(step, StringComparer.Ordinal);
            }

            return hash.ToHashCode();
        }
    }
}
|
||||
|
||||
/// <summary>
/// Aggregates Phase 22 records and renders them as deterministic NDJSON
/// (one JSON object per line, camelCase, nulls omitted) plus a lowercase
/// hex SHA-256 of that payload for evidence attestation.
/// </summary>
internal sealed class NodePhase22Observation
{
    // Cached once: JsonSerializerOptions is expensive to create and builds
    // per-type metadata caches on first use (see CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    public NodePhase22Observation(IReadOnlyList<NodePhase22Record> records)
    {
        Records = records ?? Array.Empty<NodePhase22Record>();
    }

    /// <summary>All observation rows, in the order supplied by the analyzer.</summary>
    public IReadOnlyList<NodePhase22Record> Records { get; }

    public bool HasRecords => Records.Count > 0;

    public int EntrypointCount => Records.Count(r => string.Equals(r.Type, "entrypoint", StringComparison.Ordinal));

    public int ComponentCount => Records.Count(r => string.Equals(r.Type, "component", StringComparison.Ordinal));

    public int EdgeCount => Records.Count(r => string.Equals(r.Type, "edge", StringComparison.Ordinal));

    public int NativeCount => Records.Count(r => string.Equals(r.ComponentType, "native", StringComparison.Ordinal));

    public int WasmCount => Records.Count(r => string.Equals(r.ComponentType, "wasm", StringComparison.Ordinal));

    /// <summary>
    /// Serializes the records as NDJSON. Separators are written as '\n'
    /// explicitly — the previous AppendLine used Environment.NewLine, which
    /// made the payload (and <see cref="ComputeSha256"/>) differ between
    /// Windows and Unix hosts. Returns the empty string when there are no
    /// records.
    /// </summary>
    public string ToNdjson()
    {
        if (!HasRecords)
        {
            return string.Empty;
        }

        var builder = new StringBuilder();
        foreach (var record in Records)
        {
            builder.Append(JsonSerializer.Serialize(record, SerializerOptions));
            builder.Append('\n');
        }

        // Drop the trailing separator so the payload has no final newline.
        return builder.ToString().TrimEnd();
    }

    /// <summary>Lowercase hex SHA-256 of the NDJSON payload; empty when there are no records.</summary>
    public string ComputeSha256()
    {
        if (!HasRecords)
        {
            return string.Empty;
        }

        var bytes = Encoding.UTF8.GetBytes(ToNdjson());
        return Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
    }

    /// <summary>
    /// Summary counters surfaced as component metadata. The optional
    /// native/wasm counters are emitted only when non-zero.
    /// </summary>
    public IEnumerable<KeyValuePair<string, string?>> BuildMetadata()
    {
        yield return new KeyValuePair<string, string?>("node.observation.entrypoints", EntrypointCount.ToString(CultureInfo.InvariantCulture));
        yield return new KeyValuePair<string, string?>("node.observation.components", ComponentCount.ToString(CultureInfo.InvariantCulture));
        yield return new KeyValuePair<string, string?>("node.observation.edges", EdgeCount.ToString(CultureInfo.InvariantCulture));
        if (NativeCount > 0)
        {
            yield return new KeyValuePair<string, string?>("node.observation.native", NativeCount.ToString(CultureInfo.InvariantCulture));
        }

        if (WasmCount > 0)
        {
            yield return new KeyValuePair<string, string?>("node.observation.wasm", WasmCount.ToString(CultureInfo.InvariantCulture));
        }
    }
}
|
||||
|
||||
// Result of source-map extraction: the de-bundled source paths, a trace of how the map was located, the map format, and whether the map was inline (data: URL) rather than an external file.
internal sealed record SourceMapResult(IReadOnlyList<string> Sources, string MapTrace, string Format, bool IsInline);
|
||||
@@ -0,0 +1,175 @@
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal.Phase22;
|
||||
|
||||
/// <summary>
/// Converts a <see cref="NodePhase22Observation"/> into component records:
/// one envelope record carrying the full NDJSON payload (plus its SHA-256),
/// followed by one record per component/edge/entrypoint row.
/// </summary>
internal static class NodePhase22Exporter
{
    /// <summary>
    /// Builds the full record list for an observation; returns an empty
    /// array when the observation is null or carries no rows.
    /// </summary>
    public static IReadOnlyList<LanguageComponentRecord> ToComponentRecords(NodePhase22Observation observation)
    {
        if (observation is null || !observation.HasRecords)
        {
            return Array.Empty<LanguageComponentRecord>();
        }

        var records = new List<LanguageComponentRecord>();

        // Observation envelope: the whole NDJSON payload rides along as
        // derived evidence, with its SHA-256 as the evidence hash.
        var ndjson = observation.ToNdjson();
        var sha256 = observation.ComputeSha256();

        // NOTE(review): the envelope uses analyzerId "node" while the
        // per-row records below use "node-phase22" — confirm intentional.
        records.Add(LanguageComponentRecord.FromExplicitKey(
            analyzerId: "node",
            componentKey: "observation::node-phase22",
            purl: null,
            name: "Node Observation (Phase 22)",
            version: null,
            type: "node-observation",
            metadata: observation.BuildMetadata(),
            evidence: new[]
            {
                new LanguageComponentEvidence(
                    LanguageEvidenceKind.Derived,
                    "node.observation",
                    "phase22.ndjson",
                    ndjson,
                    string.IsNullOrWhiteSpace(sha256) ? null : sha256)
            },
            usedByEntrypoint: false));

        // Fan each row out to its specific converter; unknown row types are
        // silently skipped.
        foreach (var record in observation.Records)
        {
            if (string.Equals(record.Type, "component", StringComparison.Ordinal))
            {
                records.Add(ConvertComponent(record));
            }
            else if (string.Equals(record.Type, "edge", StringComparison.Ordinal))
            {
                records.Add(ConvertEdge(record));
            }
            else if (string.Equals(record.Type, "entrypoint", StringComparison.Ordinal))
            {
                records.Add(ConvertEntrypoint(record));
            }
        }

        return records;
    }

    /// <summary>
    /// Maps a "component" row to a node:native / node:wasm / node:bundle
    /// record, copying optional fields into metadata only when present.
    /// </summary>
    private static LanguageComponentRecord ConvertComponent(NodePhase22Record record)
    {
        var typeTag = record.ComponentType switch
        {
            "native" => "node:native",
            "wasm" => "node:wasm",
            _ => "node:bundle"
        };

        var metadata = new List<KeyValuePair<string, string?>>();
        if (!string.IsNullOrWhiteSpace(record.Reason)) metadata.Add(new("reason", record.Reason));
        if (!string.IsNullOrWhiteSpace(record.Format)) metadata.Add(new("format", record.Format));
        if (record.Confidence is double conf) metadata.Add(new("confidence", conf.ToString("0.00", CultureInfo.InvariantCulture)));
        if (record.FromBundle is bool fromBundle) metadata.Add(new("fromBundle", fromBundle ? "true" : "false"));
        if (record.ResolverTrace.Count > 0) metadata.Add(new("trace", string.Join("|", record.ResolverTrace)));
        if (!string.IsNullOrWhiteSpace(record.Arch)) metadata.Add(new("arch", record.Arch));
        if (!string.IsNullOrWhiteSpace(record.Platform)) metadata.Add(new("platform", record.Platform));

        // Evidence only when there is a resolver trace: first step as the
        // locator, full trace (pipe-joined) as the value when longer.
        var evidence = record.ResolverTrace.Count == 0
            ? null
            : new[]
            {
                new LanguageComponentEvidence(
                    LanguageEvidenceKind.Metadata,
                    "node.trace",
                    record.ResolverTrace[0],
                    record.ResolverTrace.Count > 1 ? string.Join("|", record.ResolverTrace) : null,
                    null)
            };

        var name = record.Path is null ? "" : Path.GetFileName(record.Path.Trim('/'));
        // NOTE(review): Guid.NewGuid() fallback key is nondeterministic —
        // components without a Path get a fresh key on every scan.
        return LanguageComponentRecord.FromExplicitKey(
            analyzerId: "node-phase22",
            componentKey: record.Path ?? Guid.NewGuid().ToString("N"),
            purl: null,
            name: string.IsNullOrWhiteSpace(name) ? "node-component" : name,
            version: null,
            type: typeTag,
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }

    /// <summary>
    /// Maps an "edge" row to a node:edge record keyed by
    /// "edge:{from}->{to}#{edgeType}" so identical edges de-duplicate.
    /// </summary>
    private static LanguageComponentRecord ConvertEdge(NodePhase22Record record)
    {
        var metadata = new List<KeyValuePair<string, string?>>
        {
            new("from", record.From ?? string.Empty),
            new("to", record.To ?? string.Empty)
        };

        if (!string.IsNullOrWhiteSpace(record.Reason)) metadata.Add(new("reason", record.Reason));
        if (record.Confidence is double conf) metadata.Add(new("confidence", conf.ToString("0.00", CultureInfo.InvariantCulture)));
        if (record.ResolverTrace.Count > 0) metadata.Add(new("trace", string.Join("|", record.ResolverTrace)));

        var evidence = record.ResolverTrace.Count == 0
            ? null
            : new[]
            {
                new LanguageComponentEvidence(
                    LanguageEvidenceKind.Derived,
                    "node.edge",
                    record.ResolverTrace[0],
                    record.ResolverTrace.Count > 1 ? string.Join("|", record.ResolverTrace) : null,
                    null)
            };

        var key = string.Concat("edge:", record.From ?? string.Empty, "->", record.To ?? string.Empty, "#", record.EdgeType ?? "edge");

        return LanguageComponentRecord.FromExplicitKey(
            analyzerId: "node-phase22",
            componentKey: key,
            purl: null,
            name: record.EdgeType ?? "edge",
            version: null,
            type: "node:edge",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: false);
    }

    /// <summary>
    /// Maps an "entrypoint" row to a node:entrypoint record; these are the
    /// only records flagged usedByEntrypoint.
    /// </summary>
    private static LanguageComponentRecord ConvertEntrypoint(NodePhase22Record record)
    {
        var metadata = new List<KeyValuePair<string, string?>>
        {
            new("entrypoint", record.Path ?? string.Empty)
        };

        if (!string.IsNullOrWhiteSpace(record.Format)) metadata.Add(new("format", record.Format));
        if (record.Confidence is double conf) metadata.Add(new("confidence", conf.ToString("0.00", CultureInfo.InvariantCulture)));
        if (record.ResolverTrace.Count > 0) metadata.Add(new("trace", string.Join("|", record.ResolverTrace)));

        var evidence = record.ResolverTrace.Count == 0
            ? null
            : new[]
            {
                new LanguageComponentEvidence(
                    LanguageEvidenceKind.File,
                    "node.entrypoint",
                    record.Path ?? "entrypoint",
                    record.ResolverTrace.Count > 0 ? record.ResolverTrace[0] : null,
                    null)
            };

        var name = record.Path is null ? "entrypoint" : Path.GetFileName(record.Path.Trim('/'));

        // NOTE(review): same nondeterministic Guid fallback as components.
        return LanguageComponentRecord.FromExplicitKey(
            analyzerId: "node-phase22",
            componentKey: record.Path ?? Guid.NewGuid().ToString("N"),
            purl: null,
            name: string.IsNullOrWhiteSpace(name) ? "entrypoint" : name,
            version: null,
            type: "node:entrypoint",
            metadata: metadata,
            evidence: evidence,
            usedByEntrypoint: true);
    }
}
|
||||
@@ -0,0 +1,170 @@
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Node.Internal;
|
||||
|
||||
/// <summary>
|
||||
/// Ingests optional runtime evidence produced by Node loader hooks (ESM/CJS).
|
||||
/// Input format: NDJSON records under &lt;root&gt;/node-runtime-evidence.ndjson or a path from SCANNER_NODE_RUNTIME_EVIDENCE.
|
||||
/// Each line is a JSON object with fields:
|
||||
/// type: "edge" | "component"
|
||||
/// from, to: optional strings
|
||||
/// reason: string (e.g., runtime-import, runtime-require)
|
||||
/// loaderId: optional string to be SHA-256 hashed
|
||||
/// path: optional component path
|
||||
/// </summary>
|
||||
internal static class RuntimeEvidenceLoader
{
    // Default evidence file name, relative to the scan root.
    private const string DefaultFileName = "node-runtime-evidence.ndjson";
    // Environment variable that overrides the evidence file location.
    private const string EnvKey = "SCANNER_NODE_RUNTIME_EVIDENCE";

    /// <summary>
    /// Reads the NDJSON evidence file line by line and converts each record
    /// into a node:runtime-edge or node:runtime-component record. Malformed
    /// JSON lines are skipped; output is ordered by component key for
    /// deterministic results. Returns an empty list when no file exists.
    /// </summary>
    public static IReadOnlyList<LanguageComponentRecord> Load(LanguageAnalyzerContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        // Env override wins; otherwise look next to the scan root.
        var path = Environment.GetEnvironmentVariable(EnvKey);
        if (string.IsNullOrWhiteSpace(path))
        {
            path = Path.Combine(context.RootPath, DefaultFileName);
        }

        if (!File.Exists(path))
        {
            return Array.Empty<LanguageComponentRecord>();
        }

        var records = new List<LanguageComponentRecord>();
        using var stream = File.OpenRead(path);
        using var reader = new StreamReader(stream);

        string? line;
        while ((line = reader.ReadLine()) is not null)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            try
            {
                using var doc = JsonDocument.Parse(line);
                var root = doc.RootElement;
                var kind = root.TryGetProperty("type", out var typeProp) ? typeProp.GetString() : null;
                if (string.IsNullOrWhiteSpace(kind))
                {
                    continue;
                }

                // loaderId is hashed (never stored raw); paths are scrubbed
                // to root-relative form before they enter any record.
                var loaderId = TryHash(root, "loaderId");
                var reason = root.TryGetProperty("reason", out var reasonProp) ? reasonProp.GetString() : null;
                var from = root.TryGetProperty("from", out var fromProp) ? ScrubPath(context, fromProp.GetString()) : null;
                var to = root.TryGetProperty("to", out var toProp) ? ScrubPath(context, toProp.GetString()) : null;
                var componentPath = root.TryGetProperty("path", out var pathProp) ? ScrubPath(context, pathProp.GetString()) : null;

                var metadata = new List<KeyValuePair<string, string?>>();
                if (!string.IsNullOrWhiteSpace(reason)) metadata.Add(new("reason", reason));
                if (!string.IsNullOrWhiteSpace(loaderId)) metadata.Add(new("loaderId.sha256", loaderId));

                if (string.Equals(kind, "edge", StringComparison.Ordinal))
                {
                    if (!string.IsNullOrWhiteSpace(from)) metadata.Add(new("from", from));
                    if (!string.IsNullOrWhiteSpace(to)) metadata.Add(new("to", to));

                    var evidence = BuildDerivedEvidence(reason, from, to);
                    records.Add(LanguageComponentRecord.FromExplicitKey(
                        analyzerId: "node-runtime",
                        componentKey: string.Concat("runtime-edge:", from ?? "", "->", to ?? ""),
                        purl: null,
                        name: "runtime-edge",
                        version: null,
                        type: "node:runtime-edge",
                        metadata: metadata,
                        evidence: evidence,
                        usedByEntrypoint: false));
                }
                else if (string.Equals(kind, "component", StringComparison.Ordinal))
                {
                    var name = string.IsNullOrWhiteSpace(componentPath) ? "runtime-component" : Path.GetFileName(componentPath);
                    // NOTE(review): unlike the other fields, "path" is added
                    // even when null — confirm whether that is intended.
                    metadata.Add(new("path", componentPath));

                    records.Add(LanguageComponentRecord.FromExplicitKey(
                        analyzerId: "node-runtime",
                        componentKey: componentPath ?? Guid.NewGuid().ToString("N"),
                        purl: null,
                        name: name,
                        version: null,
                        type: "node:runtime-component",
                        metadata: metadata,
                        evidence: BuildDerivedEvidence(reason, from, to),
                        usedByEntrypoint: false));
                }
            }
            catch (JsonException)
            {
                // Skip malformed lines; evidence ingestion is best-effort.
                continue;
            }
        }

        return records
            .OrderBy(static r => r.ComponentKey, StringComparer.Ordinal)
            .ToArray();
    }

    /// <summary>
    /// Builds a single Derived evidence entry whose locator is the
    /// pipe-joined non-empty parts of (reason, from, to); null when all
    /// three are absent.
    /// </summary>
    private static IReadOnlyList<LanguageComponentEvidence>? BuildDerivedEvidence(string? reason, string? from, string? to)
    {
        var locatorParts = new[] { reason, from, to }
            .Where(static v => !string.IsNullOrWhiteSpace(v));

        var locator = string.Join("|", locatorParts);
        if (string.IsNullOrWhiteSpace(locator))
        {
            return null;
        }

        return new[]
        {
            new LanguageComponentEvidence(
                LanguageEvidenceKind.Derived,
                "node.runtime",
                locator,
                null,
                null)
        };
    }

    /// <summary>
    /// Returns the lowercase hex SHA-256 of the named string property, or
    /// null when the property is missing, non-string, or blank.
    /// </summary>
    private static string? TryHash(JsonElement root, string property)
    {
        if (!root.TryGetProperty(property, out var prop) || prop.ValueKind != JsonValueKind.String)
        {
            return null;
        }

        var value = prop.GetString();
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        var bytes = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(value));
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    /// <summary>
    /// Normalizes an evidence path: relative values are only
    /// slash-normalized; rooted values are mapped to a root-relative path
    /// (falling back to the slash-normalized absolute path when that fails).
    /// </summary>
    private static string? ScrubPath(LanguageAnalyzerContext context, string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return value;
        }

        var trimmed = value.Trim();
        if (!Path.IsPathRooted(trimmed))
        {
            return trimmed.Replace('\\', '/');
        }

        var relative = context.GetRelativePath(trimmed).Replace('\\', '/');
        return string.IsNullOrWhiteSpace(relative) ? trimmed.Replace('\\', '/') : relative;
    }
}
|
||||
@@ -15,14 +15,15 @@ public sealed class NodeLanguageAnalyzer : ILanguageAnalyzer
|
||||
ArgumentNullException.ThrowIfNull(writer);
|
||||
|
||||
var lockData = await NodeLockData.LoadAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
|
||||
var packages = NodePackageCollector.CollectPackages(context, lockData, cancellationToken);
|
||||
var projectInput = NodeInputNormalizer.Normalize(context, cancellationToken);
|
||||
var packages = NodePackageCollector.CollectPackages(context, lockData, projectInput, cancellationToken);
|
||||
|
||||
foreach (var package in packages.OrderBy(static p => p.ComponentKey, StringComparer.Ordinal))
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var metadata = package.CreateMetadata();
|
||||
var evidence = package.CreateEvidence();
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var metadata = package.CreateMetadata();
|
||||
var evidence = package.CreateEvidence();
|
||||
|
||||
writer.AddFromPurl(
|
||||
analyzerId: Id,
|
||||
@@ -35,11 +36,59 @@ public sealed class NodeLanguageAnalyzer : ILanguageAnalyzer
|
||||
usedByEntrypoint: package.IsUsedByEntrypoint);
|
||||
}
|
||||
|
||||
// Optional Phase 22 prep path: ingest precomputed bundle/native/WASM AOC records from NDJSON fixture
|
||||
var phase22Records = await NodePhase22SampleLoader.TryLoadAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
|
||||
if (phase22Records.Count > 0)
|
||||
var observation = NodePhase22Analyzer.Analyze(context, cancellationToken);
|
||||
if (observation.HasRecords)
|
||||
{
|
||||
var ndjson = observation.ToNdjson();
|
||||
var sha256 = observation.ComputeSha256();
|
||||
|
||||
var evidence = new[]
|
||||
{
|
||||
new LanguageComponentEvidence(
|
||||
LanguageEvidenceKind.Derived,
|
||||
"node.observation",
|
||||
"phase22.ndjson",
|
||||
ndjson,
|
||||
sha256)
|
||||
};
|
||||
|
||||
writer.AddFromExplicitKey(
|
||||
analyzerId: Id,
|
||||
componentKey: "observation::node-phase22",
|
||||
purl: null,
|
||||
name: "Node Observation (Phase 22)",
|
||||
version: null,
|
||||
type: "node-observation",
|
||||
metadata: observation.BuildMetadata(),
|
||||
evidence: evidence);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Fallback to NDJSON fixture when running against prep-only environments.
|
||||
var phase22Records = await NodePhase22SampleLoader.TryLoadAsync(context.RootPath, cancellationToken).ConfigureAwait(false);
|
||||
if (phase22Records.Count > 0)
|
||||
{
|
||||
writer.AddRange(phase22Records);
|
||||
}
|
||||
|
||||
var observation = NodePhase22Analyzer.Analyze(context, cancellationToken);
|
||||
if (observation.HasRecords)
|
||||
{
|
||||
var observationRecords = NodePhase22Exporter.ToComponentRecords(observation);
|
||||
writer.AddRange(observationRecords);
|
||||
}
|
||||
|
||||
var runtimeRecords = RuntimeEvidenceLoader.Load(context, cancellationToken);
|
||||
if (runtimeRecords.Count > 0)
|
||||
{
|
||||
writer.AddRange(runtimeRecords);
|
||||
}
|
||||
|
||||
var envWarnings = NodeEnvironmentScanner.Scan(context, projectInput.SourceRoots, cancellationToken);
|
||||
if (envWarnings.Count > 0)
|
||||
{
|
||||
writer.AddRange(envWarnings);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,15 @@
|
||||
<PackageReference Include="Esprima" Version="3.0.5" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Update="runtime-hooks\runtime-require-hook.js">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Update="runtime-hooks\runtime-esm-loader.mjs">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
# Node Analyzer Tasks (Sprint 132)
|
||||
|
||||
| Task ID | Status | Notes | Updated (UTC) |
|
||||
| --- | --- | --- | --- |
|
||||
| SCANNER-ANALYZERS-NODE-22-001 | DONE | VFS/input normalizer covers dirs/tgz/container layers/pnpm/Yarn PnP; Node version detection wired. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-002 | DONE | Entrypoint discovery extended (exports/imports/workers/electron/shebang) with normalized condition sets. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-003 | DONE | Import walker flags dynamic patterns with confidence and de-bundles source maps. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-004 | DONE | Resolver engine added (core modules, exports/imports maps, extension priority, self references). | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-005 | DONE | Yarn PnP and pnpm virtual store adapters supported via VFS; tests updated. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-006 | DONE | Bundle/source-map correlation emits component/entrypoint records with resolver traces. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-007 | DONE | Native addon/WASM/capability edges produced with normalized targets. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-008 | DONE | Phase22 observation export (entrypoints/components/edges) added to analyzer output. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-009 | DONE | Fixture suite refreshed (npm/pnpm/PnP/bundle/electron/worker) with golden outputs. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-010 | DONE | Runtime evidence hooks (ESM loader/CJS require) with path scrubbing and hashed loader IDs; ingestion to runtime-* records. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-011 | DONE | Packaged plug-in manifest (0.1.0) with runtime hooks; CLI/offline docs refreshed. | 2025-12-01 |
|
||||
| SCANNER-ANALYZERS-NODE-22-012 | DONE | Container filesystem adapter (layer roots) + NODE_OPTIONS/env warnings emitted. | 2025-12-01 |
|
||||
@@ -0,0 +1,61 @@
|
||||
// Runtime ESM loader for StellaOps Scanner runtime evidence
|
||||
// Usage: node --experimental-loader=./runtime-esm-loader.mjs app.mjs
|
||||
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import crypto from 'crypto';
|
||||
import { fileURLToPath, pathToFileURL } from 'url';
|
||||
|
||||
// Evidence sink and scan root are configurable via env; default to CWD.
const outPath = process.env.SCANNER_NODE_RUNTIME_OUT || path.join(process.cwd(), 'node-runtime-evidence.ndjson');
const root = process.env.SCANNER_NODE_ROOT || process.cwd();
// Stable identity for this loader instance, derived from its own URL.
const loaderId = hashLoaderId(import.meta.url);
|
||||
|
||||
// SHA-256 hex digest of the loader identity; falsy input hashes ''.
function hashLoaderId(value) {
  const digest = crypto.createHash('sha256');
  digest.update(value || '');
  return digest.digest('hex');
}
|
||||
|
||||
// Maps an absolute path or file: URL to a root-relative, '/'-separated
// path. Values outside the scan root, or anything that fails to parse,
// are returned unchanged.
function scrub(p) {
  if (!p) return p;
  try {
    const absolute = p.startsWith('file:') ? fileURLToPath(p) : p;
    const rel = path.relative(root, absolute);
    if (rel.startsWith('..')) return p;
    return rel.split(path.sep).join('/');
  } catch {
    return p;
  }
}
|
||||
|
||||
// Appends one NDJSON evidence record to the sink file. Evidence capture is
// best-effort: serialization or write failures are swallowed.
function emit(record) {
  try {
    const line = JSON.stringify(record) + '\n';
    fs.appendFileSync(outPath, line);
  } catch {
    // best-effort: ignore write failures
  }
}
|
||||
|
||||
// ESM resolve hook: records a runtime-import edge for every resolution,
// then defers to the next resolver in the chain.
export async function resolve(specifier, context, next) {
  const record = {
    type: 'edge',
    from: context.parentURL ? scrub(context.parentURL) : undefined,
    to: scrub(specifier),
    reason: 'runtime-import',
    loaderId
  };

  emit(record);

  return next(specifier, context, next);
}
|
||||
|
||||
// ESM load hook: records each loaded module as a runtime component, then
// defers to the next loader in the chain.
export async function load(url, context, next) {
  emit({
    type: 'component',
    path: scrub(url),
    reason: 'runtime-load',
    loaderId
  });

  return next(url, context, next);
}
|
||||
@@ -0,0 +1,49 @@
|
||||
// Runtime require hook for StellaOps Scanner runtime evidence
|
||||
// Usage: node -r ./runtime-require-hook.js app.js
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
|
||||
// Evidence sink and scan root are configurable via env; default to CWD.
const outPath = process.env.SCANNER_NODE_RUNTIME_OUT || path.join(process.cwd(), 'node-runtime-evidence.ndjson');
const root = process.env.SCANNER_NODE_ROOT || process.cwd();
|
||||
|
||||
function hashLoaderId(value) {
|
||||
return crypto.createHash('sha256').update(value || '').digest('hex');
|
||||
}
|
||||
|
||||
// Maps an absolute path to a root-relative, '/'-separated path. Values
// outside the scan root, or anything that fails to resolve, are returned
// unchanged.
function scrub(p) {
  if (!p) return p;
  try {
    const rel = path.relative(root, p);
    if (rel.startsWith('..')) return p;
    return rel.split(path.sep).join('/');
  } catch {
    return p;
  }
}
|
||||
|
||||
// Appends one NDJSON evidence record to the sink file. Evidence capture is
// best-effort: serialization or write failures are swallowed.
function emit(record) {
  try {
    const line = JSON.stringify(record) + '\n';
    fs.appendFileSync(outPath, line);
  } catch {
    // best-effort: ignore write failures
  }
}
|
||||
|
||||
// Monkey-patch Module._load so every CommonJS require() is recorded as a
// runtime-evidence edge before delegating to the original loader.
// NOTE(review): module.constructor is assumed to be Module because this
// file is injected via `node -r`; confirm if it is ever loaded differently.
const originalLoad = module.constructor._load;
module.constructor._load = function (request, parent, isMain) {
  const from = parent && parent.filename ? scrub(parent.filename) : undefined;
  const to = scrub(request);
  const loaderId = hashLoaderId(__filename);

  emit({
    type: 'edge',
    from,
    to,
    reason: 'runtime-require',
    loaderId,
    isMain: !!isMain
  });

  // Delegate with the original arguments so loader semantics are untouched.
  return originalLoad.apply(this, arguments);
};
|
||||
@@ -23,10 +23,11 @@ public static class ComponentGraphBuilder
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(fragments);
|
||||
|
||||
var orderedLayers = fragments
|
||||
.Where(static fragment => !string.IsNullOrWhiteSpace(fragment.LayerDigest))
|
||||
.Select(NormalizeFragment)
|
||||
.ToImmutableArray();
|
||||
var orderedLayers = fragments
|
||||
.Where(static fragment => !string.IsNullOrWhiteSpace(fragment.LayerDigest))
|
||||
.Select(NormalizeFragment)
|
||||
.OrderBy(static fragment => fragment.LayerDigest, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var accumulators = new Dictionary<string, ComponentAccumulator>(StringComparer.Ordinal);
|
||||
|
||||
|
||||
@@ -35,14 +35,14 @@ public sealed class CycloneDxComposer
|
||||
var graph = ComponentGraphBuilder.Build(request.LayerFragments);
|
||||
var generatedAt = ScannerTimestamps.Normalize(request.GeneratedAt);
|
||||
|
||||
var inventoryArtifact = BuildArtifact(
|
||||
request,
|
||||
graph,
|
||||
SbomView.Inventory,
|
||||
graph.Components,
|
||||
generatedAt,
|
||||
InventoryMediaTypeJson,
|
||||
InventoryMediaTypeProtobuf);
|
||||
var inventoryArtifact = BuildArtifact(
|
||||
request,
|
||||
graph,
|
||||
SbomView.Inventory,
|
||||
graph.Components,
|
||||
generatedAt,
|
||||
InventoryMediaTypeJson,
|
||||
InventoryMediaTypeProtobuf);
|
||||
|
||||
var usageComponents = graph.Components
|
||||
.Where(static component => component.Usage.UsedByEntrypoint)
|
||||
@@ -51,14 +51,14 @@ public sealed class CycloneDxComposer
|
||||
CycloneDxArtifact? usageArtifact = null;
|
||||
if (!usageComponents.IsEmpty)
|
||||
{
|
||||
usageArtifact = BuildArtifact(
|
||||
request,
|
||||
graph,
|
||||
SbomView.Usage,
|
||||
usageComponents,
|
||||
generatedAt,
|
||||
UsageMediaTypeJson,
|
||||
UsageMediaTypeProtobuf);
|
||||
usageArtifact = BuildArtifact(
|
||||
request,
|
||||
graph,
|
||||
SbomView.Usage,
|
||||
usageComponents,
|
||||
generatedAt,
|
||||
UsageMediaTypeJson,
|
||||
UsageMediaTypeProtobuf);
|
||||
}
|
||||
|
||||
return new SbomCompositionResult
|
||||
@@ -69,37 +69,47 @@ public sealed class CycloneDxComposer
|
||||
};
|
||||
}
|
||||
|
||||
private CycloneDxArtifact BuildArtifact(
|
||||
SbomCompositionRequest request,
|
||||
ComponentGraph graph,
|
||||
SbomView view,
|
||||
ImmutableArray<AggregatedComponent> components,
|
||||
DateTimeOffset generatedAt,
|
||||
string jsonMediaType,
|
||||
string protobufMediaType)
|
||||
{
|
||||
private CycloneDxArtifact BuildArtifact(
|
||||
SbomCompositionRequest request,
|
||||
ComponentGraph graph,
|
||||
SbomView view,
|
||||
ImmutableArray<AggregatedComponent> components,
|
||||
DateTimeOffset generatedAt,
|
||||
string jsonMediaType,
|
||||
string protobufMediaType)
|
||||
{
|
||||
var bom = BuildBom(request, graph, view, components, generatedAt);
|
||||
var json = JsonSerializer.Serialize(bom);
|
||||
var jsonBytes = Encoding.UTF8.GetBytes(json);
|
||||
var protobufBytes = ProtoSerializer.Serialize(bom);
|
||||
var json = JsonSerializer.Serialize(bom);
|
||||
var jsonBytes = Encoding.UTF8.GetBytes(json);
|
||||
var protobufBytes = ProtoSerializer.Serialize(bom);
|
||||
|
||||
var jsonHash = ComputeSha256(jsonBytes);
|
||||
var protobufHash = ComputeSha256(protobufBytes);
|
||||
|
||||
var merkleRoot = request.AdditionalProperties is not null
|
||||
&& request.AdditionalProperties.TryGetValue("stellaops:merkle.root", out var root)
|
||||
? root
|
||||
: null;
|
||||
|
||||
request.AdditionalProperties?.TryGetValue("stellaops:composition.manifest", out var compositionUri);
|
||||
|
||||
var jsonHash = ComputeSha256(jsonBytes);
|
||||
var protobufHash = ComputeSha256(protobufBytes);
|
||||
|
||||
return new CycloneDxArtifact
|
||||
{
|
||||
View = view,
|
||||
SerialNumber = bom.SerialNumber ?? string.Empty,
|
||||
GeneratedAt = generatedAt,
|
||||
Components = components,
|
||||
JsonBytes = jsonBytes,
|
||||
JsonSha256 = jsonHash,
|
||||
JsonMediaType = jsonMediaType,
|
||||
ProtobufBytes = protobufBytes,
|
||||
ProtobufSha256 = protobufHash,
|
||||
ProtobufMediaType = protobufMediaType,
|
||||
};
|
||||
}
|
||||
return new CycloneDxArtifact
|
||||
{
|
||||
View = view,
|
||||
SerialNumber = bom.SerialNumber ?? string.Empty,
|
||||
GeneratedAt = generatedAt,
|
||||
Components = components,
|
||||
JsonBytes = jsonBytes,
|
||||
JsonSha256 = jsonHash,
|
||||
ContentHash = jsonHash,
|
||||
MerkleRoot = merkleRoot,
|
||||
CompositionUri = compositionUri,
|
||||
JsonMediaType = jsonMediaType,
|
||||
ProtobufBytes = protobufBytes,
|
||||
ProtobufSha256 = protobufHash,
|
||||
ProtobufMediaType = protobufMediaType,
|
||||
};
|
||||
}
|
||||
|
||||
private Bom BuildBom(
|
||||
SbomCompositionRequest request,
|
||||
|
||||
@@ -4,25 +4,40 @@ using StellaOps.Scanner.Core.Contracts;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
public sealed record CycloneDxArtifact
|
||||
{
|
||||
public required SbomView View { get; init; }
|
||||
|
||||
public required string SerialNumber { get; init; }
|
||||
public sealed record CycloneDxArtifact
|
||||
{
|
||||
public required SbomView View { get; init; }
|
||||
|
||||
public required string SerialNumber { get; init; }
|
||||
|
||||
public required DateTimeOffset GeneratedAt { get; init; }
|
||||
|
||||
public required ImmutableArray<AggregatedComponent> Components { get; init; }
|
||||
|
||||
public required byte[] JsonBytes { get; init; }
|
||||
|
||||
public required string JsonSha256 { get; init; }
|
||||
|
||||
public required string JsonMediaType { get; init; }
|
||||
|
||||
public required byte[] ProtobufBytes { get; init; }
|
||||
|
||||
public required string ProtobufSha256 { get; init; }
|
||||
|
||||
public required byte[] JsonBytes { get; init; }
|
||||
|
||||
public required string JsonSha256 { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Canonical content hash (sha256, hex) of the CycloneDX JSON payload.
|
||||
/// </summary>
|
||||
public required string ContentHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Merkle root over fragments (hex). Present when composition metadata is provided.
|
||||
/// </summary>
|
||||
public string? MerkleRoot { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CAS URI of the composition recipe (_composition.json) if emitted.
|
||||
/// </summary>
|
||||
public string? CompositionUri { get; init; }
|
||||
|
||||
public required string JsonMediaType { get; init; }
|
||||
|
||||
public required byte[] ProtobufBytes { get; init; }
|
||||
|
||||
public required string ProtobufSha256 { get; init; }
|
||||
|
||||
public required string ProtobufMediaType { get; init; }
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -96,7 +96,7 @@ public sealed class EntryTraceRuntimeReconciler
|
||||
terminalBuilder[index] = terminalBuilder[index] with { Confidence = confidence.Score };
|
||||
}
|
||||
|
||||
diagnostics.Add(BuildDiagnostic(confidence, plan.TerminalPath));
|
||||
diagnostics.Add(BuildDiagnostic(confidence, plan.TerminalPath, procGraph, match?.Process));
|
||||
}
|
||||
|
||||
// Update any terminals that were not tied to plans.
|
||||
@@ -242,7 +242,7 @@ public sealed class EntryTraceRuntimeReconciler
|
||||
return new ConfidenceResult(60d, ConfidenceLevel.Low, runtimePath);
|
||||
}
|
||||
|
||||
private static EntryTraceDiagnostic BuildDiagnostic(ConfidenceResult result, string predictedPath)
|
||||
private EntryTraceDiagnostic BuildDiagnostic(ConfidenceResult result, string predictedPath, ProcGraph procGraph, ProcProcess? process)
|
||||
{
|
||||
var runtimePath = string.IsNullOrWhiteSpace(result.RuntimePath) ? "<unknown>" : result.RuntimePath;
|
||||
var severity = result.Level == ConfidenceLevel.High
|
||||
@@ -251,10 +251,18 @@ public sealed class EntryTraceRuntimeReconciler
|
||||
var reason = result.Level == ConfidenceLevel.High
|
||||
? EntryTraceUnknownReason.RuntimeMatch
|
||||
: EntryTraceUnknownReason.RuntimeMismatch;
|
||||
|
||||
var chain = process is null ? null : BuildProcessChain(procGraph, process.Value);
|
||||
|
||||
var message = result.Level == ConfidenceLevel.High
|
||||
? $"Runtime process '{runtimePath}' matches EntryTrace prediction '{predictedPath}'."
|
||||
: $"Runtime process '{runtimePath}' diverges from EntryTrace prediction '{predictedPath}'.";
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(chain))
|
||||
{
|
||||
message += $" Runtime chain: {chain}.";
|
||||
}
|
||||
|
||||
return new EntryTraceDiagnostic(
|
||||
severity,
|
||||
reason,
|
||||
@@ -269,6 +277,50 @@ public sealed class EntryTraceRuntimeReconciler
|
||||
return command.Length > 0 && WrapperNames.Contains(command);
|
||||
}
|
||||
|
||||
private static string? BuildProcessChain(ProcGraph graph, ProcProcess process)
|
||||
{
|
||||
var chain = new List<string>();
|
||||
var current = process;
|
||||
while (true)
|
||||
{
|
||||
var display = string.IsNullOrWhiteSpace(current.ExecutablePath)
|
||||
? current.CommandName
|
||||
: current.ExecutablePath;
|
||||
if (string.IsNullOrWhiteSpace(display))
|
||||
{
|
||||
display = current.CommandName;
|
||||
}
|
||||
|
||||
chain.Add(display);
|
||||
|
||||
if (current.ParentPid == current.Pid || current.ParentPid == 0 || !graph.Processes.TryGetValue(current.ParentPid, out var parent))
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
current = parent;
|
||||
}
|
||||
|
||||
chain.Reverse();
|
||||
|
||||
// Collapse adjacent wrappers to a single token for readability.
|
||||
var collapsed = new List<string>(chain.Count);
|
||||
foreach (var segment in chain)
|
||||
{
|
||||
var name = Path.GetFileName(segment);
|
||||
var isWrapper = WrapperNames.Contains(name);
|
||||
|
||||
if (isWrapper && collapsed.Count > 0 && WrapperNames.Contains(Path.GetFileName(collapsed[^1])))
|
||||
{
|
||||
continue; // skip duplicate adjacent wrapper entries
|
||||
}
|
||||
|
||||
collapsed.Add(segment);
|
||||
}
|
||||
|
||||
return collapsed.Count == 0 ? null : string.Join(" -> ", collapsed);
|
||||
}
|
||||
|
||||
private static string GetCommandName(ProcProcess process)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(process.CommandName))
|
||||
|
||||
@@ -3,3 +3,6 @@
|
||||
| Task ID | Status | Date | Summary |
|
||||
| --- | --- | --- | --- |
|
||||
| SCANNER-ENG-0008 | DONE | 2025-11-16 | Documented quarterly EntryTrace heuristic cadence and workflow; attached to Sprint 0138 Execution Log. |
|
||||
| SCANNER-ENTRYTRACE-18-504 | DONE | 2025-12-01 | EntryTrace NDJSON emission and streaming (entry/node/edge/target/warning/capability) wired via Worker → WebService/CLI. |
|
||||
| SCANNER-ENTRYTRACE-18-505 | DONE | 2025-12-01 | Runtime ProcGraph reconciliation adjusts plan/terminal confidence and diagnostics for matches/mismatches. |
|
||||
| SCANNER-ENTRYTRACE-18-506 | DONE | 2025-12-01 | EntryTrace graph/NDJSON exposed via WebService `/scans/{id}/entrytrace` and CLI rendering. |
|
||||
|
||||
Reference in New Issue
Block a user