save work
@@ -119,6 +119,11 @@ public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer
declaredMetadata.Add(new KeyValuePair<string, string?>("lockEditablePathRedacted", "true"));
}

var safeEntry = string.IsNullOrWhiteSpace(entry.EditablePath)
? entry
: entry with { EditablePath = editableSpec };
AppendCommonLockFields(declaredMetadata, safeEntry);

var componentKey = LanguageExplicitKey.Create("python", "pypi", normalizedName, editableSpec, entry.Locator);
writer.AddFromExplicitKey(
analyzerId: "python",

@@ -341,6 +341,18 @@ public sealed class LanguageComponentRecord

public LanguageComponentSnapshot ToSnapshot()
{
ComponentThreatVectorSnapshot[]? threatVectors = null;
if (_threatVectors.Count > 0)
{
threatVectors = _threatVectors.Select(static item => new ComponentThreatVectorSnapshot
{
VectorType = item.VectorType,
Confidence = item.Confidence,
Evidence = item.Evidence,
EntryPath = item.EntryPath,
}).ToArray();
}

return new LanguageComponentSnapshot
{
AnalyzerId = AnalyzerId,
@@ -351,14 +363,8 @@ public sealed class LanguageComponentRecord
Type = Type,
UsedByEntrypoint = UsedByEntrypoint,
Intent = Intent,
Capabilities = _capabilities.ToArray(),
ThreatVectors = _threatVectors.Select(static item => new ComponentThreatVectorSnapshot
{
VectorType = item.VectorType,
Confidence = item.Confidence,
Evidence = item.Evidence,
EntryPath = item.EntryPath,
}).ToArray(),
Capabilities = _capabilities.Count == 0 ? null : _capabilities.ToArray(),
ThreatVectors = threatVectors,
Metadata = _metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal),
Evidence = _evidence.Values.Select(static item => new LanguageComponentEvidenceSnapshot
{
@@ -417,14 +423,14 @@ public sealed class LanguageComponentSnapshot
/// </summary>
/// <remarks>Part of Sprint 0411 - Semantic Entrypoint Engine (Task 18).</remarks>
[JsonPropertyName("capabilities")]
public IReadOnlyList<string> Capabilities { get; set; } = Array.Empty<string>();
public IReadOnlyList<string>? Capabilities { get; set; }

/// <summary>
/// Identified threat vectors.
/// </summary>
/// <remarks>Part of Sprint 0411 - Semantic Entrypoint Engine (Task 18).</remarks>
[JsonPropertyName("threatVectors")]
public IReadOnlyList<ComponentThreatVectorSnapshot> ThreatVectors { get; set; } = Array.Empty<ComponentThreatVectorSnapshot>();
public IReadOnlyList<ComponentThreatVectorSnapshot>? ThreatVectors { get; set; }

[JsonPropertyName("metadata")]
public IDictionary<string, string?> Metadata { get; set; } = new Dictionary<string, string?>(StringComparer.Ordinal);

@@ -38,5 +38,9 @@ public static class ScanAnalysisKeys

public const string DeterminismEvidence = "analysis.determinism.evidence";

public const string EpssEvidence = "epss.evidence";
public const string EpssModelDate = "epss.model_date";
public const string EpssNotFoundCves = "epss.not_found";

public const string ReplaySealedBundleMetadata = "analysis.replay.sealed.bundle";
}

@@ -115,7 +115,8 @@ public sealed class ProofBundleWriter : IProofBundleWriter
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNameCaseInsensitive = true
};

public ProofBundleWriter(ProofBundleWriterOptions? options = null)

@@ -1,11 +1,13 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.EntryTrace;

/// <summary>
/// Outcome classification for entrypoint resolution attempts.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceOutcome
{
Resolved,
@@ -16,6 +18,7 @@ public enum EntryTraceOutcome
/// <summary>
/// Logical classification for nodes in the entry trace graph.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceNodeKind
{
Command,
@@ -30,6 +33,7 @@ public enum EntryTraceNodeKind
/// <summary>
/// Interpreter categories supported by the analyzer.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceInterpreterKind
{
None,
@@ -41,6 +45,7 @@ public enum EntryTraceInterpreterKind
/// <summary>
/// Diagnostic severity levels emitted by the analyzer.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceDiagnosticSeverity
{
Info,
@@ -51,6 +56,7 @@ public enum EntryTraceDiagnosticSeverity
/// <summary>
/// Enumerates the canonical reasons for unresolved edges.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceUnknownReason
{
CommandNotFound,
@@ -83,6 +89,7 @@ public enum EntryTraceUnknownReason
/// <summary>
/// Categorises terminal executable kinds.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EntryTraceTerminalType
{
Unknown,

@@ -1,5 +1,6 @@
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.Xml.Linq;

namespace StellaOps.Scanner.EntryTrace.Semantic.Adapters;

@@ -175,9 +176,37 @@ public sealed class DotNetSemanticAdapter : ISemanticEntrypointAnalyzer
var framework = (string?)null;

// Analyze dependencies
var packageDependencies = new List<string>();
if (context.Dependencies.TryGetValue("dotnet", out var deps))
{
foreach (var dep in deps)
packageDependencies.AddRange(deps);
}

ProjectInfo? projectInfo = null;
if (context.ManifestPaths.TryGetValue("project", out var projectPath))
{
projectInfo = await TryReadProjectInfoAsync(context, projectPath, cancellationToken);
if (projectInfo is not null && projectInfo.PackageReferences.Count > 0)
{
packageDependencies.AddRange(projectInfo.PackageReferences);
reasoningChain.Add($"Parsed project file ({projectInfo.PackageReferences.Count} PackageReference)");
}

if (projectInfo?.IsWebSdk == true)
{
builder.AddCapability(CapabilityClass.NetworkListen);
if (intent == ApplicationIntent.Unknown)
{
intent = ApplicationIntent.WebServer;
framework = "aspnetcore";
reasoningChain.Add("Project Sdk indicates Web -> WebServer");
}
}
}

if (packageDependencies.Count > 0)
{
foreach (var dep in packageDependencies)
{
var normalizedDep = NormalizeDependency(dep);

@@ -186,19 +215,30 @@ public sealed class DotNetSemanticAdapter : ISemanticEntrypointAnalyzer
if (intent == ApplicationIntent.Unknown || IsHigherPriority(mappedIntent, intent))
{
intent = mappedIntent;
framework = dep;
reasoningChain.Add($"Detected {dep} -> {intent}");
framework = NormalizeFramework(normalizedDep);
reasoningChain.Add($"Detected {normalizedDep} -> {intent}");
}

if (mappedIntent is ApplicationIntent.WebServer or ApplicationIntent.RpcServer or ApplicationIntent.GraphQlServer)
builder.AddCapability(CapabilityClass.NetworkListen);
else if (mappedIntent is ApplicationIntent.Worker or ApplicationIntent.StreamProcessor)
builder.AddCapability(CapabilityClass.MessageQueue);
}

if (PackageCapabilityMap.TryGetValue(normalizedDep, out var capability))
{
builder.AddCapability(capability);
reasoningChain.Add($"Package {dep} -> {capability}");
reasoningChain.Add($"Package {normalizedDep} -> {capability}");
}
}
}

if (intent == ApplicationIntent.Unknown && projectInfo?.OutputTypeExe == true)
{
intent = ApplicationIntent.CliTool;
reasoningChain.Add("Project OutputType=Exe -> CliTool");
}

// Analyze entrypoint command
var cmdSignals = AnalyzeCommand(context.Specification);
if (cmdSignals.Intent != ApplicationIntent.Unknown && intent == ApplicationIntent.Unknown)
@@ -262,6 +302,17 @@ public sealed class DotNetSemanticAdapter : ISemanticEntrypointAnalyzer
return parts[0].Trim();
}

private static string? NormalizeFramework(string normalizedDependency)
{
if (normalizedDependency.StartsWith("Microsoft.AspNetCore", StringComparison.OrdinalIgnoreCase) ||
string.Equals(normalizedDependency, "Swashbuckle.AspNetCore", StringComparison.OrdinalIgnoreCase))
{
return "aspnetcore";
}

return null;
}

private static bool IsHigherPriority(ApplicationIntent newer, ApplicationIntent current)
{
var priorityOrder = new[]
@@ -358,4 +409,61 @@ public sealed class DotNetSemanticAdapter : ISemanticEntrypointAnalyzer
var hash = context.ImageDigest ?? Guid.NewGuid().ToString("N");
return $"sem-dotnet-{hash[..12]}";
}

private sealed record ProjectInfo(
bool IsWebSdk,
bool OutputTypeExe,
IReadOnlyList<string> PackageReferences);

private static async Task<ProjectInfo?> TryReadProjectInfoAsync(
SemanticAnalysisContext context,
string projectPath,
CancellationToken cancellationToken)
{
var content = await context.FileSystem.TryReadFileAsync(projectPath, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
return null;
}

try
{
var doc = XDocument.Parse(content);
var root = doc.Root;

var sdk = root?.Attribute("Sdk")?.Value?.Trim();
var isWebSdk = !string.IsNullOrWhiteSpace(sdk) &&
sdk.Contains("Microsoft.NET.Sdk.Web", StringComparison.OrdinalIgnoreCase);

var outputType = doc.Descendants()
.FirstOrDefault(element => element.Name.LocalName == "OutputType")
?.Value
?.Trim();

var outputTypeExe = string.Equals(outputType, "Exe", StringComparison.OrdinalIgnoreCase);

var packageReferences = new HashSet<string>(StringComparer.Ordinal);
foreach (var element in doc.Descendants().Where(element => element.Name.LocalName == "PackageReference"))
{
cancellationToken.ThrowIfCancellationRequested();

var include = element.Attribute("Include")?.Value?.Trim();
var update = element.Attribute("Update")?.Value?.Trim();
var name = !string.IsNullOrWhiteSpace(include) ? include : update;
if (!string.IsNullOrWhiteSpace(name))
{
packageReferences.Add(name);
}
}

return new ProjectInfo(
IsWebSdk: isWebSdk,
OutputTypeExe: outputTypeExe,
PackageReferences: packageReferences.OrderBy(static name => name, StringComparer.Ordinal).ToArray());
}
catch
{
return null;
}
}
}

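A standalone sketch (not part of the commit) of how the project-file classification added above behaves; the sample .csproj content and the console program around it are hypothetical, but the parsing mirrors TryReadProjectInfoAsync.

using System;
using System.Linq;
using System.Xml.Linq;

// Hypothetical project file: Web SDK -> IsWebSdk, OutputType=Exe -> OutputTypeExe,
// PackageReference Include/Update attributes -> PackageReferences.
var csproj = """
<Project Sdk="Microsoft.NET.Sdk.Web">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
    <PackageReference Include="Npgsql" Version="8.0.0" />
  </ItemGroup>
</Project>
""";

var doc = XDocument.Parse(csproj);
var sdk = doc.Root?.Attribute("Sdk")?.Value?.Trim();
var isWebSdk = sdk?.Contains("Microsoft.NET.Sdk.Web", StringComparison.OrdinalIgnoreCase) == true;
var outputType = doc.Descendants().FirstOrDefault(e => e.Name.LocalName == "OutputType")?.Value?.Trim();
var packages = doc.Descendants()
    .Where(e => e.Name.LocalName == "PackageReference")
    .Select(e => e.Attribute("Include")?.Value ?? e.Attribute("Update")?.Value)
    .Where(name => !string.IsNullOrWhiteSpace(name))
    .OrderBy(name => name, StringComparer.Ordinal)
    .ToArray();

Console.WriteLine($"IsWebSdk={isWebSdk}, OutputType={outputType}, Packages=[{string.Join(", ", packages)}]");
// Expected: IsWebSdk=True, OutputType=Exe, Packages=[Npgsql, Swashbuckle.AspNetCore]
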
@@ -1,5 +1,6 @@
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.IO;

namespace StellaOps.Scanner.EntryTrace.Semantic.Adapters;

@@ -192,9 +193,31 @@ public sealed class GoSemanticAdapter : ISemanticEntrypointAnalyzer
var framework = (string?)null;

// Analyze dependencies (go.mod imports)
var moduleDependencies = new List<string>();
if (context.Dependencies.TryGetValue("go", out var deps))
{
foreach (var dep in deps)
moduleDependencies.AddRange(deps);
}

if (context.ManifestPaths.TryGetValue("go.mod", out var goModPath))
{
var goModDependencies = await TryReadGoModDependenciesAsync(context, goModPath, cancellationToken);
if (goModDependencies.Count > 0)
{
moduleDependencies.AddRange(goModDependencies);
reasoningChain.Add($"Parsed go.mod ({goModDependencies.Count} deps)");
}

if (await DetectNetHttpUsageAsync(context, goModPath, cancellationToken))
{
moduleDependencies.Add("net/http");
reasoningChain.Add("Detected net/http usage in source");
}
}

if (moduleDependencies.Count > 0)
{
foreach (var dep in moduleDependencies)
{
var normalizedDep = NormalizeDependency(dep);

@@ -203,15 +226,20 @@ public sealed class GoSemanticAdapter : ISemanticEntrypointAnalyzer
if (intent == ApplicationIntent.Unknown || IsHigherPriority(mappedIntent, intent))
{
intent = mappedIntent;
framework = dep;
reasoningChain.Add($"Detected {dep} -> {intent}");
framework = normalizedDep;
reasoningChain.Add($"Detected {normalizedDep} -> {intent}");
}

if (mappedIntent is ApplicationIntent.WebServer or ApplicationIntent.RpcServer or ApplicationIntent.GraphQlServer)
builder.AddCapability(CapabilityClass.NetworkListen);
else if (mappedIntent is ApplicationIntent.Worker or ApplicationIntent.StreamProcessor or ApplicationIntent.MessageBroker)
builder.AddCapability(CapabilityClass.MessageQueue);
}

if (ModuleCapabilityMap.TryGetValue(normalizedDep, out var capability))
{
builder.AddCapability(capability);
reasoningChain.Add($"Module {dep} -> {capability}");
reasoningChain.Add($"Module {normalizedDep} -> {capability}");
}
}
}
@@ -263,9 +291,20 @@ public sealed class GoSemanticAdapter : ISemanticEntrypointAnalyzer

private static string NormalizeDependency(string dep)
{
// Handle Go module paths with versions
var parts = dep.Split('@');
return parts[0].Trim();
// Handle Go module paths with versions (both @ and whitespace forms):
// - github.com/spf13/cobra@v1.7.0 -> github.com/spf13/cobra
// - github.com/spf13/cobra v1.7.0 -> github.com/spf13/cobra
var trimmed = dep.Trim();
if (trimmed.Length == 0)
{
return trimmed;
}

var whitespaceParts = trimmed.Split([' ', '\t'], StringSplitOptions.RemoveEmptyEntries);
trimmed = whitespaceParts.Length > 0 ? whitespaceParts[0] : trimmed;

var atParts = trimmed.Split('@');
return atParts[0].Trim();
}

private static bool IsHigherPriority(ApplicationIntent newer, ApplicationIntent current)
@@ -367,4 +406,120 @@ public sealed class GoSemanticAdapter : ISemanticEntrypointAnalyzer
var hash = context.ImageDigest ?? Guid.NewGuid().ToString("N");
return $"sem-go-{hash[..12]}";
}

private static async Task<IReadOnlyList<string>> TryReadGoModDependenciesAsync(
SemanticAnalysisContext context,
string goModPath,
CancellationToken cancellationToken)
{
var content = await context.FileSystem.TryReadFileAsync(goModPath, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
return Array.Empty<string>();
}

var dependencies = new HashSet<string>(StringComparer.Ordinal);
using var reader = new StringReader(content);
string? line;
var inRequireBlock = false;

while ((line = reader.ReadLine()) is not null)
{
cancellationToken.ThrowIfCancellationRequested();

var trimmed = line.Trim();
if (trimmed.Length == 0 || trimmed.StartsWith("//", StringComparison.Ordinal))
{
continue;
}

if (inRequireBlock)
{
if (trimmed == ")")
{
inRequireBlock = false;
continue;
}

var parts = trimmed.Split([' ', '\t'], StringSplitOptions.RemoveEmptyEntries);
if (parts.Length > 0)
{
dependencies.Add(parts[0]);
}

continue;
}

if (trimmed.StartsWith("require (", StringComparison.Ordinal))
{
inRequireBlock = true;
continue;
}

if (trimmed.StartsWith("require ", StringComparison.Ordinal))
{
var rest = trimmed["require ".Length..].Trim();
var parts = rest.Split([' ', '\t'], StringSplitOptions.RemoveEmptyEntries);
if (parts.Length > 0)
{
dependencies.Add(parts[0]);
}
}
}

if (dependencies.Count == 0)
{
return Array.Empty<string>();
}

return dependencies.OrderBy(static dependency => dependency, StringComparer.Ordinal).ToArray();
}

private static async Task<bool> DetectNetHttpUsageAsync(
SemanticAnalysisContext context,
string goModPath,
CancellationToken cancellationToken)
{
var directory = GetDirectory(goModPath);
if (directory is null)
{
return false;
}

var goFiles = await context.FileSystem.ListFilesAsync(directory, "*.go", cancellationToken);
foreach (var file in goFiles)
{
cancellationToken.ThrowIfCancellationRequested();

var content = await context.FileSystem.TryReadFileAsync(file, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
continue;
}

if (content.Contains("net/http", StringComparison.Ordinal))
{
return true;
}
}

return false;
}

private static string? GetDirectory(string path)
{
var normalized = path.Replace('\\', '/');
var lastSlash = normalized.LastIndexOf('/');
if (lastSlash < 0)
{
return null;
}

if (lastSlash == 0)
{
return "/";
}

return normalized[..lastSlash];
}
}

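A standalone sketch (outside the diff) of the go.mod "require" parsing rule introduced above, fed a hypothetical manifest; both the single-line and block forms are handled, and the module path is the first token on each line.

using System;
using System.Collections.Generic;
using System.IO;

// Hypothetical go.mod content assembled inline for the demo.
var goMod = string.Join('\n',
    "module example.com/app",
    "",
    "require golang.org/x/sync v0.6.0",
    "",
    "require (",
    "    github.com/spf13/cobra v1.7.0",
    "    github.com/lib/pq v1.10.9 // indirect",
    ")");

var deps = new SortedSet<string>(StringComparer.Ordinal);
var inBlock = false;
using var reader = new StringReader(goMod);
string? line;
while ((line = reader.ReadLine()) is not null)
{
    var trimmed = line.Trim();
    if (trimmed.Length == 0 || trimmed.StartsWith("//")) continue;
    if (inBlock)
    {
        if (trimmed == ")") { inBlock = false; continue; }
        deps.Add(trimmed.Split(' ', '\t')[0]); // module path = first token
        continue;
    }
    if (trimmed.StartsWith("require (")) { inBlock = true; continue; }
    if (trimmed.StartsWith("require "))
        deps.Add(trimmed["require ".Length..].Trim().Split(' ', '\t')[0]);
}

Console.WriteLine(string.Join(", ", deps));
// Expected: github.com/lib/pq, github.com/spf13/cobra, golang.org/x/sync
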
@@ -1,5 +1,6 @@
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.Xml.Linq;

namespace StellaOps.Scanner.EntryTrace.Semantic.Adapters;

@@ -183,9 +184,25 @@ public sealed class JavaSemanticAdapter : ISemanticEntrypointAnalyzer
var framework = (string?)null;

// Analyze dependencies
var javaDependencies = new List<string>();
if (context.Dependencies.TryGetValue("java", out var deps))
{
foreach (var dep in deps)
javaDependencies.AddRange(deps);
}

if (context.ManifestPaths.TryGetValue("pom.xml", out var pomPath))
{
var pomDependencies = await TryReadPomDependenciesAsync(context, pomPath, cancellationToken);
if (pomDependencies.Count > 0)
{
javaDependencies.AddRange(pomDependencies);
reasoningChain.Add($"Parsed pom.xml ({pomDependencies.Count} deps)");
}
}

if (javaDependencies.Count > 0)
{
foreach (var dep in javaDependencies)
{
var normalizedDep = NormalizeDependency(dep);

@@ -194,15 +211,20 @@ public sealed class JavaSemanticAdapter : ISemanticEntrypointAnalyzer
if (intent == ApplicationIntent.Unknown || IsHigherPriority(mappedIntent, intent))
{
intent = mappedIntent;
framework = dep;
reasoningChain.Add($"Detected {dep} -> {intent}");
framework = normalizedDep;
reasoningChain.Add($"Detected {normalizedDep} -> {intent}");
}

if (mappedIntent == ApplicationIntent.WebServer)
builder.AddCapability(CapabilityClass.NetworkListen);
else if (mappedIntent is ApplicationIntent.Worker or ApplicationIntent.StreamProcessor)
builder.AddCapability(CapabilityClass.MessageQueue);
}

if (DependencyCapabilityMap.TryGetValue(normalizedDep, out var capability))
{
builder.AddCapability(capability);
reasoningChain.Add($"Dependency {dep} -> {capability}");
reasoningChain.Add($"Dependency {normalizedDep} -> {capability}");
}
}
}
@@ -367,4 +389,59 @@ public sealed class JavaSemanticAdapter : ISemanticEntrypointAnalyzer
var hash = context.ImageDigest ?? Guid.NewGuid().ToString("N");
return $"sem-java-{hash[..12]}";
}

private static async Task<IReadOnlyList<string>> TryReadPomDependenciesAsync(
SemanticAnalysisContext context,
string pomPath,
CancellationToken cancellationToken)
{
var content = await context.FileSystem.TryReadFileAsync(pomPath, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
return Array.Empty<string>();
}

try
{
var tokens = new HashSet<string>(StringComparer.Ordinal);
var doc = XDocument.Parse(content);

foreach (var dep in doc.Descendants().Where(element => element.Name.LocalName == "dependency"))
{
cancellationToken.ThrowIfCancellationRequested();

var groupId = dep.Elements().FirstOrDefault(element => element.Name.LocalName == "groupId")?.Value?.Trim();
var artifactId = dep.Elements().FirstOrDefault(element => element.Name.LocalName == "artifactId")?.Value?.Trim();

if (!string.IsNullOrWhiteSpace(groupId))
{
if (groupId.StartsWith("org.springframework.boot", StringComparison.Ordinal))
{
tokens.Add("spring-boot");
}

if (groupId.StartsWith("io.quarkus", StringComparison.Ordinal))
{
tokens.Add("quarkus");
}
}

if (!string.IsNullOrWhiteSpace(artifactId))
{
tokens.Add(artifactId.ToLowerInvariant().Replace("_", "-"));
}
}

if (tokens.Count == 0)
{
return Array.Empty<string>();
}

return tokens.OrderBy(static token => token, StringComparer.Ordinal).ToArray();
}
catch
{
return Array.Empty<string>();
}
}
}

@@ -1,5 +1,6 @@
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.Text.Json;

namespace StellaOps.Scanner.EntryTrace.Semantic.Adapters;

@@ -209,9 +210,29 @@ public sealed class NodeSemanticAdapter : ISemanticEntrypointAnalyzer
var framework = (string?)null;

// Analyze dependencies
if (context.Dependencies.TryGetValue("node", out var deps))
context.ManifestPaths.TryGetValue("package.json", out var packageJsonPath);

var nodeDependencies = new List<string>();
if (context.Dependencies.TryGetValue("node", out var deps) ||
context.Dependencies.TryGetValue("javascript", out deps) ||
context.Dependencies.TryGetValue("typescript", out deps))
{
foreach (var dep in deps)
nodeDependencies.AddRange(deps);
}

if (!string.IsNullOrWhiteSpace(packageJsonPath))
{
var manifestDependencies = await TryReadPackageJsonDependenciesAsync(context, packageJsonPath, cancellationToken);
if (manifestDependencies.Count > 0)
{
nodeDependencies.AddRange(manifestDependencies);
reasoningChain.Add($"Parsed package.json ({manifestDependencies.Count} deps)");
}
}

if (nodeDependencies.Count > 0)
{
foreach (var dep in nodeDependencies)
{
var normalizedDep = NormalizeDependency(dep);

@@ -220,19 +241,31 @@ public sealed class NodeSemanticAdapter : ISemanticEntrypointAnalyzer
if (intent == ApplicationIntent.Unknown || IsHigherPriority(mappedIntent, intent))
{
intent = mappedIntent;
framework = dep;
reasoningChain.Add($"Detected {dep} -> {intent}");
framework = NormalizeFramework(normalizedDep);
reasoningChain.Add($"Detected {normalizedDep} -> {intent}");
}

if (mappedIntent is ApplicationIntent.WebServer or ApplicationIntent.RpcServer or ApplicationIntent.GraphQlServer)
builder.AddCapability(CapabilityClass.NetworkListen);
else if (mappedIntent is ApplicationIntent.Worker or ApplicationIntent.StreamProcessor)
builder.AddCapability(CapabilityClass.MessageQueue);
}

if (PackageCapabilityMap.TryGetValue(normalizedDep, out var capability))
{
builder.AddCapability(capability);
reasoningChain.Add($"Package {dep} -> {capability}");
reasoningChain.Add($"Package {normalizedDep} -> {capability}");
}
}
}

// Serverless manifest hint (e.g., serverless.yml discovered by earlier filesystem pass).
if (intent == ApplicationIntent.Unknown && context.ManifestPaths.ContainsKey("serverless"))
{
intent = ApplicationIntent.Serverless;
reasoningChain.Add("Manifest hint: serverless -> Serverless");
}

// Analyze entrypoint command
var cmdSignals = AnalyzeCommand(context.Specification);
if (cmdSignals.Intent != ApplicationIntent.Unknown && intent == ApplicationIntent.Unknown)
@@ -247,9 +280,9 @@ public sealed class NodeSemanticAdapter : ISemanticEntrypointAnalyzer
}

// Check package.json for bin entries -> CLI tool
if (context.ManifestPaths.TryGetValue("package.json", out var pkgPath))
if (!string.IsNullOrWhiteSpace(packageJsonPath))
{
if (await HasBinEntriesAsync(context, pkgPath, cancellationToken))
if (await HasBinEntriesAsync(context, packageJsonPath, cancellationToken))
{
if (intent == ApplicationIntent.Unknown)
{
@@ -286,10 +319,87 @@ public sealed class NodeSemanticAdapter : ISemanticEntrypointAnalyzer

private static string NormalizeDependency(string dep)
{
// Handle scoped packages and versions
return dep.ToLowerInvariant()
.Split('@')[0] // Remove version
.Trim();
// Handle scoped packages and versions:
// - express@4.18.0 -> express
// - @nestjs/core -> @nestjs/core
// - @nestjs/core@10.0.0 -> @nestjs/core
var normalized = dep.Trim().ToLowerInvariant();
if (normalized.Length == 0)
{
return normalized;
}

if (normalized.StartsWith("@", StringComparison.Ordinal))
{
var lastAt = normalized.LastIndexOf('@');
return lastAt > 0 ? normalized[..lastAt] : normalized;
}

var at = normalized.IndexOf('@', StringComparison.Ordinal);
return at > 0 ? normalized[..at] : normalized;
}

private static string NormalizeFramework(string normalizedDependency)
{
return normalizedDependency switch
{
"nest" or "@nestjs/core" or "@nestjs/platform-express" => "nestjs",
_ => normalizedDependency
};
}

private static async Task<IReadOnlyList<string>> TryReadPackageJsonDependenciesAsync(
SemanticAnalysisContext context,
string pkgPath,
CancellationToken cancellationToken)
{
var content = await context.FileSystem.TryReadFileAsync(pkgPath, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
return Array.Empty<string>();
}

try
{
using var doc = JsonDocument.Parse(content);
if (doc.RootElement.ValueKind != JsonValueKind.Object)
{
return Array.Empty<string>();
}

var dependencies = new HashSet<string>(StringComparer.Ordinal);
AddDependencyObjectKeys(doc.RootElement, "dependencies", dependencies);
AddDependencyObjectKeys(doc.RootElement, "devDependencies", dependencies);
AddDependencyObjectKeys(doc.RootElement, "peerDependencies", dependencies);
AddDependencyObjectKeys(doc.RootElement, "optionalDependencies", dependencies);

if (dependencies.Count == 0)
{
return Array.Empty<string>();
}

return dependencies.OrderBy(static dep => dep, StringComparer.Ordinal).ToArray();
}
catch
{
return Array.Empty<string>();
}
}

private static void AddDependencyObjectKeys(JsonElement root, string propertyName, HashSet<string> dependencies)
{
if (!root.TryGetProperty(propertyName, out var section) || section.ValueKind != JsonValueKind.Object)
{
return;
}

foreach (var property in section.EnumerateObject())
{
if (!string.IsNullOrWhiteSpace(property.Name))
{
dependencies.Add(property.Name);
}
}
}

private static bool IsHigherPriority(ApplicationIntent newer, ApplicationIntent current)

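A standalone sketch (not part of the diff) of the scoped-package normalization the Node adapter now applies; the local Normalize helper simply mirrors the NormalizeDependency rules above for illustration.

using System;

// Lower-case, strip a trailing @version, but keep the leading @scope/ prefix intact.
static string Normalize(string dep)
{
    var normalized = dep.Trim().ToLowerInvariant();
    if (normalized.Length == 0) return normalized;
    if (normalized.StartsWith('@'))
    {
        var lastAt = normalized.LastIndexOf('@');
        return lastAt > 0 ? normalized[..lastAt] : normalized;
    }
    var at = normalized.IndexOf('@');
    return at > 0 ? normalized[..at] : normalized;
}

Console.WriteLine(Normalize("Express@4.18.0"));     // express
Console.WriteLine(Normalize("@nestjs/core"));        // @nestjs/core
Console.WriteLine(Normalize("@nestjs/core@10.0.0")); // @nestjs/core
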
@@ -1,5 +1,6 @@
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.IO;

namespace StellaOps.Scanner.EntryTrace.Semantic.Adapters;

@@ -188,9 +189,29 @@ public sealed class PythonSemanticAdapter : ISemanticEntrypointAnalyzer
var framework = (string?)null;

// Analyze dependencies to determine intent and capabilities
var pythonDependencies = new List<string>();
if (context.Dependencies.TryGetValue("python", out var deps))
{
foreach (var dep in deps)
pythonDependencies.AddRange(deps);
}
else
{
pythonDependencies = [];
}

if (pythonDependencies.Count == 0)
{
var requirementsDeps = await TryReadRequirementsDependenciesAsync(context, cancellationToken);
if (requirementsDeps.Count > 0)
{
pythonDependencies.AddRange(requirementsDeps);
reasoningChain.Add($"Parsed requirements.txt ({requirementsDeps.Count} deps)");
}
}

if (pythonDependencies.Count > 0)
{
foreach (var dep in pythonDependencies)
{
var normalizedDep = NormalizeDependency(dep);

@@ -200,20 +221,33 @@ public sealed class PythonSemanticAdapter : ISemanticEntrypointAnalyzer
if (intent == ApplicationIntent.Unknown || IsHigherPriority(mappedIntent, intent))
{
intent = mappedIntent;
framework = dep;
reasoningChain.Add($"Detected {dep} -> {intent}");
framework = normalizedDep;
reasoningChain.Add($"Detected {normalizedDep} -> {intent}");
}

// Baseline capabilities implied by the inferred intent/framework.
if (mappedIntent == ApplicationIntent.WebServer)
builder.AddCapability(CapabilityClass.NetworkListen);
else if (mappedIntent is ApplicationIntent.Worker or ApplicationIntent.StreamProcessor)
builder.AddCapability(CapabilityClass.MessageQueue);
}

// Check capability imports
if (ImportCapabilityMap.TryGetValue(normalizedDep, out var capability))
{
builder.AddCapability(capability);
reasoningChain.Add($"Import {dep} -> {capability}");
reasoningChain.Add($"Import {normalizedDep} -> {capability}");
}
}
}

// Serverless manifest hint (e.g., Serverless Framework / SAM markers discovered earlier in the scan).
if (intent == ApplicationIntent.Unknown && context.ManifestPaths.ContainsKey("serverless"))
{
intent = ApplicationIntent.Serverless;
reasoningChain.Add("Manifest hint: serverless -> Serverless");
}

// Analyze entrypoint command for additional signals
var cmdSignals = AnalyzeCommand(context.Specification);
if (cmdSignals.Intent != ApplicationIntent.Unknown && intent == ApplicationIntent.Unknown)
@@ -353,4 +387,87 @@ public sealed class PythonSemanticAdapter : ISemanticEntrypointAnalyzer
var hash = context.ImageDigest ?? Guid.NewGuid().ToString("N");
return $"sem-py-{hash[..12]}";
}

private static async Task<IReadOnlyList<string>> TryReadRequirementsDependenciesAsync(
SemanticAnalysisContext context,
CancellationToken cancellationToken)
{
var entrypoint = context.Specification.Entrypoint.FirstOrDefault();
if (string.IsNullOrWhiteSpace(entrypoint) || !entrypoint.Contains('/', StringComparison.Ordinal))
{
return Array.Empty<string>();
}

var directory = GetDirectory(entrypoint);
if (directory is null)
{
return Array.Empty<string>();
}

var candidate = directory == "/" ? "/requirements.txt" : $"{directory}/requirements.txt";
var content = await context.FileSystem.TryReadFileAsync(candidate, cancellationToken);
if (string.IsNullOrWhiteSpace(content))
{
return Array.Empty<string>();
}

var dependencies = new HashSet<string>(StringComparer.Ordinal);
using var reader = new StringReader(content);
string? line;
while ((line = reader.ReadLine()) is not null)
{
cancellationToken.ThrowIfCancellationRequested();

var trimmed = line.Trim();
if (trimmed.Length == 0 || trimmed.StartsWith("#", StringComparison.Ordinal))
{
continue;
}

var commentIndex = trimmed.IndexOf('#');
if (commentIndex >= 0)
{
trimmed = trimmed[..commentIndex].Trim();
if (trimmed.Length == 0)
{
continue;
}
}

if (trimmed.StartsWith("-", StringComparison.Ordinal))
{
continue;
}

var normalized = NormalizeDependency(trimmed);
if (!string.IsNullOrWhiteSpace(normalized))
{
dependencies.Add(normalized);
}
}

if (dependencies.Count == 0)
{
return Array.Empty<string>();
}

return dependencies.OrderBy(static dep => dep, StringComparer.Ordinal).ToArray();
}

private static string? GetDirectory(string path)
{
var normalized = path.Replace('\\', '/');
var lastSlash = normalized.LastIndexOf('/');
if (lastSlash < 0)
{
return null;
}

if (lastSlash == 0)
{
return "/";
}

return normalized[..lastSlash];
}
}

@@ -13,58 +13,58 @@
DO $$
BEGIN
-- Check if table exists
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'vuln_instance_triage') THEN
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage') THEN
-- Add current_epss_score column
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_score') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'current_epss_score') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_score DOUBLE PRECISION;
COMMENT ON COLUMN vuln_instance_triage.current_epss_score IS 'Current EPSS probability score [0,1]';
END IF;

-- Add current_epss_percentile column
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_percentile') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'current_epss_percentile') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_percentile DOUBLE PRECISION;
COMMENT ON COLUMN vuln_instance_triage.current_epss_percentile IS 'Current EPSS percentile rank [0,1]';
END IF;

-- Add current_epss_band column
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_band') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'current_epss_band') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_band TEXT;
COMMENT ON COLUMN vuln_instance_triage.current_epss_band IS 'Current EPSS priority band: CRITICAL, HIGH, MEDIUM, LOW';
END IF;

-- Add epss_model_date column
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'epss_model_date') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'epss_model_date') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN epss_model_date DATE;
COMMENT ON COLUMN vuln_instance_triage.epss_model_date IS 'EPSS model date when last updated';
END IF;

-- Add epss_updated_at column
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'epss_updated_at') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'epss_updated_at') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN epss_updated_at TIMESTAMPTZ;
COMMENT ON COLUMN vuln_instance_triage.epss_updated_at IS 'Timestamp when EPSS data was last updated';
END IF;

-- Add previous_epss_band column (for change tracking)
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
WHERE table_name = 'vuln_instance_triage' AND column_name = 'previous_epss_band') THEN
WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage' AND column_name = 'previous_epss_band') THEN
ALTER TABLE vuln_instance_triage ADD COLUMN previous_epss_band TEXT;
COMMENT ON COLUMN vuln_instance_triage.previous_epss_band IS 'Previous EPSS priority band before last update';
END IF;

-- Create index for efficient band-based queries
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_vuln_instance_epss_band') THEN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE schemaname = current_schema() AND indexname = 'idx_vuln_instance_epss_band') THEN
CREATE INDEX idx_vuln_instance_epss_band
ON vuln_instance_triage (current_epss_band)
WHERE current_epss_band IN ('CRITICAL', 'HIGH');
END IF;

-- Create index for stale EPSS data detection
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_vuln_instance_epss_model_date') THEN
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE schemaname = current_schema() AND indexname = 'idx_vuln_instance_epss_model_date') THEN
CREATE INDEX idx_vuln_instance_epss_model_date
ON vuln_instance_triage (epss_model_date);
END IF;
@@ -80,6 +80,10 @@ END $$;
-- ============================================================================
-- Efficiently updates EPSS data for multiple vulnerability instances

DO $epss_triage$
BEGIN
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = current_schema() AND table_name = 'vuln_instance_triage') THEN
EXECUTE $sql$
CREATE OR REPLACE FUNCTION batch_update_epss_triage(
p_updates JSONB,
p_model_date DATE,
@@ -127,14 +131,13 @@ BEGIN
RETURN QUERY SELECT v_updated, v_band_changes;
END;
$$ LANGUAGE plpgsql;
$sql$;

EXECUTE $sql$
COMMENT ON FUNCTION batch_update_epss_triage IS 'Batch updates EPSS data for vulnerability instances, tracking band changes';
$sql$;

-- ============================================================================
-- View for Instances Needing EPSS Update
-- ============================================================================
-- Returns instances with stale or missing EPSS data

EXECUTE $sql$
CREATE OR REPLACE VIEW v_epss_stale_instances AS
SELECT
vit.instance_id,
@@ -146,5 +149,12 @@ SELECT
FROM vuln_instance_triage vit
WHERE vit.epss_model_date IS NULL
OR vit.epss_model_date < CURRENT_DATE - 1;
$sql$;

EXECUTE $sql$
COMMENT ON VIEW v_epss_stale_instances IS 'Instances with stale or missing EPSS data, needing enrichment';
$sql$;
ELSE
RAISE NOTICE 'Table vuln_instance_triage does not exist; skipping EPSS triage function/view';
END IF;
END $epss_triage$;

@@ -3,23 +3,17 @@
-- Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
-- Task: SURF-014
-- Description: Vulnerability surface storage for trigger method analysis.
--
-- Note: migrations are executed with the module schema as the active search_path.
-- Keep objects unqualified so integration tests can run in isolated schemas.
-- =============================================================================

BEGIN;

-- Prevent re-running
DO $$ BEGIN
IF EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'scanner' AND tablename = 'vuln_surfaces') THEN
RAISE EXCEPTION 'Migration 014_vuln_surfaces already applied';
END IF;
END $$;

-- =============================================================================
-- VULN_SURFACES: Computed vulnerability surface for CVE + package + version
-- =============================================================================
CREATE TABLE scanner.vuln_surfaces (
CREATE TABLE IF NOT EXISTS vuln_surfaces (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL REFERENCES public.tenants(id),
tenant_id UUID NOT NULL,

-- CVE/vulnerability identity
cve_id TEXT NOT NULL,
@@ -41,23 +35,22 @@ CREATE TABLE scanner.vuln_surfaces (
-- DSSE attestation (optional)
attestation_digest TEXT,

-- Indexes for lookups
CONSTRAINT uq_vuln_surface_key UNIQUE (tenant_id, cve_id, package_ecosystem, package_name, vuln_version)
);

-- Indexes for common queries
CREATE INDEX idx_vuln_surfaces_cve ON scanner.vuln_surfaces(tenant_id, cve_id);
CREATE INDEX idx_vuln_surfaces_package ON scanner.vuln_surfaces(tenant_id, package_ecosystem, package_name);
CREATE INDEX idx_vuln_surfaces_computed_at ON scanner.vuln_surfaces(computed_at DESC);
CREATE INDEX IF NOT EXISTS idx_vuln_surfaces_cve ON vuln_surfaces(tenant_id, cve_id);
CREATE INDEX IF NOT EXISTS idx_vuln_surfaces_package ON vuln_surfaces(tenant_id, package_ecosystem, package_name);
CREATE INDEX IF NOT EXISTS idx_vuln_surfaces_computed_at ON vuln_surfaces(computed_at DESC);

COMMENT ON TABLE scanner.vuln_surfaces IS 'Computed vulnerability surfaces identifying which methods changed between vulnerable and fixed versions';
COMMENT ON TABLE vuln_surfaces IS 'Computed vulnerability surfaces identifying which methods changed between vulnerable and fixed versions';

-- =============================================================================
-- VULN_SURFACE_SINKS: Individual trigger methods for a vulnerability surface
-- =============================================================================
CREATE TABLE scanner.vuln_surface_sinks (
CREATE TABLE IF NOT EXISTS vuln_surface_sinks (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
surface_id UUID NOT NULL REFERENCES scanner.vuln_surfaces(id) ON DELETE CASCADE,
surface_id UUID NOT NULL REFERENCES vuln_surfaces(id) ON DELETE CASCADE,

-- Method identity
method_key TEXT NOT NULL, -- Normalized method signature (FQN)
@@ -82,24 +75,23 @@ CREATE TABLE scanner.vuln_surface_sinks (
start_line INTEGER,
end_line INTEGER,

-- Indexes for lookups
CONSTRAINT uq_surface_sink_key UNIQUE (surface_id, method_key)
);

-- Indexes for common queries
CREATE INDEX idx_vuln_surface_sinks_surface ON scanner.vuln_surface_sinks(surface_id);
CREATE INDEX idx_vuln_surface_sinks_method ON scanner.vuln_surface_sinks(method_name);
CREATE INDEX idx_vuln_surface_sinks_type ON scanner.vuln_surface_sinks(declaring_type);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_sinks_surface ON vuln_surface_sinks(surface_id);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_sinks_method ON vuln_surface_sinks(method_name);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_sinks_type ON vuln_surface_sinks(declaring_type);

COMMENT ON TABLE scanner.vuln_surface_sinks IS 'Individual methods that changed between vulnerable and fixed package versions';
COMMENT ON TABLE vuln_surface_sinks IS 'Individual methods that changed between vulnerable and fixed package versions';

-- =============================================================================
-- VULN_SURFACE_TRIGGERS: Links sinks to call graph nodes where they are invoked
-- =============================================================================
CREATE TABLE scanner.vuln_surface_triggers (
CREATE TABLE IF NOT EXISTS vuln_surface_triggers (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
sink_id UUID NOT NULL REFERENCES scanner.vuln_surface_sinks(id) ON DELETE CASCADE,
scan_id UUID NOT NULL, -- References scanner.scans
sink_id UUID NOT NULL REFERENCES vuln_surface_sinks(id) ON DELETE CASCADE,
scan_id UUID NOT NULL, -- References scans.scan_id

-- Caller identity
caller_node_id TEXT NOT NULL, -- Call graph node ID
@@ -116,34 +108,33 @@ CREATE TABLE scanner.vuln_surface_triggers (
call_type TEXT NOT NULL DEFAULT 'direct', -- 'direct', 'virtual', 'interface', 'reflection'
is_conditional BOOLEAN NOT NULL DEFAULT false,

-- Indexes for lookups
CONSTRAINT uq_trigger_key UNIQUE (sink_id, scan_id, caller_node_id)
);

-- Indexes for common queries
CREATE INDEX idx_vuln_surface_triggers_sink ON scanner.vuln_surface_triggers(sink_id);
CREATE INDEX idx_vuln_surface_triggers_scan ON scanner.vuln_surface_triggers(scan_id);
CREATE INDEX idx_vuln_surface_triggers_bucket ON scanner.vuln_surface_triggers(reachability_bucket);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_triggers_sink ON vuln_surface_triggers(sink_id);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_triggers_scan ON vuln_surface_triggers(scan_id);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_triggers_bucket ON vuln_surface_triggers(reachability_bucket);

COMMENT ON TABLE scanner.vuln_surface_triggers IS 'Links between vulnerability sink methods and their callers in analyzed code';
COMMENT ON TABLE vuln_surface_triggers IS 'Links between vulnerability sink methods and their callers in analyzed code';

-- =============================================================================
-- RLS (Row Level Security)
-- =============================================================================
ALTER TABLE scanner.vuln_surfaces ENABLE ROW LEVEL SECURITY;
ALTER TABLE vuln_surfaces ENABLE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS vuln_surfaces_tenant_isolation ON vuln_surfaces;
CREATE POLICY vuln_surfaces_tenant_isolation ON vuln_surfaces
FOR ALL
USING (tenant_id = current_tenant_id())
WITH CHECK (tenant_id = current_tenant_id());

-- Tenant isolation policy
CREATE POLICY vuln_surfaces_tenant_isolation ON scanner.vuln_surfaces
USING (tenant_id = current_setting('app.tenant_id', true)::uuid);

-- Note: vuln_surface_sinks and triggers inherit isolation through FK to surfaces
-- Note: vuln_surface_sinks and triggers inherit isolation through FK to surfaces.

-- =============================================================================
-- FUNCTIONS
-- =============================================================================

-- Get surface statistics for a CVE
CREATE OR REPLACE FUNCTION scanner.get_vuln_surface_stats(
CREATE OR REPLACE FUNCTION get_vuln_surface_stats(
p_tenant_id UUID,
p_cve_id TEXT
)
@@ -164,14 +155,12 @@ BEGIN
vs.fixed_version,
vs.changed_method_count,
COUNT(DISTINCT vst.id)::BIGINT AS trigger_count
FROM scanner.vuln_surfaces vs
LEFT JOIN scanner.vuln_surface_sinks vss ON vss.surface_id = vs.id
LEFT JOIN scanner.vuln_surface_triggers vst ON vst.sink_id = vss.id
FROM vuln_surfaces vs
LEFT JOIN vuln_surface_sinks vss ON vss.surface_id = vs.id
LEFT JOIN vuln_surface_triggers vst ON vst.sink_id = vss.id
WHERE vs.tenant_id = p_tenant_id
AND vs.cve_id = p_cve_id
GROUP BY vs.id, vs.package_ecosystem, vs.package_name, vs.vuln_version, vs.fixed_version, vs.changed_method_count
ORDER BY vs.package_ecosystem, vs.package_name;
END;
$$ LANGUAGE plpgsql STABLE;

COMMIT;

@@ -427,10 +427,10 @@ public sealed class PostgresEpssRepository : IEpssRepository
FROM {stageTable} s
LEFT JOIN {CurrentTable} c ON c.cve_id = s.cve_id
CROSS JOIN (
SELECT high_score, high_percentile, big_jump_delta
FROM {ConfigTable}
WHERE org_id IS NULL
LIMIT 1
SELECT
COALESCE((SELECT high_score FROM {ConfigTable} WHERE org_id IS NULL LIMIT 1), 0.50) AS high_score,
COALESCE((SELECT high_percentile FROM {ConfigTable} WHERE org_id IS NULL LIMIT 1), 0.95) AS high_percentile,
COALESCE((SELECT big_jump_delta FROM {ConfigTable} WHERE org_id IS NULL LIMIT 1), 0.10) AS big_jump_delta
) cfg
""";

@@ -493,15 +493,15 @@ public sealed class PostgresEpssRepository : IEpssRepository
SELECT
cve_id,
flags,
prev_score,
old_score,
old_percentile,
new_score,
new_percentile,
prev_band,
model_date
FROM {ChangesTable}
WHERE model_date = @ModelDate
{(flags.HasValue ? "AND (flags & @Flags) != 0" : "")}
ORDER BY new_score DESC
ORDER BY new_score DESC, cve_id
LIMIT @Limit
""";

@@ -521,10 +521,10 @@ public sealed class PostgresEpssRepository : IEpssRepository
{
CveId = r.cve_id,
Flags = (Core.Epss.EpssChangeFlags)r.flags,
PreviousScore = r.prev_score,
PreviousScore = r.old_score,
NewScore = r.new_score,
NewPercentile = r.new_percentile,
PreviousBand = (Core.Epss.EpssPriorityBand)r.prev_band,
PreviousBand = ComputeBand(r.old_score, r.old_percentile),
ModelDate = r.model_date
}).ToList();
}
@@ -533,13 +533,41 @@ public sealed class PostgresEpssRepository : IEpssRepository
{
public string cve_id { get; set; } = "";
public int flags { get; set; }
public double? prev_score { get; set; }
public double? old_score { get; set; }
public double? old_percentile { get; set; }
public double new_score { get; set; }
public double new_percentile { get; set; }
public int prev_band { get; set; }
public DateOnly model_date { get; set; }
}

private static Core.Epss.EpssPriorityBand ComputeBand(double? score, double? percentile)
{
// Keep logic deterministic and aligned with the sprint band thresholds:
// CRITICAL >= 99.5%, HIGH >= 99%, MEDIUM >= 90%, LOW otherwise.
// (Score-based elevation is handled at higher layers when needed.)
if (score is null || percentile is null)
{
return Core.Epss.EpssPriorityBand.Unknown;
}

if (percentile.Value >= 0.995)
{
return Core.Epss.EpssPriorityBand.Critical;
}

if (percentile.Value >= 0.99)
{
return Core.Epss.EpssPriorityBand.High;
}

if (percentile.Value >= 0.90)
{
return Core.Epss.EpssPriorityBand.Medium;
}

return Core.Epss.EpssPriorityBand.Low;
}

private sealed class StageCounts
{
public int distinct_count { get; set; }

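For reference, a minimal sketch of how the percentile thresholds in ComputeBand map sample rows; the values and the local Band helper are illustrative only and simply restate the thresholds from the method above.

using System;

Console.WriteLine(Band(0.82, 0.9961)); // Critical (percentile >= 0.995)
Console.WriteLine(Band(0.43, 0.9912)); // High     (percentile >= 0.99)
Console.WriteLine(Band(0.07, 0.9340)); // Medium   (percentile >= 0.90)
Console.WriteLine(Band(0.01, 0.5100)); // Low
Console.WriteLine(Band(null, null));   // Unknown  (no prior score/percentile)

static string Band(double? score, double? percentile) =>
    score is null || percentile is null ? "Unknown"
    : percentile >= 0.995 ? "Critical"
    : percentile >= 0.99 ? "High"
    : percentile >= 0.90 ? "Medium"
    : "Low";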