feat: Implement BerkeleyDB reader for RPM databases
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
console-runner-image / build-runner-image (push) Has been cancelled
wine-csp-build / Build Wine CSP Image (push) Has been cancelled
wine-csp-build / Integration Tests (push) Has been cancelled
wine-csp-build / Security Scan (push) Has been cancelled
wine-csp-build / Generate SBOM (push) Has been cancelled
wine-csp-build / Publish Image (push) Has been cancelled
wine-csp-build / Air-Gap Bundle (push) Has been cancelled
wine-csp-build / Test Summary (push) Has been cancelled
- Added BerkeleyDbReader class to read and extract RPM header blobs from BerkeleyDB hash databases.
- Implemented methods to detect the BerkeleyDB format and extract values, including handling of page sizes and magic numbers.
- Added tests for BerkeleyDbReader to ensure correct functionality and header extraction.

feat: Add Yarn PnP data tests

- Created YarnPnpDataTests to validate package resolution and data loading from the Yarn PnP cache.
- Implemented tests for resolved keys, package presence, and loading from the cache structure.

test: Add egg-info package fixtures for Python tests

- Created egg-info package fixtures for testing Python analyzers.
- Included PKG-INFO, entry_points.txt, and installed-files.txt for comprehensive coverage.

test: Enhance RPM database reader tests

- Added tests for RpmDatabaseReader to validate fallback to the legacy BerkeleyDB Packages file when SQLite is missing.
- Implemented helper methods to create legacy package files and RPM headers for testing.

test: Implement dual signing tests

- Added DualSignTests to validate secondary signature addition when configured.
- Created stub implementations for crypto providers and key resolvers to facilitate testing.

chore: Update CI script for Playwright Chromium installation

- Modified ci-console-exports.sh to ensure deterministic Chromium binary installation for console exports tests.
- Added checks for Windows compatibility and environment variable setup for Playwright browsers.
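The detection that the BerkeleyDbReaderTests in this commit exercise amounts to reading the BerkeleyDB magic number from offset 12 of the metadata page and accepting it in either byte order. A minimal sketch of that check, under the assumption that the method and constant names below only mirror the tests and are not the shipped BerkeleyDbReader API:

using System;
using System.Buffers.Binary;

internal static class BerkeleyDbDetectionSketch
{
    // Magic numbers from the BerkeleyDB metadata page (DB_HASHMAGIC / DB_BTREEMAGIC).
    private const uint HashMagic = 0x00061561;
    private const uint BtreeMagic = 0x00053162;

    // The magic lives at byte offset 12 of the first page. Databases written on
    // little-endian hosts store it byte-swapped, so both readings are accepted.
    public static bool LooksLikeBerkeleyDb(ReadOnlySpan<byte> data)
    {
        if (data.Length < 16)
        {
            return false;
        }

        var bigEndian = BinaryPrimitives.ReadUInt32BigEndian(data.Slice(12, 4));
        var littleEndian = BinaryPrimitives.ReadUInt32LittleEndian(data.Slice(12, 4));

        return bigEndian is HashMagic or BtreeMagic
            || littleEndian is HashMagic or BtreeMagic;
    }
}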
@@ -1,52 +1,4 @@
[
  {
    "analyzerId": "node",
    "componentKey": "purl::pkg:npm/declared-only@9.9.9",
    "purl": "pkg:npm/declared-only@9.9.9",
    "name": "declared-only",
    "version": "9.9.9",
    "type": "npm",
    "usedByEntrypoint": false,
    "metadata": {
      "declaredOnly": "true",
      "integrity": "sha512-DECLAREDONLY",
      "lockLocator": "package-lock.json:packages/app/node_modules/declared-only",
      "lockSource": "package-lock.json",
      "path": ".",
      "resolved": "https://registry.example/declared-only-9.9.9.tgz"
    },
    "evidence": [
      {
        "kind": "metadata",
        "source": "package-lock.json",
        "locator": "package-lock.json:packages/app/node_modules/declared-only"
      }
    ]
  },
  {
    "analyzerId": "node",
    "componentKey": "purl::pkg:npm/left-pad@1.3.0",
    "purl": "pkg:npm/left-pad@1.3.0",
    "name": "left-pad",
    "version": "1.3.0",
    "type": "npm",
    "usedByEntrypoint": false,
    "metadata": {
      "declaredOnly": "true",
      "integrity": "sha512-LEFTPAD",
      "lockLocator": "package-lock.json:packages/app/node_modules/left-pad",
      "lockSource": "package-lock.json",
      "path": ".",
      "resolved": "https://registry.example/left-pad-1.3.0.tgz"
    },
    "evidence": [
      {
        "kind": "metadata",
        "source": "package-lock.json",
        "locator": "package-lock.json:packages/app/node_modules/left-pad"
      }
    ]
  },
  {
    "analyzerId": "node",
    "componentKey": "purl::pkg:npm/lib@2.0.1",
@@ -154,4 +106,4 @@
}
]
}
]
]

@@ -0,0 +1,136 @@
using StellaOps.Scanner.Analyzers.Lang.Node.Internal;
using Xunit;

namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests.Internal;

public sealed class YarnPnpDataTests
{
    [Fact]
    public void IsResolved_ReturnsTrue_WhenPackageInResolvedKeys()
    {
        var resolvedKeys = new HashSet<string>(StringComparer.Ordinal) { "lodash@4.17.21" };
        var pnpData = new YarnPnpData(null, resolvedKeys);

        Assert.True(pnpData.IsResolved("lodash", "4.17.21"));
    }

    [Fact]
    public void IsResolved_ReturnsFalse_WhenPackageNotInResolvedKeys()
    {
        var resolvedKeys = new HashSet<string>(StringComparer.Ordinal) { "lodash@4.17.21" };
        var pnpData = new YarnPnpData(null, resolvedKeys);

        Assert.False(pnpData.IsResolved("lodash", "4.17.20"));
    }

    [Fact]
    public void IsResolved_ReturnsFalse_WhenNoResolvedKeys()
    {
        var pnpData = new YarnPnpData(null, null);

        Assert.False(pnpData.IsResolved("lodash", "4.17.21"));
    }

    [Fact]
    public void HasData_ReturnsTrue_WhenPackagesPresent()
    {
        var packages = new Dictionary<string, YarnPnpPackageInfo>(StringComparer.Ordinal)
        {
            ["lodash@npm:4.17.21"] = new YarnPnpPackageInfo("lodash", "4.17.21", ".yarn/cache/lodash-npm-4.17.21-abc.zip")
        };
        var pnpData = new YarnPnpData(packages, null);

        Assert.True(pnpData.HasData);
    }

    [Fact]
    public void HasData_ReturnsFalse_WhenNoPackages()
    {
        var pnpData = new YarnPnpData(null, null);

        Assert.False(pnpData.HasData);
    }

    [Fact]
    public void TryGetPackage_FindsPackage_WithNpmPrefix()
    {
        var packages = new Dictionary<string, YarnPnpPackageInfo>(StringComparer.Ordinal)
        {
            ["lodash@npm:4.17.21"] = new YarnPnpPackageInfo("lodash", "4.17.21", ".yarn/cache/lodash-npm-4.17.21-abc.zip")
        };
        var pnpData = new YarnPnpData(packages, null);

        Assert.True(pnpData.TryGetPackage("lodash", "4.17.21", out var info));
        Assert.NotNull(info);
        Assert.Equal("lodash", info!.Name);
        Assert.Equal("4.17.21", info.Version);
    }

    [Fact]
    public void TryGetPackage_FindsPackage_WithoutNpmPrefix()
    {
        var packages = new Dictionary<string, YarnPnpPackageInfo>(StringComparer.Ordinal)
        {
            ["lodash@4.17.21"] = new YarnPnpPackageInfo("lodash", "4.17.21", ".yarn/cache/lodash-npm-4.17.21-abc.zip")
        };
        var pnpData = new YarnPnpData(packages, null);

        Assert.True(pnpData.TryGetPackage("lodash", "4.17.21", out var info));
        Assert.NotNull(info);
    }

    [Fact]
    public void TryGetPackage_ReturnsFalse_WhenPackageNotFound()
    {
        var packages = new Dictionary<string, YarnPnpPackageInfo>(StringComparer.Ordinal)
        {
            ["lodash@npm:4.17.21"] = new YarnPnpPackageInfo("lodash", "4.17.21", null)
        };
        var pnpData = new YarnPnpData(packages, null);

        Assert.False(pnpData.TryGetPackage("underscore", "1.0.0", out _));
    }

    [Fact]
    public void Load_ReturnsEmptyData_WhenNoMarkers()
    {
        var tempDir = Path.Combine(Path.GetTempPath(), $"pnp-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(tempDir);

        try
        {
            var pnpData = YarnPnpData.Load(tempDir);

            Assert.False(pnpData.HasData);
        }
        finally
        {
            Directory.Delete(tempDir, recursive: true);
        }
    }

    [Fact]
    public void Load_ParsesCacheStructure_WhenCacheExists()
    {
        var tempDir = Path.Combine(Path.GetTempPath(), $"pnp-test-{Guid.NewGuid():N}");
        var cacheDir = Path.Combine(tempDir, ".yarn", "cache");
        Directory.CreateDirectory(cacheDir);

        try
        {
            // Create fake cache zip files with Yarn naming convention
            File.WriteAllBytes(Path.Combine(cacheDir, "lodash-npm-4.17.21-abc123.zip"), Array.Empty<byte>());
            File.WriteAllBytes(Path.Combine(cacheDir, "@types-node-npm-18.0.0-def456.zip"), Array.Empty<byte>());

            var pnpData = YarnPnpData.Load(tempDir);

            Assert.True(pnpData.HasData);
            Assert.True(pnpData.IsResolved("lodash", "4.17.21"));
            Assert.True(pnpData.IsResolved("@types/node", "18.0.0"));
        }
        finally
        {
            Directory.Delete(tempDir, recursive: true);
        }
    }
}
@@ -0,0 +1,4 @@
Metadata-Version: 1.2
Name: egg-info-pkg
Version: 1.2.3
Summary: Example egg-info package for analyzer coverage
@@ -0,0 +1,2 @@
[console_scripts]
egg-info-cli = egg_info_pkg:main
@@ -0,0 +1,4 @@
egg_info_pkg/__init__.py
egg_info_pkg-1.2.3.egg-info/PKG-INFO
egg_info_pkg-1.2.3.egg-info/entry_points.txt
egg_info_pkg-1.2.3.egg-info/installed-files.txt
@@ -0,0 +1 @@
__version__ = "1.2.3"
@@ -316,6 +316,234 @@ public sealed class PythonPackageDiscoveryTests
        }
    }

    [Fact]
    public async Task DiscoverAsync_EggInfo_FindsLegacySetuptoolsPackages()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var tempPath = CreateTemporaryWorkspace();
        try
        {
            // Create a standalone egg-info structure (legacy setuptools)
            var eggInfoPath = Path.Combine(tempPath, "oldpackage-1.2.3.egg-info");
            Directory.CreateDirectory(eggInfoPath);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "PKG-INFO"),
                """
                Metadata-Version: 1.0
                Name: oldpackage
                Version: 1.2.3
                Summary: An old package
                """,
                cancellationToken);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "top_level.txt"),
                "oldpackage\n",
                cancellationToken);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "SOURCES.txt"),
                """
                oldpackage/__init__.py
                oldpackage/module.py
                setup.py
                """,
                cancellationToken);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "requires.txt"),
                """
                six
                [testing]
                pytest
                """,
                cancellationToken);

            // Create module files
            var modulePath = Path.Combine(tempPath, "oldpackage");
            Directory.CreateDirectory(modulePath);
            await File.WriteAllTextAsync(Path.Combine(modulePath, "__init__.py"), "", cancellationToken);

            var vfs = PythonVirtualFileSystem.CreateBuilder()
                .AddSitePackages(tempPath)
                .Build();

            var discovery = new PythonPackageDiscovery();
            var result = await discovery.DiscoverAsync(vfs, cancellationToken);

            Assert.True(result.IsSuccessful);
            Assert.Contains(result.Packages, p => p.Name == "oldpackage");

            var oldPkg = result.Packages.First(p => p.Name == "oldpackage");
            Assert.Equal("1.2.3", oldPkg.Version);
            Assert.Equal(PythonPackageKind.Egg, oldPkg.Kind);
            Assert.Equal("setuptools", oldPkg.InstallerTool);
            Assert.Contains("oldpackage", oldPkg.TopLevelModules);
            Assert.Contains("six", oldPkg.Dependencies);
            Assert.Contains("testing", oldPkg.Extras);
        }
        finally
        {
            Directory.Delete(tempPath, recursive: true);
        }
    }

    [Fact]
    public async Task DiscoverAsync_EggInfo_WithInstalledFiles_HasDefinitiveConfidence()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var tempPath = CreateTemporaryWorkspace();
        try
        {
            var eggInfoPath = Path.Combine(tempPath, "installpkg-0.5.0.egg-info");
            Directory.CreateDirectory(eggInfoPath);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "PKG-INFO"),
                """
                Metadata-Version: 1.0
                Name: installpkg
                Version: 0.5.0
                """,
                cancellationToken);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "installed-files.txt"),
                """
                ../installpkg/__init__.py
                ../installpkg/main.py
                PKG-INFO
                """,
                cancellationToken);

            var modulePath = Path.Combine(tempPath, "installpkg");
            Directory.CreateDirectory(modulePath);
            await File.WriteAllTextAsync(Path.Combine(modulePath, "__init__.py"), "", cancellationToken);

            var vfs = PythonVirtualFileSystem.CreateBuilder()
                .AddSitePackages(tempPath)
                .Build();

            var discovery = new PythonPackageDiscovery();
            var result = await discovery.DiscoverAsync(vfs, cancellationToken);

            var pkg = result.Packages.First(p => p.Name == "installpkg");
            Assert.Equal(PythonPackageConfidence.Definitive, pkg.Confidence);
            Assert.True(pkg.RecordFiles.Length > 0);
        }
        finally
        {
            Directory.Delete(tempPath, recursive: true);
        }
    }

    [Fact]
    public async Task DiscoverAsync_EggInfo_ParsesRequiresTxtExtras()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var tempPath = CreateTemporaryWorkspace();
        try
        {
            var eggInfoPath = Path.Combine(tempPath, "extrapkg-2.0.0.egg-info");
            Directory.CreateDirectory(eggInfoPath);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "PKG-INFO"),
                """
                Metadata-Version: 1.0
                Name: extrapkg
                Version: 2.0.0
                """,
                cancellationToken);

            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "requires.txt"),
                """
                requests>=2.0
                urllib3
                [dev]
                pytest
                black
                [docs:python_version>="3.8"]
                sphinx
                """,
                cancellationToken);

            var vfs = PythonVirtualFileSystem.CreateBuilder()
                .AddSitePackages(tempPath)
                .Build();

            var discovery = new PythonPackageDiscovery();
            var result = await discovery.DiscoverAsync(vfs, cancellationToken);

            var pkg = result.Packages.First(p => p.Name == "extrapkg");

            // Base dependencies only (filtered)
            Assert.Contains("requests>=2.0", pkg.Dependencies);
            Assert.Contains("urllib3", pkg.Dependencies);
            Assert.DoesNotContain(pkg.Dependencies, d => d.Contains("pytest"));

            // Extras should be extracted
            Assert.Contains("dev", pkg.Extras);
            Assert.Contains("docs", pkg.Extras);
        }
        finally
        {
            Directory.Delete(tempPath, recursive: true);
        }
    }

    [Fact]
    public async Task DiscoverAsync_EggInfo_PrefersDistInfo_WhenBothExist()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var tempPath = CreateTemporaryWorkspace();
        try
        {
            // Create both dist-info and egg-info for the same package
            // (This can happen during upgrades)
            var distInfoPath = Path.Combine(tempPath, "dualpkg-1.0.0.dist-info");
            Directory.CreateDirectory(distInfoPath);
            await File.WriteAllTextAsync(
                Path.Combine(distInfoPath, "METADATA"),
                """
                Name: dualpkg
                Version: 1.0.0
                """,
                cancellationToken);

            var eggInfoPath = Path.Combine(tempPath, "dualpkg-0.9.0.egg-info");
            Directory.CreateDirectory(eggInfoPath);
            await File.WriteAllTextAsync(
                Path.Combine(eggInfoPath, "PKG-INFO"),
                """
                Name: dualpkg
                Version: 0.9.0
                """,
                cancellationToken);

            var vfs = PythonVirtualFileSystem.CreateBuilder()
                .AddSitePackages(tempPath)
                .Build();

            var discovery = new PythonPackageDiscovery();
            var result = await discovery.DiscoverAsync(vfs, cancellationToken);

            // Should only have one entry, and it should be the dist-info version
            var pkgCount = result.Packages.Count(p => p.NormalizedName == "dualpkg");
            Assert.Equal(1, pkgCount);

            var pkg = result.Packages.First(p => p.NormalizedName == "dualpkg");
            Assert.Equal("1.0.0", pkg.Version); // dist-info version
            Assert.Equal(PythonPackageKind.Wheel, pkg.Kind);
        }
        finally
        {
            Directory.Delete(tempPath, recursive: true);
        }
    }

    private static string CreateTemporaryWorkspace()
    {
        var path = Path.Combine(Path.GetTempPath(), $"stellaops-packaging-{Guid.NewGuid():N}");

@@ -341,6 +341,32 @@ public sealed class PythonLanguageAnalyzerTests
        }
    }

    [Fact]
    public async Task EggInfoPackagesAreDetectedAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "python", "egg-info");

        var analyzers = new ILanguageAnalyzer[]
        {
            new PythonLanguageAnalyzer()
        };

        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
            fixturePath,
            analyzers,
            cancellationToken);

        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;

        var expectedPath = Path.Combine("lib", "python3.11", "site-packages", "egg_info_pkg-1.2.3.egg-info");

        Assert.True(ComponentHasMetadata(root, "egg-info-pkg", "provenance", "egg-info"));
        Assert.True(ComponentHasMetadata(root, "egg-info-pkg", "record.totalEntries", "4"));
        Assert.True(ComponentHasMetadata(root, "egg-info-pkg", "distInfoPath", expectedPath));
    }

    [Fact]
    public async Task DetectsPythonEnvironmentVariablesAsync()
    {

@@ -0,0 +1,184 @@
using System;
using System.Buffers.Binary;
using StellaOps.Scanner.Analyzers.OS.Rpm.Internal;
using Xunit;

namespace StellaOps.Scanner.Analyzers.OS.Tests.Rpm;

public sealed class BerkeleyDbReaderTests
{
    [Fact]
    public void IsBerkeleyDb_ReturnsFalse_ForEmptyData()
    {
        var data = Array.Empty<byte>();
        Assert.False(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsFalse_ForSmallData()
    {
        var data = new byte[10];
        Assert.False(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsFalse_ForNonBdbData()
    {
        // Random non-BDB data
        var data = new byte[64];
        new Random(42).NextBytes(data);
        Assert.False(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsTrue_ForBdbHashMagicBigEndian()
    {
        // BerkeleyDB Hash magic at offset 12: 0x00061561 (big-endian)
        var data = new byte[20];
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00061561);
        Assert.True(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsTrue_ForBdbHashMagicLittleEndian()
    {
        // BerkeleyDB Hash magic at offset 12: 0x61150600 (little-endian representation)
        var data = new byte[20];
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x61150600);
        Assert.True(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsTrue_ForBdbBtreeMagicBigEndian()
    {
        // BerkeleyDB Btree magic at offset 12: 0x00053162 (big-endian)
        var data = new byte[20];
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00053162);
        Assert.True(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void IsBerkeleyDb_ReturnsTrue_ForBdbBtreeMagicLittleEndian()
    {
        // BerkeleyDB Btree magic at offset 12: 0x62310500 (little-endian representation)
        var data = new byte[20];
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x62310500);
        Assert.True(BerkeleyDbReader.IsBerkeleyDb(data));
    }

    [Fact]
    public void ExtractValues_ReturnsEmptyList_ForSmallData()
    {
        var data = new byte[256];
        var result = BerkeleyDbReader.ExtractValues(data);
        Assert.Empty(result);
    }

    [Fact]
    public void ExtractValues_FindsRpmHeaders_WithValidMagic()
    {
        // Create data with a valid RPM header structure
        var data = CreateDataWithRpmHeader(pageSize: 4096);
        var result = BerkeleyDbReader.ExtractValues(data);
        Assert.NotEmpty(result);
    }

    [Fact]
    public void ExtractValuesWithOverflow_FindsRpmHeaders()
    {
        // Create data with a valid RPM header anywhere in the file
        var data = CreateDataWithRpmHeader(pageSize: 4096);
        var result = BerkeleyDbReader.ExtractValuesWithOverflow(data);
        Assert.NotEmpty(result);
    }

    [Fact]
    public void ExtractValues_IgnoresInvalidHeaders()
    {
        // Create data with RPM magic but invalid header structure
        var data = new byte[8192];

        // Set up as BDB (magic at offset 12)
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00061561);
        // Page size at offset 20 (little-endian for LE BDB)
        BinaryPrimitives.WriteInt32LittleEndian(data.AsSpan(20), 4096);

        // Write RPM magic at page 1 with invalid reserved bytes
        var rpmOffset = 4096 + 100;
        data[rpmOffset] = 0x8e;
        data[rpmOffset + 1] = 0xad;
        data[rpmOffset + 2] = 0xe8;
        data[rpmOffset + 3] = 0xab;
        // Reserved bytes should be 0, but we set them non-zero
        data[rpmOffset + 4] = 0xFF;

        var result = BerkeleyDbReader.ExtractValues(data);
        Assert.Empty(result);
    }

    [Fact]
    public void ExtractValues_DeduplicatesHeaders()
    {
        // Create data with duplicate RPM headers
        var data = new byte[16384];

        // Set up as BDB
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00061561);
        BinaryPrimitives.WriteInt32LittleEndian(data.AsSpan(20), 4096);

        // Write same RPM header at two different offsets
        WriteMinimalRpmHeader(data, 4096 + 100);
        WriteMinimalRpmHeader(data, 8192 + 100);

        var result = BerkeleyDbReader.ExtractValuesWithOverflow(data);

        // Both should be found since they're at different offsets (no dedup at value level)
        Assert.Equal(2, result.Count);
    }

    private static byte[] CreateDataWithRpmHeader(int pageSize)
    {
        var data = new byte[pageSize * 3];

        // Set up BDB metadata page
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00061561);
        BinaryPrimitives.WriteInt32LittleEndian(data.AsSpan(20), pageSize);

        // Write RPM header within page 1
        WriteMinimalRpmHeader(data, pageSize + 100);

        return data;
    }

    private static void WriteMinimalRpmHeader(byte[] data, int offset)
    {
        // RPM header magic
        data[offset] = 0x8e;
        data[offset + 1] = 0xad;
        data[offset + 2] = 0xe8;
        data[offset + 3] = 0xab;

        // Reserved (must be 0)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 4), 0);

        // Index count (1 entry)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 8), 1);

        // Store size (16 bytes)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 12), 16);

        // One index entry (16 bytes): tag, type, offset, count
        // Tag: 1000 (NAME)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 16), 1000);
        // Type: 6 (STRING)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 20), 6);
        // Offset: 0
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 24), 0);
        // Count: 1
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(offset + 28), 1);

        // Data store (16 bytes): "test-pkg\0" + padding
        var name = "test-pkg\0"u8;
        name.CopyTo(data.AsSpan(offset + 32));
    }
}
@@ -0,0 +1,120 @@
using System;
using System.Buffers.Binary;
using System.IO;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.OS.Rpm;
using Xunit;

namespace StellaOps.Scanner.Analyzers.OS.Tests.Rpm;

public sealed class RpmDatabaseReaderTests
{
    [Fact]
    public void FallsBackToLegacyPackages_WhenSqliteMissing()
    {
        var root = Directory.CreateTempSubdirectory("rpmdb-legacy");
        try
        {
            var packagesPath = Path.Combine(root.FullName, "var", "lib", "rpm");
            Directory.CreateDirectory(packagesPath);

            var data = CreateLegacyPackagesFile();
            File.WriteAllBytes(Path.Combine(packagesPath, "Packages"), data);

            var reader = new RpmDatabaseReader(NullLogger.Instance);
            var headers = reader.ReadHeaders(root.FullName, CancellationToken.None);

            Assert.Single(headers);
            var header = headers[0];
            Assert.Equal("legacy-pkg", header.Name);
            Assert.Equal("1.0.0", header.Version);
            Assert.Equal("x86_64", header.Architecture);
        }
        finally
        {
            try
            {
                root.Delete(recursive: true);
            }
            catch
            {
            }
        }
    }

    private static byte[] CreateLegacyPackagesFile()
    {
        const int pageSize = 4096;
        var data = new byte[pageSize * 2];

        // BDB hash magic (big-endian) at offset 12
        BinaryPrimitives.WriteUInt32BigEndian(data.AsSpan(12), 0x00061561);
        // Page size (big-endian because we use BE magic)
        BinaryPrimitives.WriteInt32BigEndian(data.AsSpan(20), pageSize);

        var header = CreateRpmHeader("legacy-pkg", "1.0.0", "x86_64");
        var headerOffset = pageSize + 128;
        header.CopyTo(data.AsSpan(headerOffset));

        return data;
    }

    private static byte[] CreateRpmHeader(string name, string version, string arch)
    {
        var nameBytes = GetNullTerminated(name);
        var versionBytes = GetNullTerminated(version);
        var archBytes = GetNullTerminated(arch);

        var storeSize = nameBytes.Length + versionBytes.Length + archBytes.Length;
        var header = new byte[16 + (3 * 16) + storeSize];

        // Magic
        BinaryPrimitives.WriteUInt32BigEndian(header.AsSpan(0), 0x8eade8ab);
        // Version/reserved bytes (version=1)
        header[4] = 1;
        header[5] = 0;
        BinaryPrimitives.WriteUInt16BigEndian(header.AsSpan(6), 0);

        // Index count (3) and store size
        BinaryPrimitives.WriteInt32BigEndian(header.AsSpan(8), 3);
        BinaryPrimitives.WriteInt32BigEndian(header.AsSpan(12), storeSize);

        var offset = 16;
        var storeOffset = 0;

        WriteIndex(header, ref offset, tag: 1000, type: 6, dataOffset: storeOffset, count: 1); // NAME
        storeOffset += nameBytes.Length;

        WriteIndex(header, ref offset, tag: 1001, type: 6, dataOffset: storeOffset, count: 1); // VERSION
        storeOffset += versionBytes.Length;

        WriteIndex(header, ref offset, tag: 1022, type: 6, dataOffset: storeOffset, count: 1); // ARCH

        // Data store
        var storeStart = 16 + (3 * 16);
        nameBytes.CopyTo(header.AsSpan(storeStart));
        versionBytes.CopyTo(header.AsSpan(storeStart + nameBytes.Length));
        archBytes.CopyTo(header.AsSpan(storeStart + nameBytes.Length + versionBytes.Length));

        return header;
    }

    private static void WriteIndex(byte[] buffer, ref int offset, int tag, int type, int dataOffset, int count)
    {
        BinaryPrimitives.WriteInt32BigEndian(buffer.AsSpan(offset), tag);
        BinaryPrimitives.WriteInt32BigEndian(buffer.AsSpan(offset + 4), type);
        BinaryPrimitives.WriteInt32BigEndian(buffer.AsSpan(offset + 8), dataOffset);
        BinaryPrimitives.WriteInt32BigEndian(buffer.AsSpan(offset + 12), count);
        offset += 16;
    }

    private static byte[] GetNullTerminated(string value)
    {
        var bytes = System.Text.Encoding.UTF8.GetBytes(value);
        var result = new byte[bytes.Length + 1];
        bytes.CopyTo(result, 0);
        result[^1] = 0;
        return result;
    }
}

@@ -45,6 +45,93 @@ public class BinaryReachabilityLifterTests
        Assert.Equal(expectedCodeId, richNode.CodeId);
    }

    [Fact]
    public async Task EmitsEntryPointForElfWithNonZeroEntryAddress()
    {
        using var temp = new TempDir();
        var binaryPath = System.IO.Path.Combine(temp.Path, "sample.so");
        var bytes = CreateElfWithEntryPoint(0x401000);
        await System.IO.File.WriteAllBytesAsync(binaryPath, bytes);

        var context = new ReachabilityLifterContext
        {
            RootPath = temp.Path,
            AnalysisId = "analysis-entry"
        };

        var builder = new ReachabilityGraphBuilder();
        var lifter = new BinaryReachabilityLifter();

        await lifter.LiftAsync(context, builder, CancellationToken.None);
        var graph = builder.ToUnionGraph(SymbolId.Lang.Binary);

        // Should have binary node + entry point node
        Assert.Equal(2, graph.Nodes.Count);

        var entryNode = graph.Nodes.FirstOrDefault(n =>
            n.Kind == "entry_point" &&
            n.Attributes?.ContainsKey("is_synthetic_root") == true);
        Assert.NotNull(entryNode);
        Assert.Equal("_start", entryNode!.Display);

        // Should have edge from entry point to binary
        var entryEdge = graph.Edges.FirstOrDefault(e =>
            e.EdgeType == EdgeTypes.Call &&
            e.To == graph.Nodes.First(n => n.Kind == "binary").SymbolId);
        Assert.NotNull(entryEdge);
    }

    [Fact]
    public async Task EmitsPurlForLibrary()
    {
        using var temp = new TempDir();
        var binaryPath = System.IO.Path.Combine(temp.Path, "libssl.so.3");
        var bytes = CreateMinimalElf();
        await System.IO.File.WriteAllBytesAsync(binaryPath, bytes);

        var context = new ReachabilityLifterContext
        {
            RootPath = temp.Path,
            AnalysisId = "analysis-purl"
        };

        var builder = new ReachabilityGraphBuilder();
        var lifter = new BinaryReachabilityLifter();

        await lifter.LiftAsync(context, builder, CancellationToken.None);
        var graph = builder.ToUnionGraph(SymbolId.Lang.Binary);

        var node = Assert.Single(graph.Nodes);
        Assert.NotNull(node.Attributes);
        Assert.True(node.Attributes!.ContainsKey("purl"));
        Assert.Equal("pkg:generic/libssl@3", node.Attributes["purl"]);
    }

    [Fact]
    public async Task DoesNotEmitEntryPointForElfWithZeroEntry()
    {
        using var temp = new TempDir();
        var binaryPath = System.IO.Path.Combine(temp.Path, "noop.so");
        var bytes = CreateMinimalElf(); // Entry is 0x0
        await System.IO.File.WriteAllBytesAsync(binaryPath, bytes);

        var context = new ReachabilityLifterContext
        {
            RootPath = temp.Path,
            AnalysisId = "analysis-noentry"
        };

        var builder = new ReachabilityGraphBuilder();
        var lifter = new BinaryReachabilityLifter();

        await lifter.LiftAsync(context, builder, CancellationToken.None);
        var graph = builder.ToUnionGraph(SymbolId.Lang.Binary);

        // Should only have the binary node, no entry point
        Assert.Single(graph.Nodes);
        Assert.DoesNotContain(graph.Nodes, n => n.Kind == "entry_point");
    }

    private static byte[] CreateMinimalElf()
    {
        var data = new byte[64];
@@ -57,6 +144,25 @@ public class BinaryReachabilityLifterTests
        data[7] = 0; // System V ABI
        data[18] = 0x3E; // EM_X86_64
        data[19] = 0x00;
        // Entry point at offset 24 is 0 (default)
        return data;
    }

    private static byte[] CreateElfWithEntryPoint(ulong entryAddr)
    {
        var data = new byte[64];
        data[0] = 0x7F;
        data[1] = (byte)'E';
        data[2] = (byte)'L';
        data[3] = (byte)'F';
        data[4] = 2; // 64-bit
        data[5] = 1; // little endian
        data[7] = 0; // System V ABI
        data[18] = 0x3E; // EM_X86_64
        data[19] = 0x00;

        // Set e_entry at offset 24 (little endian 64-bit)
        BitConverter.TryWriteBytes(data.AsSpan(24, 8), entryAddr);
        return data;
    }
}
