Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
@@ -1,134 +1,134 @@
using System;
using System.Diagnostics.Metrics;
using System.IO;
using System.Linq;
using StellaOps.Scanner.Analyzers.Lang.Go;
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Go.Tests;

public sealed class GoLanguageAnalyzerTests
{
    [Fact]
    public async Task BuildInfoFixtureProducesDeterministicOutputAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var analyzers = new ILanguageAnalyzer[]
        {
            new GoLanguageAnalyzer(),
        };

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixturePath,
            goldenPath,
            analyzers,
            cancellationToken);
    }

    [Fact]
    public async Task DwarfOnlyFixtureFallsBackToMetadataAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "go", "dwarf-only");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var analyzers = new ILanguageAnalyzer[]
        {
            new GoLanguageAnalyzer(),
        };

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixturePath,
            goldenPath,
            analyzers,
            cancellationToken);
    }

    [Fact]
    public async Task StrippedBinaryFallsBackToHeuristicBinHashAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var analyzers = new ILanguageAnalyzer[]
        {
            new GoLanguageAnalyzer(),
        };

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixturePath,
            goldenPath,
            analyzers,
            cancellationToken);
    }

    [Fact]
    public async Task ParallelRunsRemainDeterministicAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var analyzers = new ILanguageAnalyzer[]
        {
            new GoLanguageAnalyzer(),
        };

        var tasks = Enumerable
            .Range(0, Environment.ProcessorCount)
            .Select(_ => LanguageAnalyzerTestHarness.AssertDeterministicAsync(
                fixturePath,
                goldenPath,
                analyzers,
                cancellationToken));

        await Task.WhenAll(tasks);
    }

    [Fact]
    public async Task HeuristicMetricCounterIncrementsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped");

        var analyzers = new ILanguageAnalyzer[]
        {
            new GoLanguageAnalyzer(),
        };

        var total = 0L;

        using var listener = new MeterListener
        {
            InstrumentPublished = (instrument, meterListener) =>
            {
                if (instrument.Meter.Name == "StellaOps.Scanner.Analyzers.Lang.Go"
                    && instrument.Name == "scanner_analyzer_golang_heuristic_total")
                {
                    meterListener.EnableMeasurementEvents(instrument);
                }
            }
        };

        listener.SetMeasurementEventCallback<long>((_, measurement, _, _) =>
        {
            Interlocked.Add(ref total, measurement);
        });

        listener.Start();

        await LanguageAnalyzerTestHarness.RunToJsonAsync(
            fixturePath,
            analyzers,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        listener.Dispose();

        Assert.Equal(1, Interlocked.Read(ref total));
    }
}
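The determinism checks above delegate to LanguageAnalyzerTestHarness.AssertDeterministicAsync, whose implementation is not part of this diff. As a rough illustration only, an assertion of that shape typically runs the analyzers more than once over the same fixture and compares the canonical JSON output against the committed expected.json. The GoldenFileAssertions helper below is a hypothetical sketch under that assumption, not the repository's harness.

// Hypothetical sketch: not the actual StellaOps harness implementation.
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Xunit;

internal static class GoldenFileAssertions
{
    public static async Task AssertDeterministicAsync(
        string fixturePath,
        string goldenPath,
        IReadOnlyList<ILanguageAnalyzer> analyzers,
        CancellationToken cancellationToken)
    {
        // Two independent passes over the same fixture must serialize identically.
        var first = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
        var second = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
        Assert.Equal(first, second);

        // Both passes must also match the committed golden output.
        var golden = await File.ReadAllTextAsync(goldenPath, cancellationToken);
        Assert.Equal(golden.Trim(), first.Trim());
    }
}

Under that reading, ParallelRunsRemainDeterministicAsync simply runs the same assertion concurrently on every processor to catch ordering or shared-state issues.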
@@ -1,172 +1,172 @@
using System.Collections.Generic;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Threading;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;

public sealed class JavaClassPathBuilderTests
{
    [Fact]
    public void Build_ClassPathForSimpleJar()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar");

            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
            var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None);

            var segment = Assert.Single(analysis.Segments);
            Assert.Equal("libs/simple.jar", segment.Identifier.Replace('\\', '/'));
            Assert.Contains("com.example.Demo", segment.Classes);

            var package = Assert.Single(segment.Packages);
            Assert.Equal("com.example", package.Key);
            Assert.Equal(1, package.Value.ClassCount);

            Assert.Empty(analysis.DuplicateClasses);
            Assert.Empty(analysis.SplitPackages);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public void Build_CapturesServiceDefinitions()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            var services = new Dictionary<string, string[]>
            {
                ["java.sql.Driver"] = new[] { "com.example.DriverImpl" },
            };

            CreateJarWithClasses(root, "libs/spi.jar", new[] { "com.example.DriverImpl" }, services);

            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
            var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None);

            var segment = Assert.Single(analysis.Segments);
            var providers = Assert.Single(segment.ServiceDefinitions);
            Assert.Equal("java.sql.Driver", providers.Key);
            Assert.Contains("com.example.DriverImpl", providers.Value);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public void Build_FatJarIncludesNestedLibraries()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar");

            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
            var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None);

            Assert.Equal(2, analysis.Segments.Length);

            var classesSegment = analysis.Segments[0];
            Assert.Equal("apps/app-fat.jar!BOOT-INF/classes/", classesSegment.Identifier.Replace('\\', '/'));
            Assert.Contains("com.example.App", classesSegment.Classes);

            var librarySegment = analysis.Segments[1];
            Assert.Equal("apps/app-fat.jar!BOOT-INF/lib/library.jar", librarySegment.Identifier.Replace('\\', '/'));
            Assert.Contains("com.example.Lib", librarySegment.Classes);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public void Build_ReportsDuplicateClassesAndSplitPackages()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            CreateJarWithClasses(root, "libs/a.jar", "com.example.Demo");
            CreateJarWithClasses(root, "libs/b.jar", "com.example.Demo", "com.example.Other");

            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
            var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None);

            Assert.Equal(2, analysis.Segments.Length);

            var duplicate = Assert.Single(analysis.DuplicateClasses);
            Assert.Equal("com.example.Demo", duplicate.ClassName);
            Assert.Equal(2, duplicate.SegmentIdentifiers.Length);

            var split = Assert.Single(analysis.SplitPackages);
            Assert.Equal("com.example", split.PackageName);
            Assert.Equal(2, split.SegmentIdentifiers.Length);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    private static void CreateJarWithClasses(string rootDirectory, string relativePath, params string[] classNames)
        => CreateJarWithClasses(rootDirectory, relativePath, classNames.AsEnumerable(), serviceDefinitions: null);

    private static void CreateJarWithClasses(
        string rootDirectory,
        string relativePath,
        IEnumerable<string> classNames,
        IDictionary<string, string[]>? serviceDefinitions)
    {
        ArgumentNullException.ThrowIfNull(rootDirectory);
        ArgumentException.ThrowIfNullOrEmpty(relativePath);

        var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar));
        Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);

        using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None);
        using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false);

        var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);

        foreach (var className in classNames)
        {
            var entryPath = className.Replace('.', '/') + ".class";
            var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
            entry.LastWriteTime = timestamp;
            using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
            writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
        }

        if (serviceDefinitions is not null)
        {
            foreach (var pair in serviceDefinitions)
            {
                var entryPath = "META-INF/services/" + pair.Key;
                var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
                entry.LastWriteTime = timestamp;
                using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
                foreach (var provider in pair.Value)
                {
                    writer.WriteLine(provider);
                }
            }
        }
    }
}
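Build_ReportsDuplicateClassesAndSplitPackages above relies on the class-path builder flagging a class that appears in more than one segment and a package whose classes are spread across segments. Purely as an illustration of that grouping, the detection can be expressed as a LINQ aggregation over (segment, class) pairs; the tuple shapes and helper names below are invented for the sketch, not the analyzer's real types.

// Illustrative sketch only; not JavaClassPathBuilder's implementation.
using System;
using System.Collections.Generic;
using System.Linq;

internal static class ClassPathConflicts
{
    // Classes that appear in more than one class-path segment (e.g. two jars
    // both shipping com.example.Demo) are reported as duplicates.
    public static IReadOnlyList<(string ClassName, string[] Segments)> FindDuplicates(
        IEnumerable<(string Segment, string ClassName)> entries) =>
        entries
            .GroupBy(e => e.ClassName, StringComparer.Ordinal)
            .Select(g => (ClassName: g.Key,
                          Segments: g.Select(e => e.Segment).Distinct(StringComparer.Ordinal)
                                     .OrderBy(s => s, StringComparer.Ordinal).ToArray()))
            .Where(d => d.Segments.Length > 1)
            .OrderBy(d => d.ClassName, StringComparer.Ordinal)
            .ToList();

    // A package is "split" when its classes are spread across more than one segment.
    public static IReadOnlyList<(string PackageName, string[] Segments)> FindSplitPackages(
        IEnumerable<(string Segment, string ClassName)> entries) =>
        entries
            .Where(e => e.ClassName.Contains('.'))
            .Select(e => (e.Segment, Package: e.ClassName[..e.ClassName.LastIndexOf('.')]))
            .GroupBy(e => e.Package, StringComparer.Ordinal)
            .Select(g => (PackageName: g.Key,
                          Segments: g.Select(e => e.Segment).Distinct(StringComparer.Ordinal)
                                     .OrderBy(s => s, StringComparer.Ordinal).ToArray()))
            .Where(p => p.Segments.Length > 1)
            .OrderBy(p => p.PackageName, StringComparer.Ordinal)
            .ToList();
}

With the fixture above (libs/a.jar and libs/b.jar both containing com.example.Demo), FindDuplicates would yield one entry with two segment identifiers, which is exactly what the test asserts against analysis.DuplicateClasses.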
@@ -1,481 +1,481 @@
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;

public sealed class JavaLanguageAnalyzerTests
{
    [Fact]
    public async Task ExtractsMavenArtifactFromJarAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            var jarPath = JavaFixtureBuilder.CreateSampleJar(root);
            var usageHints = new LanguageUsageHints(new[] { jarPath });
            var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
            var goldenPath = TestPaths.ResolveFixture("java", "basic", "expected.json");

            await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
                fixturePath: root,
                goldenPath: goldenPath,
                analyzers: analyzers,
                cancellationToken: cancellationToken,
                usageHints: usageHints);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public async Task LockfilesProduceDeclaredOnlyComponentsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var root = TestPaths.CreateTemporaryDirectory();

        try
        {
            var jarPath = CreateSampleJar(root, "com.example", "runtime-only", "1.0.0");
            var lockPath = Path.Combine(root, "gradle.lockfile");
            var lockContent = new StringBuilder()
                .AppendLine("com.example:declared-only:2.0.0=runtimeClasspath")
                .ToString();
            await File.WriteAllTextAsync(lockPath, lockContent, cancellationToken);

            var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
            var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
                root,
                analyzers,
                cancellationToken,
                new LanguageUsageHints(new[] { jarPath }));

            using var document = JsonDocument.Parse(json);
            var rootElement = document.RootElement;

            Assert.True(ComponentHasMetadata(rootElement, "declared-only", "declaredOnly", "true"));
            Assert.True(ComponentHasMetadata(rootElement, "declared-only", "lockSource", "gradle.lockfile"));
            Assert.True(ComponentHasMetadata(rootElement, "runtime-only", "lockMissing", "true"));
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public async Task CapturesFrameworkConfigurationHintsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var root = TestPaths.CreateTemporaryDirectory();

        try
        {
            var jarPath = Path.Combine(root, "demo-framework.jar");
            Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);

            using (var archive = ZipFile.Open(jarPath, ZipArchiveMode.Create))
            {
                WritePomProperties(archive, "com.example", "demo-framework", "1.0.0");
                WriteManifest(archive, "demo-framework", "1.0.0", "com.example");

                CreateTextEntry(archive, "META-INF/spring.factories");
                CreateTextEntry(archive, "META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports");
                CreateTextEntry(archive, "META-INF/spring/org.springframework.boot.actuate.autoconfigure.AutoConfiguration.imports");
                CreateTextEntry(archive, "BOOT-INF/classes/application.yml");
                CreateTextEntry(archive, "WEB-INF/web.xml");
                CreateTextEntry(archive, "META-INF/web-fragment.xml");
                CreateTextEntry(archive, "META-INF/persistence.xml");
                CreateTextEntry(archive, "META-INF/beans.xml");
                CreateTextEntry(archive, "META-INF/jaxb.index");
                CreateTextEntry(archive, "META-INF/services/jakarta.ws.rs.ext.RuntimeDelegate");
                CreateTextEntry(archive, "logback.xml");
                CreateTextEntry(archive, "META-INF/native-image/demo/reflect-config.json");
            }

            var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
            var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
                root,
                analyzers,
                cancellationToken,
                new LanguageUsageHints(new[] { jarPath }));

            using var document = JsonDocument.Parse(json);
            var component = document.RootElement
                .EnumerateArray()
                .First(element => string.Equals(element.GetProperty("name").GetString(), "demo-framework", StringComparison.Ordinal));

            var metadata = component.GetProperty("metadata");
            Assert.Equal("demo-framework.jar!META-INF/spring.factories", metadata.GetProperty("config.spring.factories").GetString());
            Assert.Equal(
                "demo-framework.jar!META-INF/spring/org.springframework.boot.actuate.autoconfigure.AutoConfiguration.imports,demo-framework.jar!META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports",
                metadata.GetProperty("config.spring.imports").GetString());
            Assert.Equal("demo-framework.jar!BOOT-INF/classes/application.yml", metadata.GetProperty("config.spring.properties").GetString());
            Assert.Equal("demo-framework.jar!WEB-INF/web.xml", metadata.GetProperty("config.web.xml").GetString());
            Assert.Equal("demo-framework.jar!META-INF/web-fragment.xml", metadata.GetProperty("config.web.fragment").GetString());
            Assert.Equal("demo-framework.jar!META-INF/persistence.xml", metadata.GetProperty("config.jpa").GetString());
            Assert.Equal("demo-framework.jar!META-INF/beans.xml", metadata.GetProperty("config.cdi").GetString());
            Assert.Equal("demo-framework.jar!META-INF/jaxb.index", metadata.GetProperty("config.jaxb").GetString());
            Assert.Equal("demo-framework.jar!META-INF/services/jakarta.ws.rs.ext.RuntimeDelegate", metadata.GetProperty("config.jaxrs").GetString());
            Assert.Equal("demo-framework.jar!logback.xml", metadata.GetProperty("config.logging").GetString());
            Assert.Equal("demo-framework.jar!META-INF/native-image/demo/reflect-config.json", metadata.GetProperty("config.graal").GetString());

            var evidence = component.GetProperty("evidence").EnumerateArray().ToArray();
            Assert.Contains(evidence, e =>
                string.Equals(e.GetProperty("source").GetString(), "framework-config", StringComparison.OrdinalIgnoreCase) &&
                string.Equals(e.GetProperty("locator").GetString(), "demo-framework.jar!META-INF/spring.factories", StringComparison.OrdinalIgnoreCase) &&
                e.TryGetProperty("sha256", out var sha) &&
                !string.IsNullOrWhiteSpace(sha.GetString()));
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public async Task CapturesJniHintsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var root = TestPaths.CreateTemporaryDirectory();

        try
        {
            var jarPath = Path.Combine(root, "demo-jni.jar");
            Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);

            using (var archive = ZipFile.Open(jarPath, ZipArchiveMode.Create))
            {
                WritePomProperties(archive, "com.example", "demo-jni", "1.0.0");
                WriteManifest(archive, "demo-jni", "1.0.0", "com.example");

                CreateBinaryEntry(archive, "com/example/App.class", "System.loadLibrary(\"foo\")");
                CreateTextEntry(archive, "lib/native/libfoo.so");
                CreateTextEntry(archive, "META-INF/native-image/demo/jni-config.json");
            }

            var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
            var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(
                root,
                analyzers,
                cancellationToken,
                new LanguageUsageHints(new[] { jarPath }));

            using var document = JsonDocument.Parse(json);
            var component = document.RootElement
                .EnumerateArray()
                .First(element => string.Equals(element.GetProperty("name").GetString(), "demo-jni", StringComparison.Ordinal));

            var metadata = component.GetProperty("metadata");
            Assert.Equal("libfoo.so", metadata.GetProperty("jni.nativeLibs").GetString());
            Assert.Equal("demo-jni.jar!META-INF/native-image/demo/jni-config.json", metadata.GetProperty("jni.graalConfig").GetString());
            Assert.Equal("demo-jni.jar!com/example/App.class", metadata.GetProperty("jni.loadCalls").GetString());
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    #region Build File Fixture Integration Tests

    [Fact]
    public async Task ParsesGradleGroovyBuildFileAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "gradle-groovy");
        var goldenPath = TestPaths.ResolveFixture("java", "gradle-groovy", "expected.json");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify key dependencies are detected
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "guava"));
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "commons-lang3"));
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "slf4j-api"));

        // Verify declaredOnly flag is set for build file dependencies
        var guava = components.First(c => c.GetProperty("name").GetString() == "guava");
        Assert.True(guava.GetProperty("metadata").TryGetProperty("declaredOnly", out var declaredOnly));
        Assert.Equal("true", declaredOnly.GetString());
    }

    [Fact]
    public async Task ParsesGradleKotlinBuildFileAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "gradle-kotlin");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify Kotlin DSL dependencies are detected
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "kotlin-stdlib"));
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "jackson-databind"));

        // Verify kapt/ksp dependencies are detected
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "mapstruct-processor"));
    }

    [Fact]
    public async Task ParsesGradleVersionCatalogAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "gradle-catalog");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "logback-classic"));
        var logback = components.First(c => c.GetProperty("name").GetString() == "logback-classic");
        Assert.Equal("1.4.14", logback.GetProperty("version").GetString());
    }

    [Fact]
    public async Task ParsesMavenParentPomAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "maven-parent");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify dependencies with inherited versions are detected
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "slf4j-api"));
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "spring-core"));

        // Verify version is inherited from parent
        var springCore = components.First(c => c.GetProperty("name").GetString() == "spring-core");
        Assert.Equal("6.1.0", springCore.GetProperty("version").GetString());
    }

    [Fact]
    public async Task ParsesMavenBomImportsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "maven-bom");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "commons-lang3"));
        Assert.True(components.Any(c => c.GetProperty("name").GetString() == "lombok"));

        var commonsLang = components.First(c => c.GetProperty("name").GetString() == "commons-lang3");
        Assert.Equal("3.14.0", commonsLang.GetProperty("version").GetString());
    }

    [Fact]
    public async Task ParsesMavenPropertyPlaceholdersAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "maven-properties");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify property placeholders are resolved
        var springCore = components.FirstOrDefault(c => c.GetProperty("name").GetString() == "spring-core");
        Assert.NotEqual(JsonValueKind.Undefined, springCore.ValueKind);
        Assert.Equal("6.1.0", springCore.GetProperty("version").GetString());

        // Verify versionProperty metadata is captured
        var metadata = springCore.GetProperty("metadata");
        Assert.True(metadata.TryGetProperty("maven.versionProperty", out var versionProp));
        Assert.Equal("spring.version", versionProp.GetString());
    }

    [Fact]
    public async Task ParsesMavenScopesAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "maven-scopes");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify different scopes are captured
        var guava = components.First(c => c.GetProperty("name").GetString() == "guava");
        Assert.Equal("compile", guava.GetProperty("metadata").GetProperty("declaredScope").GetString());

        var servletApi = components.First(c => c.GetProperty("name").GetString() == "jakarta.servlet-api");
        Assert.Equal("provided", servletApi.GetProperty("metadata").GetProperty("declaredScope").GetString());

        var postgresql = components.First(c => c.GetProperty("name").GetString() == "postgresql");
        Assert.Equal("runtime", postgresql.GetProperty("metadata").GetProperty("declaredScope").GetString());

        var junit = components.First(c => c.GetProperty("name").GetString() == "junit-jupiter");
        Assert.Equal("test", junit.GetProperty("metadata").GetProperty("declaredScope").GetString());

        // Verify optional flag
        var springContext = components.First(c => c.GetProperty("name").GetString() == "spring-context");
        Assert.True(springContext.GetProperty("metadata").TryGetProperty("optional", out var optional));
        Assert.Equal("true", optional.GetString());
    }

    [Fact]
    public async Task DetectsVersionConflictsAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("java", "version-conflict");

        var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
        var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);

        using var document = JsonDocument.Parse(json);
        var components = document.RootElement.EnumerateArray().ToArray();

        // Verify Jackson version conflict is detected
        var jacksonDatabind = components.First(c => c.GetProperty("name").GetString() == "jackson-databind");
        var metadata = jacksonDatabind.GetProperty("metadata");

        if (metadata.TryGetProperty("versionConflict.group", out var conflictGroup))
        {
            Assert.Equal("com.fasterxml.jackson.core", conflictGroup.GetString());
        }

        // Verify Spring version conflict is detected
        var springCore = components.First(c => c.GetProperty("name").GetString() == "spring-core");
        var springMetadata = springCore.GetProperty("metadata");

        if (springMetadata.TryGetProperty("versionConflict.group", out var springConflictGroup))
        {
            Assert.Equal("org.springframework", springConflictGroup.GetString());
        }
    }

    #endregion

    private static bool ComponentHasMetadata(JsonElement root, string componentName, string key, string expected)
    {
        foreach (var element in root.EnumerateArray())
        {
            if (!element.TryGetProperty("name", out var nameElement) ||
                !string.Equals(nameElement.GetString(), componentName, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            if (!element.TryGetProperty("metadata", out var metadataElement) || metadataElement.ValueKind != JsonValueKind.Object)
            {
                continue;
            }

            if (!metadataElement.TryGetProperty(key, out var valueElement))
            {
                continue;
            }

            if (string.Equals(valueElement.GetString(), expected, StringComparison.Ordinal))
            {
                return true;
            }
        }

        return false;
    }

    private static void WritePomProperties(ZipArchive archive, string groupId, string artifactId, string version)
    {
        var pomPropertiesPath = $"META-INF/maven/{groupId}/{artifactId}/pom.properties";
        var pomPropertiesEntry = archive.CreateEntry(pomPropertiesPath);
        using var writer = new StreamWriter(pomPropertiesEntry.Open(), Encoding.UTF8);
        writer.WriteLine($"groupId={groupId}");
        writer.WriteLine($"artifactId={artifactId}");
        writer.WriteLine($"version={version}");
        writer.WriteLine("packaging=jar");
        writer.WriteLine("name=Sample");
    }

    private static void WriteManifest(ZipArchive archive, string artifactId, string version, string groupId)
    {
        var manifestEntry = archive.CreateEntry("META-INF/MANIFEST.MF");
        using var writer = new StreamWriter(manifestEntry.Open(), Encoding.UTF8);
        writer.WriteLine("Manifest-Version: 1.0");
        writer.WriteLine($"Implementation-Title: {artifactId}");
        writer.WriteLine($"Implementation-Version: {version}");
        writer.WriteLine($"Implementation-Vendor: {groupId}");
    }

    private static void CreateTextEntry(ZipArchive archive, string path, string? content = null)
    {
        var entry = archive.CreateEntry(path);
        using var writer = new StreamWriter(entry.Open(), Encoding.UTF8);
        if (!string.IsNullOrEmpty(content))
        {
            writer.Write(content);
        }
    }

    private static void CreateBinaryEntry(ZipArchive archive, string path, string content)
    {
        var entry = archive.CreateEntry(path);
        using var stream = entry.Open();
        var bytes = Encoding.UTF8.GetBytes(content);
        stream.Write(bytes, 0, bytes.Length);
    }

    private static string CreateSampleJar(string root, string groupId, string artifactId, string version)
    {
        var jarPath = Path.Combine(root, $"{artifactId}-{version}.jar");
        Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);

        using var archive = ZipFile.Open(jarPath, ZipArchiveMode.Create);
        var pomPropertiesPath = $"META-INF/maven/{groupId}/{artifactId}/pom.properties";
        var pomPropertiesEntry = archive.CreateEntry(pomPropertiesPath);
        using (var writer = new StreamWriter(pomPropertiesEntry.Open(), Encoding.UTF8))
        {
            writer.WriteLine($"groupId={groupId}");
            writer.WriteLine($"artifactId={artifactId}");
            writer.WriteLine($"version={version}");
            writer.WriteLine("packaging=jar");
            writer.WriteLine("name=Sample");
        }

        var manifestEntry = archive.CreateEntry("META-INF/MANIFEST.MF");
        using (var writer = new StreamWriter(manifestEntry.Open(), Encoding.UTF8))
        {
            writer.WriteLine("Manifest-Version: 1.0");
            writer.WriteLine($"Implementation-Title: {artifactId}");
            writer.WriteLine($"Implementation-Version: {version}");
            writer.WriteLine($"Implementation-Vendor: {groupId}");
        }

        var classEntry = archive.CreateEntry($"{artifactId.Replace('-', '_')}/Main.class");
        using (var stream = classEntry.Open())
        {
            stream.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
        }

        return jarPath;
    }
}
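LockfilesProduceDeclaredOnlyComponentsAsync above writes a single gradle.lockfile line, "com.example:declared-only:2.0.0=runtimeClasspath", which follows Gradle's "group:artifact:version=configuration[,configuration...]" dependency-locking layout. A minimal sketch of parsing that layout follows; the LockedDependency record and GradleLockfileParser below are hypothetical helpers for illustration, not the analyzer's implementation.

// Hypothetical sketch of Gradle lockfile line parsing; not the analyzer's code.
using System;

internal sealed record LockedDependency(string Group, string Artifact, string Version, string[] Configurations);

internal static class GradleLockfileParser
{
    public static LockedDependency? ParseLine(string line)
    {
        line = line.Trim();
        if (line.Length == 0 || line.StartsWith("#", StringComparison.Ordinal))
        {
            return null; // comments and blank lines carry no dependency
        }

        var eq = line.IndexOf('=');
        var coordinate = eq >= 0 ? line[..eq] : line;
        var configurations = eq >= 0
            ? line[(eq + 1)..].Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
            : Array.Empty<string>();

        var parts = coordinate.Split(':');
        if (parts.Length != 3)
        {
            return null; // e.g. the trailing "empty=" marker line in a lockfile
        }

        return new LockedDependency(parts[0], parts[1], parts[2], configurations);
    }
}

For the line used in the test, ParseLine would return group com.example, artifact declared-only, version 2.0.0, and the single configuration runtimeClasspath, which is the coordinate the test then expects to surface as a declaredOnly component with lockSource gradle.lockfile.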
|
||||
{
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var fixturePath = TestPaths.ResolveFixture("java", "maven-properties");
|
||||
|
||||
var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
|
||||
var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
|
||||
|
||||
using var document = JsonDocument.Parse(json);
|
||||
var components = document.RootElement.EnumerateArray().ToArray();
|
||||
|
||||
// Verify property placeholders are resolved
|
||||
var springCore = components.FirstOrDefault(c => c.GetProperty("name").GetString() == "spring-core");
|
||||
Assert.NotEqual(JsonValueKind.Undefined, springCore.ValueKind);
|
||||
Assert.Equal("6.1.0", springCore.GetProperty("version").GetString());
|
||||
|
||||
// Verify versionProperty metadata is captured
|
||||
var metadata = springCore.GetProperty("metadata");
|
||||
Assert.True(metadata.TryGetProperty("maven.versionProperty", out var versionProp));
|
||||
Assert.Equal("spring.version", versionProp.GetString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ParsesMavenScopesAsync()
|
||||
{
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var fixturePath = TestPaths.ResolveFixture("java", "maven-scopes");
|
||||
|
||||
var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
|
||||
var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
|
||||
|
||||
using var document = JsonDocument.Parse(json);
|
||||
var components = document.RootElement.EnumerateArray().ToArray();
|
||||
|
||||
// Verify different scopes are captured
|
||||
var guava = components.First(c => c.GetProperty("name").GetString() == "guava");
|
||||
Assert.Equal("compile", guava.GetProperty("metadata").GetProperty("declaredScope").GetString());
|
||||
|
||||
var servletApi = components.First(c => c.GetProperty("name").GetString() == "jakarta.servlet-api");
|
||||
Assert.Equal("provided", servletApi.GetProperty("metadata").GetProperty("declaredScope").GetString());
|
||||
|
||||
var postgresql = components.First(c => c.GetProperty("name").GetString() == "postgresql");
|
||||
Assert.Equal("runtime", postgresql.GetProperty("metadata").GetProperty("declaredScope").GetString());
|
||||
|
||||
var junit = components.First(c => c.GetProperty("name").GetString() == "junit-jupiter");
|
||||
Assert.Equal("test", junit.GetProperty("metadata").GetProperty("declaredScope").GetString());
|
||||
|
||||
// Verify optional flag
|
||||
var springContext = components.First(c => c.GetProperty("name").GetString() == "spring-context");
|
||||
Assert.True(springContext.GetProperty("metadata").TryGetProperty("optional", out var optional));
|
||||
Assert.Equal("true", optional.GetString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DetectsVersionConflictsAsync()
|
||||
{
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var fixturePath = TestPaths.ResolveFixture("java", "version-conflict");
|
||||
|
||||
var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() };
|
||||
var json = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
|
||||
|
||||
using var document = JsonDocument.Parse(json);
|
||||
var components = document.RootElement.EnumerateArray().ToArray();
|
||||
|
||||
// Verify Jackson version conflict is detected
|
||||
var jacksonDatabind = components.First(c => c.GetProperty("name").GetString() == "jackson-databind");
|
||||
var metadata = jacksonDatabind.GetProperty("metadata");
|
||||
|
||||
if (metadata.TryGetProperty("versionConflict.group", out var conflictGroup))
|
||||
{
|
||||
Assert.Equal("com.fasterxml.jackson.core", conflictGroup.GetString());
|
||||
}
|
||||
|
||||
// Verify Spring version conflict is detected
|
||||
var springCore = components.First(c => c.GetProperty("name").GetString() == "spring-core");
|
||||
var springMetadata = springCore.GetProperty("metadata");
|
||||
|
||||
if (springMetadata.TryGetProperty("versionConflict.group", out var springConflictGroup))
|
||||
{
|
||||
Assert.Equal("org.springframework", springConflictGroup.GetString());
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
private static bool ComponentHasMetadata(JsonElement root, string componentName, string key, string expected)
|
||||
{
|
||||
foreach (var element in root.EnumerateArray())
|
||||
{
|
||||
if (!element.TryGetProperty("name", out var nameElement) ||
|
||||
!string.Equals(nameElement.GetString(), componentName, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!element.TryGetProperty("metadata", out var metadataElement) || metadataElement.ValueKind != JsonValueKind.Object)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!metadataElement.TryGetProperty(key, out var valueElement))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (string.Equals(valueElement.GetString(), expected, StringComparison.Ordinal))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static void WritePomProperties(ZipArchive archive, string groupId, string artifactId, string version)
|
||||
{
|
||||
var pomPropertiesPath = $"META-INF/maven/{groupId}/{artifactId}/pom.properties";
|
||||
var pomPropertiesEntry = archive.CreateEntry(pomPropertiesPath);
|
||||
using var writer = new StreamWriter(pomPropertiesEntry.Open(), Encoding.UTF8);
|
||||
writer.WriteLine($"groupId={groupId}");
|
||||
writer.WriteLine($"artifactId={artifactId}");
|
||||
writer.WriteLine($"version={version}");
|
||||
writer.WriteLine("packaging=jar");
|
||||
writer.WriteLine("name=Sample");
|
||||
}
|
||||
|
||||
private static void WriteManifest(ZipArchive archive, string artifactId, string version, string groupId)
|
||||
{
|
||||
var manifestEntry = archive.CreateEntry("META-INF/MANIFEST.MF");
|
||||
using var writer = new StreamWriter(manifestEntry.Open(), Encoding.UTF8);
|
||||
writer.WriteLine("Manifest-Version: 1.0");
|
||||
writer.WriteLine($"Implementation-Title: {artifactId}");
|
||||
writer.WriteLine($"Implementation-Version: {version}");
|
||||
writer.WriteLine($"Implementation-Vendor: {groupId}");
|
||||
}
|
||||
|
||||
private static void CreateTextEntry(ZipArchive archive, string path, string? content = null)
|
||||
{
|
||||
var entry = archive.CreateEntry(path);
|
||||
using var writer = new StreamWriter(entry.Open(), Encoding.UTF8);
|
||||
if (!string.IsNullOrEmpty(content))
|
||||
{
|
||||
writer.Write(content);
|
||||
}
|
||||
}
|
||||
|
||||
private static void CreateBinaryEntry(ZipArchive archive, string path, string content)
|
||||
{
|
||||
var entry = archive.CreateEntry(path);
|
||||
using var stream = entry.Open();
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
stream.Write(bytes, 0, bytes.Length);
|
||||
}
|
||||
|
||||
private static string CreateSampleJar(string root, string groupId, string artifactId, string version)
|
||||
{
|
||||
var jarPath = Path.Combine(root, $"{artifactId}-{version}.jar");
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
|
||||
|
||||
using var archive = ZipFile.Open(jarPath, ZipArchiveMode.Create);
|
||||
var pomPropertiesPath = $"META-INF/maven/{groupId}/{artifactId}/pom.properties";
|
||||
var pomPropertiesEntry = archive.CreateEntry(pomPropertiesPath);
|
||||
using (var writer = new StreamWriter(pomPropertiesEntry.Open(), Encoding.UTF8))
|
||||
{
|
||||
writer.WriteLine($"groupId={groupId}");
|
||||
writer.WriteLine($"artifactId={artifactId}");
|
||||
writer.WriteLine($"version={version}");
|
||||
writer.WriteLine("packaging=jar");
|
||||
writer.WriteLine("name=Sample");
|
||||
}
|
||||
|
||||
var manifestEntry = archive.CreateEntry("META-INF/MANIFEST.MF");
|
||||
using (var writer = new StreamWriter(manifestEntry.Open(), Encoding.UTF8))
|
||||
{
|
||||
writer.WriteLine("Manifest-Version: 1.0");
|
||||
writer.WriteLine($"Implementation-Title: {artifactId}");
|
||||
writer.WriteLine($"Implementation-Version: {version}");
|
||||
writer.WriteLine($"Implementation-Vendor: {groupId}");
|
||||
}
|
||||
|
||||
var classEntry = archive.CreateEntry($"{artifactId.Replace('-', '_')}/Main.class");
|
||||
using (var stream = classEntry.Open())
|
||||
{
|
||||
stream.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
|
||||
}
|
||||
|
||||
return jarPath;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,82 +1,82 @@
using System.IO.Compression;
using System.Threading;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;

public sealed class JavaReflectionAnalyzerTests
{
    [Fact]
    public void Analyze_ClassForNameLiteral_ProducesEdge()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            var jarPath = Path.Combine(root, "libs", "reflect.jar");
            Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
            using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false))
            {
                var entry = archive.CreateEntry("com/example/Reflective.class");
                var bytes = JavaClassFileFactory.CreateClassForNameInvoker("com/example/Reflective", "com.example.Plugin");
                using var stream = entry.Open();
                stream.Write(bytes);
            }

            var cancellationToken = TestContext.Current.CancellationToken;
            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
            var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken);

            var edge = Assert.Single(analysis.Edges);
            Assert.Equal("com.example.Reflective", edge.SourceClass);
            Assert.Equal("com.example.Plugin", edge.TargetType);
            Assert.Equal(JavaReflectionReason.ClassForName, edge.Reason);
            Assert.Equal(JavaReflectionConfidence.High, edge.Confidence);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public void Analyze_TcclUsage_ProducesWarning()
    {
        var root = TestPaths.CreateTemporaryDirectory();
        try
        {
            var jarPath = Path.Combine(root, "libs", "tccl.jar");
            Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
            using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false))
            {
                var entry = archive.CreateEntry("com/example/Tccl.class");
                var bytes = JavaClassFileFactory.CreateTcclChecker("com/example/Tccl");
                using var stream = entry.Open();
                stream.Write(bytes);
            }

            var cancellationToken = TestContext.Current.CancellationToken;
            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
            var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken);

            Assert.Empty(analysis.Edges);
            var warning = Assert.Single(analysis.Warnings);
            Assert.Equal("tccl", warning.WarningCode);
            Assert.Equal("com.example.Tccl", warning.SourceClass);
        }
        finally
        {
            TestPaths.SafeDelete(root);
        }
    }

    [Fact]
    public void Analyze_SpringBootFatJar_ScansEmbeddedAndBootSegments()
    {
        var root = TestPaths.CreateTemporaryDirectory();

@@ -1,147 +1,147 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java.Internal;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;
|
||||
|
||||
public sealed class JavaServiceProviderScannerTests
|
||||
{
|
||||
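    // Scenario: two JARs each register a java.sql.Driver provider; classpath order (a.jar before b.jar) should select ADriver.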
[Fact]
|
||||
public void Scan_SelectsFirstProviderByClasspathOrder()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
var servicesA = new Dictionary<string, string[]>
|
||||
{
|
||||
["java.sql.Driver"] = new[] { "com.example.ADriver" },
|
||||
};
|
||||
|
||||
var servicesB = new Dictionary<string, string[]>
|
||||
{
|
||||
["java.sql.Driver"] = new[] { "com.example.BDriver" },
|
||||
};
|
||||
|
||||
CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.ADriver" }, servicesA);
|
||||
CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.BDriver" }, servicesB);
|
||||
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
|
||||
var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
|
||||
var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
|
||||
|
||||
var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
|
||||
Assert.Equal("jdk", service.Category);
|
||||
|
||||
var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
|
||||
Assert.Equal("com.example.ADriver", selected.ProviderClass);
|
||||
Assert.Empty(service.Warnings);
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
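    // Scenario: the same provider class is declared by two JARs; the scan should surface a duplicate-provider warning.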
[Fact]
|
||||
public void Scan_FlagsDuplicateProviders()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
var services = new Dictionary<string, string[]>
|
||||
{
|
||||
["java.sql.Driver"] = new[] { "com.example.DuplicateDriver" },
|
||||
};
|
||||
|
||||
CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.DuplicateDriver" }, services);
|
||||
CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.Other" }, services);
|
||||
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
|
||||
var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
|
||||
var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
|
||||
|
||||
var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
|
||||
Assert.NotEmpty(service.Warnings);
|
||||
Assert.Contains(service.Warnings, warning => warning.Contains("duplicate-provider", StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Scan_RespectsBootFatJarOrdering()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar");
|
||||
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
|
||||
var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
|
||||
var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
|
||||
|
||||
var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
|
||||
var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
|
||||
Assert.Equal("com.example.AppDriver", selected.ProviderClass);
|
||||
Assert.Contains(service.Candidates.Select(candidate => candidate.ProviderClass), provider => provider == "com.example.LibDriver");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
private static void CreateJarWithClasses(
|
||||
string rootDirectory,
|
||||
string relativePath,
|
||||
IEnumerable<string> classNames,
|
||||
IDictionary<string, string[]> serviceDefinitions)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(rootDirectory);
|
||||
ArgumentException.ThrowIfNullOrEmpty(relativePath);
|
||||
|
||||
var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
|
||||
|
||||
using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None);
|
||||
using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false);
|
||||
|
||||
var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);
|
||||
|
||||
foreach (var className in classNames)
|
||||
{
|
||||
var entryPath = className.Replace('.', '/') + ".class";
|
||||
var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
|
||||
entry.LastWriteTime = timestamp;
|
||||
using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
|
||||
writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
|
||||
}
|
||||
|
||||
foreach (var pair in serviceDefinitions)
|
||||
{
|
||||
var entryPath = "META-INF/services/" + pair.Key;
|
||||
var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
|
||||
entry.LastWriteTime = timestamp;
|
||||
using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
|
||||
foreach (var provider in pair.Value)
|
||||
{
|
||||
writer.WriteLine(provider);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
@@ -1,93 +1,93 @@
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Java.Internal;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;
|
||||
|
||||
public sealed class JavaWorkspaceNormalizerTests
|
||||
{
|
||||
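    // Scenario: plain JAR, Spring Boot fat JAR, and WAR fixtures are classified by packaging kind and layered directories.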
[Fact]
|
||||
public void Normalize_ClassifiesPackagingAndLayers()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar");
|
||||
JavaFixtureBuilder.CreateSpringBootFatJar(root, "libs/app-fat.jar");
|
||||
JavaFixtureBuilder.CreateWarArchive(root, "apps/sample.war");
|
||||
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
|
||||
|
||||
var archivesByPath = workspace.Archives.ToDictionary(
|
||||
archive => archive.RelativePath.Replace('\\', '/'),
|
||||
archive => archive,
|
||||
StringComparer.Ordinal);
|
||||
|
||||
var simpleJar = Assert.Contains("libs/simple.jar", archivesByPath);
|
||||
Assert.Equal(JavaPackagingKind.Jar, simpleJar.Packaging);
|
||||
Assert.Empty(simpleJar.LayeredDirectories);
|
||||
|
||||
var fatJar = Assert.Contains("libs/app-fat.jar", archivesByPath);
|
||||
Assert.Equal(JavaPackagingKind.SpringBootFatJar, fatJar.Packaging);
|
||||
Assert.Contains("BOOT-INF", fatJar.LayeredDirectories);
|
||||
|
||||
var war = Assert.Contains("apps/sample.war", archivesByPath);
|
||||
Assert.Equal(JavaPackagingKind.War, war.Packaging);
|
||||
Assert.Contains("WEB-INF", war.LayeredDirectories);
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_SelectsMultiReleaseOverlay()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
JavaFixtureBuilder.CreateMultiReleaseJar(root, "libs/mr.jar");
|
||||
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
|
||||
|
||||
var archive = Assert.Single(workspace.Archives);
|
||||
|
||||
Assert.True(archive.IsMultiRelease);
|
||||
Assert.False(archive.HasModuleInfo);
|
||||
|
||||
Assert.True(archive.TryGetEntry("com/example/App.class", out var entry));
|
||||
Assert.Equal(11, entry.Version);
|
||||
Assert.Equal("META-INF/versions/11/com/example/App.class", entry.OriginalPath.Replace('\\', '/'));
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_DetectsRuntimeImageMetadata()
|
||||
{
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
JavaFixtureBuilder.CreateRuntimeImage(root, "runtime/jre");
|
||||
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None);
|
||||
|
||||
var runtime = Assert.Single(workspace.RuntimeImages);
|
||||
Assert.Equal("17.0.8", runtime.JavaVersion);
|
||||
Assert.Equal("Eclipse Adoptium", runtime.Vendor);
|
||||
Assert.Equal("runtime/jre", runtime.RelativePath.Replace('\\', '/'));
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
}
@@ -1,25 +1,25 @@
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests;

public sealed class NodeLanguageAnalyzerTests
{
    [Fact]
    public async Task WorkspaceFixtureProducesDeterministicOutputAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "node", "workspaces");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var analyzers = new ILanguageAnalyzer[]
        {
            new NodeLanguageAnalyzer()
        };

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixturePath,
            goldenPath,
            analyzers,
            cancellationToken);

@@ -1,88 +1,88 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Tests.Core;
|
||||
|
||||
public sealed class LanguageAnalyzerResultTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task MergesDuplicateComponentsDeterministicallyAsync()
|
||||
{
|
||||
var analyzer = new DuplicateComponentAnalyzer();
|
||||
var engine = new LanguageAnalyzerEngine(new[] { analyzer });
|
||||
var root = TestPaths.CreateTemporaryDirectory();
|
||||
try
|
||||
{
|
||||
var context = new LanguageAnalyzerContext(root, TimeProvider.System);
|
||||
var result = await engine.AnalyzeAsync(context, CancellationToken.None);
|
||||
|
||||
var component = Assert.Single(result.Components);
|
||||
Assert.Equal("purl::pkg:example/acme@2.0.0", component.ComponentKey);
|
||||
Assert.Equal("pkg:example/acme@2.0.0", component.Purl);
|
||||
Assert.True(component.UsedByEntrypoint);
|
||||
Assert.Equal(2, component.Evidence.Count);
|
||||
Assert.Equal(3, component.Metadata.Count);
|
||||
|
||||
// Metadata retains stable ordering (sorted by key)
|
||||
var keys = component.Metadata.Keys.ToArray();
|
||||
Assert.Equal(new[] { "artifactId", "groupId", "path" }, keys);
|
||||
|
||||
// Evidence de-duplicates via comparison key
|
||||
Assert.Equal(2, component.Evidence.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
TestPaths.SafeDelete(root);
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class DuplicateComponentAnalyzer : ILanguageAnalyzer
|
||||
{
|
||||
public string Id => "duplicate";
|
||||
|
||||
public string DisplayName => "Duplicate Analyzer";
|
||||
|
||||
public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.Yield();
|
||||
|
||||
var metadataA = new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>("groupId", "example"),
|
||||
new KeyValuePair<string, string?>("artifactId", "acme")
|
||||
};
|
||||
|
||||
var metadataB = new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>("artifactId", "acme"),
|
||||
new KeyValuePair<string, string?>("path", ".")
|
||||
};
|
||||
|
||||
var evidence = new[]
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.File, "manifest", "META-INF/MANIFEST.MF", null, null),
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.Metadata, "pom", "pom.xml", "groupId=example", null)
|
||||
};
|
||||
|
||||
writer.AddFromPurl(
|
||||
analyzerId: Id,
|
||||
purl: "pkg:example/acme@2.0.0",
|
||||
name: "acme",
|
||||
version: "2.0.0",
|
||||
type: "example",
|
||||
metadata: metadataA,
|
||||
evidence: evidence,
|
||||
usedByEntrypoint: true);
|
||||
|
||||
// duplicate insert with different metadata ordering
|
||||
writer.AddFromPurl(
|
||||
analyzerId: Id,
|
||||
purl: "pkg:example/acme@2.0.0",
|
||||
name: "acme",
|
||||
version: "2.0.0",
|
||||
type: "example",
|
||||
metadata: metadataB,
|
||||
evidence: evidence,
|
||||
usedByEntrypoint: false);
|
||||
}
|
||||
}
|
||||
}
@@ -1,70 +1,70 @@
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Tests.Core;
|
||||
|
||||
public sealed class LanguageComponentMapperTests
|
||||
{
|
||||
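    // Scenario: one purl-based and one explicit-key component are projected into component records with stable key ordering and namespaced metadata properties.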
[Fact]
|
||||
public void ToComponentRecordsProjectsDeterministicComponents()
|
||||
{
|
||||
// Arrange
|
||||
var analyzerId = "node";
|
||||
var records = new[]
|
||||
{
|
||||
LanguageComponentRecord.FromPurl(
|
||||
analyzerId: analyzerId,
|
||||
purl: "pkg:npm/example@1.0.0",
|
||||
name: "example",
|
||||
version: "1.0.0",
|
||||
type: "npm",
|
||||
metadata: new Dictionary<string, string?>()
|
||||
{
|
||||
["path"] = "packages/app",
|
||||
["license"] = "MIT"
|
||||
},
|
||||
evidence: new[]
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.File, "package.json", "packages/app/package.json", null, "abc123")
|
||||
},
|
||||
usedByEntrypoint: true),
|
||||
LanguageComponentRecord.FromExplicitKey(
|
||||
analyzerId: analyzerId,
|
||||
componentKey: "bin::sha256:deadbeef",
|
||||
purl: null,
|
||||
name: "app-binary",
|
||||
version: null,
|
||||
type: "binary",
|
||||
metadata: new Dictionary<string, string?>()
|
||||
{
|
||||
["description"] = "Utility binary"
|
||||
},
|
||||
evidence: new[]
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.Derived, "entrypoint", "/usr/local/bin/app", "ENTRYPOINT", null)
|
||||
})
|
||||
};
|
||||
|
||||
// Act
|
||||
var layerDigest = LanguageComponentMapper.ComputeLayerDigest(analyzerId);
|
||||
var results = LanguageComponentMapper.ToComponentRecords(analyzerId, records, layerDigest);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(2, results.Length);
|
||||
Assert.All(results, component => Assert.Equal(layerDigest, component.LayerDigest));
|
||||
|
||||
var first = results[0];
|
||||
Assert.Equal("bin::sha256:deadbeef", first.Identity.Key);
|
||||
Assert.Equal("Utility binary", first.Metadata!.Properties!["stellaops.lang.meta.description"]);
|
||||
Assert.Equal("derived", first.Evidence.Single().Kind);
|
||||
|
||||
var second = results[1];
|
||||
Assert.Equal("pkg:npm/example@1.0.0", second.Identity.Key); // prefix removed
|
||||
Assert.True(second.Usage.UsedByEntrypoint);
|
||||
Assert.Contains("MIT", second.Metadata!.Licenses!);
|
||||
Assert.Equal("packages/app", second.Metadata.Properties!["stellaops.lang.meta.path"]);
|
||||
Assert.Equal("abc123", second.Metadata.Properties!["stellaops.lang.evidence.0.sha256"]);
|
||||
Assert.Equal("file", second.Evidence.Single().Kind);
|
||||
Assert.Equal("packages/app/package.json", second.Evidence.Single().Value);
|
||||
Assert.Equal("package.json", second.Evidence.Single().Source);
|
||||
}
|
||||
}
@@ -1,102 +1,102 @@
|
||||
using StellaOps.Scanner.Analyzers.Lang;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
|
||||
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Tests.Determinism;
|
||||
|
||||
public sealed class LanguageAnalyzerHarnessTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task HarnessProducesDeterministicOutputAsync()
|
||||
{
|
||||
var fixturePath = TestPaths.ResolveFixture("determinism", "basic", "input");
|
||||
var goldenPath = TestPaths.ResolveFixture("determinism", "basic", "expected.json");
|
||||
var cancellationToken = TestContext.Current.CancellationToken;
|
||||
|
||||
var analyzers = new ILanguageAnalyzer[]
|
||||
{
|
||||
new FakeLanguageAnalyzer(
|
||||
"fake-java",
|
||||
LanguageComponentRecord.FromPurl(
|
||||
analyzerId: "fake-java",
|
||||
purl: "pkg:maven/org.example/example-lib@1.2.3",
|
||||
name: "example-lib",
|
||||
version: "1.2.3",
|
||||
type: "maven",
|
||||
metadata: new Dictionary<string, string?>
|
||||
{
|
||||
["groupId"] = "org.example",
|
||||
["artifactId"] = "example-lib",
|
||||
},
|
||||
evidence: new []
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.File, "pom.properties", "META-INF/maven/org.example/example-lib/pom.properties", null, "abc123"),
|
||||
}),
|
||||
LanguageComponentRecord.FromExplicitKey(
|
||||
analyzerId: "fake-java",
|
||||
componentKey: "bin::sha256:deadbeef",
|
||||
purl: null,
|
||||
name: "example-cli",
|
||||
version: null,
|
||||
type: "bin",
|
||||
metadata: new Dictionary<string, string?>
|
||||
{
|
||||
["sha256"] = "deadbeef",
|
||||
},
|
||||
evidence: new []
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.File, "binary", "usr/local/bin/example", null, "deadbeef"),
|
||||
})),
|
||||
new FakeLanguageAnalyzer(
|
||||
"fake-node",
|
||||
LanguageComponentRecord.FromPurl(
|
||||
analyzerId: "fake-node",
|
||||
purl: "pkg:npm/example-package@4.5.6",
|
||||
name: "example-package",
|
||||
version: "4.5.6",
|
||||
type: "npm",
|
||||
metadata: new Dictionary<string, string?>
|
||||
{
|
||||
["workspace"] = "packages/example",
|
||||
},
|
||||
evidence: new []
|
||||
{
|
||||
new LanguageComponentEvidence(LanguageEvidenceKind.File, "package.json", "packages/example/package.json", null, null),
|
||||
},
|
||||
usedByEntrypoint: true)),
|
||||
};
|
||||
|
||||
await LanguageAnalyzerTestHarness.AssertDeterministicAsync(fixturePath, goldenPath, analyzers, cancellationToken);
|
||||
|
||||
var first = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
|
||||
var second = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
|
||||
Assert.Equal(first, second);
|
||||
}
|
||||
|
||||
private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer
|
||||
{
|
||||
private readonly IReadOnlyList<LanguageComponentRecord> _components;
|
||||
|
||||
public FakeLanguageAnalyzer(string id, params LanguageComponentRecord[] components)
|
||||
{
|
||||
Id = id;
|
||||
DisplayName = id;
|
||||
_components = components ?? Array.Empty<LanguageComponentRecord>();
|
||||
}
|
||||
|
||||
public string Id { get; }
|
||||
|
||||
public string DisplayName { get; }
|
||||
|
||||
public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.Delay(5, cancellationToken).ConfigureAwait(false); // ensure asynchrony is handled
|
||||
|
||||
// Intentionally add in reverse order to prove determinism.
|
||||
foreach (var component in _components.Reverse())
|
||||
{
|
||||
writer.Add(component);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
@@ -1,41 +1,41 @@
using System;
using System.IO;
using System.Linq;
using System.Text.Json.Nodes;
using StellaOps.Scanner.Analyzers.Lang.Rust;
using StellaOps.Scanner.Analyzers.Lang.Tests.Harness;
using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

namespace StellaOps.Scanner.Analyzers.Lang.Tests.Rust;

public sealed class RustLanguageAnalyzerTests
{
    [Fact]
    public async Task SimpleFixtureProducesDeterministicOutputAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
        var fixturePath = TestPaths.ResolveFixture("lang", "rust", "simple");
        var goldenPath = Path.Combine(fixturePath, "expected.json");

        var usageHints = new LanguageUsageHints(new[]
        {
            Path.Combine(fixturePath, "usr/local/bin/my_app")
        });

        var analyzers = new ILanguageAnalyzer[]
        {
            new RustLanguageAnalyzer()
        };

        await LanguageAnalyzerTestHarness.AssertDeterministicAsync(
            fixturePath,
            goldenPath,
            analyzers,
            cancellationToken,
            usageHints);
    }

    [Fact]
    public async Task AnalyzerIsThreadSafeUnderConcurrencyAsync()
    {
        var cancellationToken = TestContext.Current.CancellationToken;
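The AnalyzerIsThreadSafeUnderConcurrencyAsync body is cut off at this hunk boundary. A minimal sketch of the shape such a check could take, illustrative only: the degree of parallelism is an assumption, and only harness calls already shown in this diff are used.

// Sketch: several concurrent runs over the same fixture must serialize identically.
var fixturePath = TestPaths.ResolveFixture("lang", "rust", "simple");
var analyzers = new ILanguageAnalyzer[] { new RustLanguageAnalyzer() };
var baseline = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, cancellationToken);
await Parallel.ForEachAsync(Enumerable.Range(0, 8), cancellationToken, async (_, ct) =>
{
    var run = await LanguageAnalyzerTestHarness.RunToJsonAsync(fixturePath, analyzers, ct);
    Assert.Equal(baseline, run);
});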
@@ -1,428 +1,428 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
|
||||
|
||||
public static class JavaClassFileFactory
|
||||
{
|
||||
public static byte[] CreateClassForNameInvoker(string internalClassName, string targetClassName)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 16);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("invoke"); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(targetClassName); // #8
|
||||
writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Class"); // #10
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("forName"); // #12
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)Ljava/lang/Class;"); // #13
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this class
|
||||
writer.WriteUInt16(4); // super class
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
WriteInvokeMethod(writer, methodNameIndex: 5, descriptorIndex: 6, ldcIndex: 9, methodRefIndex: 15);
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
public static byte[] CreateClassResourceLookup(string internalClassName, string resourcePath)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 20);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("load"); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(resourcePath); // #8
|
||||
writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/ClassLoader"); // #10
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("getSystemClassLoader"); // #12
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/ClassLoader;"); // #13
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("getResource"); // #16
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)Ljava/net/URL;"); // #17
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(16); writer.WriteUInt16(17); // #18
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(18); // #19
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this class
|
||||
writer.WriteUInt16(4); // super class
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
WriteResourceLookupMethod(writer, methodNameIndex: 5, descriptorIndex: 6, systemLoaderMethodRefIndex: 15, stringIndex: 9, getResourceMethodRefIndex: 19);
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
public static byte[] CreateTcclChecker(string internalClassName)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 18);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("check"); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Thread"); // #8
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(8); // #9
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("currentThread"); // #10
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/Thread;"); // #11
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(10); writer.WriteUInt16(11); // #12
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(12); // #13
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("getContextClassLoader"); // #14
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/ClassLoader;"); // #15
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(14); writer.WriteUInt16(15); // #16
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(16); // #17
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this
|
||||
writer.WriteUInt16(4); // super
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
WriteTcclMethod(writer, methodNameIndex: 5, descriptorIndex: 6, currentThreadMethodRefIndex: 13, getContextMethodRefIndex: 17);
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
private static void WriteClassFileHeader(BigEndianWriter writer, ushort constantPoolCount)
|
||||
{
|
||||
writer.WriteUInt32(0xCAFEBABE);
|
||||
writer.WriteUInt16(0);
|
||||
writer.WriteUInt16(52);
|
||||
writer.WriteUInt16(constantPoolCount);
|
||||
}
|
||||
|
||||
private static void WriteInvokeMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort ldcIndex, ushort methodRefIndex)
|
||||
{
|
||||
writer.WriteUInt16(0x0009); // public static
|
||||
writer.WriteUInt16(methodNameIndex);
|
||||
writer.WriteUInt16(descriptorIndex);
|
||||
writer.WriteUInt16(1); // attributes_count
|
||||
|
||||
writer.WriteUInt16(7); // "Code"
|
||||
using var codeBuffer = new MemoryStream();
|
||||
using (var codeWriter = new BigEndianWriter(codeBuffer))
|
||||
{
|
||||
codeWriter.WriteUInt16(1); // max_stack
|
||||
codeWriter.WriteUInt16(0); // max_locals
|
||||
codeWriter.WriteUInt32(6); // code_length
|
||||
codeWriter.WriteByte(0x12);
|
||||
codeWriter.WriteByte((byte)ldcIndex);
|
||||
codeWriter.WriteByte(0xB8);
|
||||
codeWriter.WriteUInt16(methodRefIndex);
|
||||
codeWriter.WriteByte(0xB1);
|
||||
codeWriter.WriteUInt16(0); // exception table length
|
||||
codeWriter.WriteUInt16(0); // code attributes
|
||||
}
|
||||
|
||||
var codeBytes = codeBuffer.ToArray();
|
||||
writer.WriteUInt32((uint)codeBytes.Length);
|
||||
writer.WriteBytes(codeBytes);
|
||||
}
|
||||
|
||||
private static void WriteTcclMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort currentThreadMethodRefIndex, ushort getContextMethodRefIndex)
|
||||
{
|
||||
writer.WriteUInt16(0x0009);
|
||||
writer.WriteUInt16(methodNameIndex);
|
||||
writer.WriteUInt16(descriptorIndex);
|
||||
writer.WriteUInt16(1);
|
||||
|
||||
writer.WriteUInt16(7);
|
||||
using var codeBuffer = new MemoryStream();
|
||||
using (var codeWriter = new BigEndianWriter(codeBuffer))
|
||||
{
|
||||
codeWriter.WriteUInt16(2);
|
||||
codeWriter.WriteUInt16(0);
|
||||
codeWriter.WriteUInt32(8);
|
||||
codeWriter.WriteByte(0xB8);
|
||||
codeWriter.WriteUInt16(currentThreadMethodRefIndex);
|
||||
codeWriter.WriteByte(0xB6);
|
||||
codeWriter.WriteUInt16(getContextMethodRefIndex);
|
||||
codeWriter.WriteByte(0x57);
|
||||
codeWriter.WriteByte(0xB1);
|
||||
codeWriter.WriteUInt16(0);
|
||||
codeWriter.WriteUInt16(0);
|
||||
}
|
||||
|
||||
var codeBytes = codeBuffer.ToArray();
|
||||
writer.WriteUInt32((uint)codeBytes.Length);
|
||||
writer.WriteBytes(codeBytes);
|
||||
}
|
||||
|
||||
private static void WriteResourceLookupMethod(
|
||||
BigEndianWriter writer,
|
||||
ushort methodNameIndex,
|
||||
ushort descriptorIndex,
|
||||
ushort systemLoaderMethodRefIndex,
|
||||
ushort stringIndex,
|
||||
ushort getResourceMethodRefIndex)
|
||||
{
|
||||
writer.WriteUInt16(0x0009);
|
||||
writer.WriteUInt16(methodNameIndex);
|
||||
writer.WriteUInt16(descriptorIndex);
|
||||
writer.WriteUInt16(1);
|
||||
|
||||
writer.WriteUInt16(7);
|
||||
using var codeBuffer = new MemoryStream();
|
||||
using (var codeWriter = new BigEndianWriter(codeBuffer))
|
||||
{
|
||||
codeWriter.WriteUInt16(2);
|
||||
codeWriter.WriteUInt16(0);
|
||||
codeWriter.WriteUInt32(10);
|
||||
codeWriter.WriteByte(0xB8); // invokestatic
|
||||
codeWriter.WriteUInt16(systemLoaderMethodRefIndex);
|
||||
codeWriter.WriteByte(0x12); // ldc
|
||||
codeWriter.WriteByte((byte)stringIndex);
|
||||
codeWriter.WriteByte(0xB6); // invokevirtual
|
||||
codeWriter.WriteUInt16(getResourceMethodRefIndex);
|
||||
codeWriter.WriteByte(0x57);
|
||||
codeWriter.WriteByte(0xB1);
|
||||
codeWriter.WriteUInt16(0);
|
||||
codeWriter.WriteUInt16(0);
|
||||
}
|
||||
|
||||
var codeBytes = codeBuffer.ToArray();
|
||||
writer.WriteUInt32((uint)codeBytes.Length);
|
||||
writer.WriteBytes(codeBytes);
|
||||
}
|
||||
|
||||
private sealed class BigEndianWriter : IDisposable
|
||||
{
|
||||
private readonly BinaryWriter _writer;
|
||||
|
||||
public BigEndianWriter(Stream stream)
|
||||
{
|
||||
_writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true);
|
||||
}
|
||||
|
||||
public void WriteByte(byte value) => _writer.Write(value);
|
||||
|
||||
public void WriteBytes(byte[] data) => _writer.Write(data);
|
||||
|
||||
public void WriteUInt16(ushort value)
|
||||
{
|
||||
Span<byte> buffer = stackalloc byte[2];
|
||||
BinaryPrimitives.WriteUInt16BigEndian(buffer, value);
|
||||
_writer.Write(buffer);
|
||||
}
|
||||
|
||||
public void WriteUInt32(uint value)
|
||||
{
|
||||
Span<byte> buffer = stackalloc byte[4];
|
||||
BinaryPrimitives.WriteUInt32BigEndian(buffer, value);
|
||||
_writer.Write(buffer);
|
||||
}
|
||||
|
||||
public void WriteUtf8(string value)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(value);
|
||||
WriteUInt16((ushort)bytes.Length);
|
||||
_writer.Write(bytes);
|
||||
}
|
||||
|
||||
public void Dispose() => _writer.Dispose();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a class file with a native method declaration.
|
||||
/// </summary>
|
||||
public static byte[] CreateNativeMethodClass(string internalClassName, string nativeMethodName)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 8);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(nativeMethodName); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this class
|
||||
writer.WriteUInt16(4); // super class
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
// native method: access_flags = ACC_PUBLIC | ACC_NATIVE (0x0101)
|
||||
writer.WriteUInt16(0x0101);
|
||||
writer.WriteUInt16(5); // name
|
||||
writer.WriteUInt16(6); // descriptor
|
||||
writer.WriteUInt16(0); // no attributes (native methods have no Code)
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a class file with a System.loadLibrary call.
|
||||
/// </summary>
|
||||
public static byte[] CreateSystemLoadLibraryInvoker(string internalClassName, string libraryName)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 16);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("loadNative"); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(libraryName); // #8
|
||||
writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/System"); // #10
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("loadLibrary"); // #12
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)V"); // #13
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this class
|
||||
writer.WriteUInt16(4); // super class
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
WriteInvokeStaticMethod(writer, methodNameIndex: 5, descriptorIndex: 6, ldcIndex: 9, methodRefIndex: 15);
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a class file with a System.load call (loads by path).
|
||||
/// </summary>
|
||||
public static byte[] CreateSystemLoadInvoker(string internalClassName, string libraryPath)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
using var writer = new BigEndianWriter(buffer);
|
||||
|
||||
WriteClassFileHeader(writer, constantPoolCount: 16);
|
||||
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("loadNative"); // #5
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(libraryPath); // #8
|
||||
writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/System"); // #10
|
||||
writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("load"); // #12
|
||||
writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)V"); // #13
|
||||
writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14
|
||||
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15
|
||||
|
||||
writer.WriteUInt16(0x0001); // public
|
||||
writer.WriteUInt16(2); // this class
|
||||
writer.WriteUInt16(4); // super class
|
||||
|
||||
writer.WriteUInt16(0); // interfaces
|
||||
writer.WriteUInt16(0); // fields
|
||||
writer.WriteUInt16(1); // methods
|
||||
|
||||
WriteInvokeStaticMethod(writer, methodNameIndex: 5, descriptorIndex: 6, ldcIndex: 9, methodRefIndex: 15);
|
||||
|
||||
writer.WriteUInt16(0); // class attributes
|
||||
|
||||
return buffer.ToArray();
|
||||
}
|
||||
|
||||
private static void WriteInvokeStaticMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort ldcIndex, ushort methodRefIndex)
|
||||
{
|
||||
writer.WriteUInt16(0x0009); // public static
|
||||
writer.WriteUInt16(methodNameIndex);
|
||||
writer.WriteUInt16(descriptorIndex);
|
||||
writer.WriteUInt16(1); // attributes_count
|
||||
|
||||
writer.WriteUInt16(7); // "Code"
|
||||
using var codeBuffer = new MemoryStream();
|
||||
using (var codeWriter = new BigEndianWriter(codeBuffer))
|
||||
{
|
||||
codeWriter.WriteUInt16(1); // max_stack
|
||||
codeWriter.WriteUInt16(0); // max_locals
|
||||
codeWriter.WriteUInt32(6); // code_length
|
||||
codeWriter.WriteByte(0x12); // ldc
|
||||
codeWriter.WriteByte((byte)ldcIndex);
|
||||
codeWriter.WriteByte(0xB8); // invokestatic
|
||||
codeWriter.WriteUInt16(methodRefIndex);
|
||||
codeWriter.WriteByte(0xB1); // return
|
||||
codeWriter.WriteUInt16(0); // exception table length
|
||||
codeWriter.WriteUInt16(0); // code attributes
|
||||
}
|
||||
|
||||
var codeBytes = codeBuffer.ToArray();
|
||||
writer.WriteUInt32((uint)codeBytes.Length);
|
||||
writer.WriteBytes(codeBytes);
|
||||
}
|
||||
|
||||
private enum ConstantTag : byte
|
||||
{
|
||||
Utf8 = 1,
|
||||
Integer = 3,
|
||||
Float = 4,
|
||||
Long = 5,
|
||||
Double = 6,
|
||||
Class = 7,
|
||||
String = 8,
|
||||
Fieldref = 9,
|
||||
Methodref = 10,
|
||||
InterfaceMethodref = 11,
|
||||
NameAndType = 12,
|
||||
}
|
||||
}
|
||||
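A small sanity-check sketch for the factory above, not part of this commit; the class and target names below are hypothetical. WriteClassFileHeader always emits the 0xCAFEBABE magic, minor version 0, major version 52, and the declared constant-pool count, so the first ten bytes of any generated class are directly assertable.

// Sketch: assert the fixed header emitted by JavaClassFileFactory.
var bytes = JavaClassFileFactory.CreateClassForNameInvoker("com/example/Invoker", "com.example.Target");
Assert.Equal(0xCAFEBABEu, BinaryPrimitives.ReadUInt32BigEndian(bytes.AsSpan(0, 4)));
Assert.Equal((ushort)0, BinaryPrimitives.ReadUInt16BigEndian(bytes.AsSpan(4, 2)));   // minor version
Assert.Equal((ushort)52, BinaryPrimitives.ReadUInt16BigEndian(bytes.AsSpan(6, 2)));  // major version (Java 8)
Assert.Equal((ushort)16, BinaryPrimitives.ReadUInt16BigEndian(bytes.AsSpan(8, 2)));  // constant_pool_count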
@@ -1,8 +1,8 @@
using System.IO.Compression;
using System.Text;

namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

public static class JavaFixtureBuilder
{
    private static readonly DateTimeOffset DefaultTimestamp = new(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);
@@ -1,40 +1,40 @@
namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;

public static class TestPaths
{
    public static string ResolveFixture(params string[] segments)
    {
        var baseDirectory = AppContext.BaseDirectory;
        var parts = new List<string> { baseDirectory };
        parts.AddRange(new[] { "Fixtures" });
        parts.AddRange(segments);
        return Path.GetFullPath(Path.Combine(parts.ToArray()));
    }

    public static string CreateTemporaryDirectory()
    {
        var root = Path.Combine(AppContext.BaseDirectory, "tmp", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(root);
        return root;
    }

    public static void SafeDelete(string directory)
    {
        if (string.IsNullOrWhiteSpace(directory) || !Directory.Exists(directory))
        {
            return;
        }

        try
        {
            Directory.Delete(directory, recursive: true);
        }
        catch
        {
            // Swallow cleanup exceptions to avoid masking test failures.
        }
    }

    public static string ResolveProjectRoot()
    {
        var directory = AppContext.BaseDirectory;
@@ -47,8 +47,8 @@ public static class TestPaths
            }

            directory = Path.GetDirectoryName(directory) ?? string.Empty;
        }

        throw new InvalidOperationException("Unable to locate project root.");
    }
}
        }

        throw new InvalidOperationException("Unable to locate project root.");
    }
}
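A brief usage sketch for the helpers above; the fixture segments below are hypothetical. Fixtures resolve under the test output's Fixtures directory, temporary directories are unique per call, and SafeDelete is meant for finally blocks so cleanup failures never mask the real assertion.

// Sketch: typical TestPaths usage inside a test body.
var fixture = TestPaths.ResolveFixture("lang", "example", "basic"); // hypothetical fixture
var scratch = TestPaths.CreateTemporaryDirectory();
try
{
    File.Copy(Path.Combine(fixture, "expected.json"), Path.Combine(scratch, "expected.json"));
}
finally
{
    TestPaths.SafeDelete(scratch);
}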
@@ -0,0 +1,44 @@
using System;
using System.Collections.Generic;
using System.IO;
using StellaOps.Scanner.Analyzers.OS.Helpers;
using Xunit;

namespace StellaOps.Scanner.Analyzers.OS.Tests.Helpers;

public sealed class OsFileEvidenceFactoryTests
{
    [Fact]
    public void Create_DoesNotComputeSha256_WhenOtherDigestsPresent()
    {
        var rootPath = Path.Combine(Path.GetTempPath(), "stellaops-os-evidence-" + Guid.NewGuid().ToString("N")[..8]);
        var filePath = Path.Combine(rootPath, "bin", "test");
        Directory.CreateDirectory(Path.GetDirectoryName(filePath)!);
        File.WriteAllText(filePath, "hello");

        try
        {
            var metadata = new Dictionary<string, string>(StringComparer.Ordinal);
            var factory = OsFileEvidenceFactory.Create(rootPath, metadata);

            var evidence = factory.Create(
                "bin/test",
                isConfigFile: false,
                digests: new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    ["md5"] = "deadbeef"
                });

            Assert.NotNull(evidence);
            Assert.Null(evidence.Sha256);
            Assert.True(evidence.Digests.ContainsKey("md5"));
            Assert.False(evidence.Digests.ContainsKey("sha256"));
            Assert.True(evidence.SizeBytes.HasValue);
        }
        finally
        {
            Directory.Delete(rootPath, recursive: true);
        }
    }
}
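The temp-root plus try/finally layout above recurs across the filesystem-backed evidence tests; a small disposable wrapper, shown only as a sketch and not present in the repo, could keep that boilerplate out of each test body.

// Hypothetical helper: creates an isolated root and removes it on dispose.
internal sealed class TempRootSketch : IDisposable
{
    public string RootPath { get; } =
        Path.Combine(Path.GetTempPath(), "stellaops-test-" + Guid.NewGuid().ToString("N"));

    public TempRootSketch() => Directory.CreateDirectory(RootPath);

    public void Dispose()
    {
        try { Directory.Delete(RootPath, recursive: true); }
        catch { /* best-effort cleanup */ }
    }
}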
@@ -1,76 +1,76 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.Analyzers.OS.Mapping;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.OS.Tests.Mapping;
|
||||
|
||||
public class OsComponentMapperTests
|
||||
{
|
||||
[Fact]
|
||||
public void ToLayerFragments_ProducesDeterministicComponents()
|
||||
{
|
||||
var package = new OSPackageRecord(
|
||||
analyzerId: "apk",
|
||||
packageUrl: "pkg:alpine/busybox@1.37.0-r0?arch=x86_64",
|
||||
name: "busybox",
|
||||
version: "1.37.0",
|
||||
architecture: "x86_64",
|
||||
evidenceSource: PackageEvidenceSource.ApkDatabase,
|
||||
release: "r0",
|
||||
sourcePackage: "busybox",
|
||||
license: "GPL-2.0-only",
|
||||
depends: new[] { "musl>=1.2.5-r0", "ssl_client" },
|
||||
files: new[]
|
||||
{
|
||||
new OSPackageFileEvidence("/bin/busybox", sha256: "abc123", isConfigFile: false),
|
||||
new OSPackageFileEvidence("/etc/profile", isConfigFile: true, digests: new Dictionary<string, string> { ["md5"] = "deadbeef" }),
|
||||
},
|
||||
vendorMetadata: new Dictionary<string, string?>
|
||||
{
|
||||
["homepage"] = "https://busybox.net/",
|
||||
});
|
||||
|
||||
var result = new OSPackageAnalyzerResult(
|
||||
analyzerId: "apk",
|
||||
packages: ImmutableArray.Create(package),
|
||||
telemetry: new OSAnalyzerTelemetry(System.TimeSpan.Zero, 1, 2));
|
||||
|
||||
var fragments = OsComponentMapper.ToLayerFragments(new[] { result });
|
||||
|
||||
Assert.Single(fragments);
|
||||
var fragment = fragments[0];
|
||||
Assert.StartsWith("sha256:", fragment.LayerDigest);
|
||||
Assert.Single(fragment.Components);
|
||||
|
||||
var component = fragment.Components[0];
|
||||
Assert.Equal(fragment.LayerDigest, component.LayerDigest);
|
||||
Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Key);
|
||||
Assert.Equal("busybox", component.Identity.Name);
|
||||
Assert.Equal("1.37.0", component.Identity.Version);
|
||||
Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Purl);
|
||||
Assert.Equal("os-package", component.Identity.ComponentType);
|
||||
Assert.Equal("busybox", component.Identity.Group);
|
||||
Assert.Collection(component.Evidence,
|
||||
evidence =>
|
||||
{
|
||||
Assert.Equal("file", evidence.Kind);
|
||||
Assert.Equal("/bin/busybox", evidence.Value);
|
||||
Assert.Equal("abc123", evidence.Source);
|
||||
},
|
||||
evidence =>
|
||||
{
|
||||
Assert.Equal("config-file", evidence.Kind);
|
||||
Assert.Equal("/etc/profile", evidence.Value);
|
||||
Assert.Null(evidence.Source);
|
||||
});
|
||||
Assert.Equal(new[] { "musl>=1.2.5-r0", "ssl_client" }, component.Dependencies);
|
||||
Assert.False(component.Usage.UsedByEntrypoint);
|
||||
Assert.NotNull(component.Metadata);
|
||||
Assert.Equal(new[] { "GPL-2.0-only" }, component.Metadata!.Licenses);
|
||||
Assert.Contains("stellaops.os.analyzer", component.Metadata.Properties!.Keys);
|
||||
Assert.Equal("apk", component.Metadata.Properties!["stellaops.os.analyzer"]);
|
||||
Assert.Equal("https://busybox.net/", component.Metadata.Properties!["vendor.homepage"]);
|
||||
}
|
||||
}
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.Analyzers.OS.Mapping;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.OS.Tests.Mapping;
|
||||
|
||||
public class OsComponentMapperTests
|
||||
{
|
||||
[Fact]
|
||||
public void ToLayerFragments_ProducesDeterministicComponents()
|
||||
{
|
||||
var package = new OSPackageRecord(
|
||||
analyzerId: "apk",
|
||||
packageUrl: "pkg:alpine/busybox@1.37.0-r0?arch=x86_64",
|
||||
name: "busybox",
|
||||
version: "1.37.0",
|
||||
architecture: "x86_64",
|
||||
evidenceSource: PackageEvidenceSource.ApkDatabase,
|
||||
release: "r0",
|
||||
sourcePackage: "busybox",
|
||||
license: "GPL-2.0-only",
|
||||
depends: new[] { "musl>=1.2.5-r0", "ssl_client" },
|
||||
files: new[]
|
||||
{
|
||||
new OSPackageFileEvidence("/bin/busybox", sha256: "abc123", isConfigFile: false),
|
||||
new OSPackageFileEvidence("/etc/profile", isConfigFile: true, digests: new Dictionary<string, string> { ["md5"] = "deadbeef" }),
|
||||
},
|
||||
vendorMetadata: new Dictionary<string, string?>
|
||||
{
|
||||
["homepage"] = "https://busybox.net/",
|
||||
});
|
||||
|
||||
var result = new OSPackageAnalyzerResult(
|
||||
analyzerId: "apk",
|
||||
packages: ImmutableArray.Create(package),
|
||||
telemetry: new OSAnalyzerTelemetry(System.TimeSpan.Zero, 1, 2));
|
||||
|
||||
var fragments = OsComponentMapper.ToLayerFragments(new[] { result });
|
||||
|
||||
Assert.Single(fragments);
|
||||
var fragment = fragments[0];
|
||||
Assert.StartsWith("sha256:", fragment.LayerDigest);
|
||||
Assert.Single(fragment.Components);
|
||||
|
||||
var component = fragment.Components[0];
|
||||
Assert.Equal(fragment.LayerDigest, component.LayerDigest);
|
||||
Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Key);
|
||||
Assert.Equal("busybox", component.Identity.Name);
|
||||
Assert.Equal("1.37.0", component.Identity.Version);
|
||||
Assert.Equal("pkg:alpine/busybox@1.37.0-r0?arch=x86_64", component.Identity.Purl);
|
||||
Assert.Equal("os-package", component.Identity.ComponentType);
|
||||
Assert.Equal("busybox", component.Identity.Group);
|
||||
Assert.Collection(component.Evidence,
|
||||
evidence =>
|
||||
{
|
||||
Assert.Equal("file", evidence.Kind);
|
||||
Assert.Equal("/bin/busybox", evidence.Value);
|
||||
Assert.Equal("abc123", evidence.Source);
|
||||
},
|
||||
evidence =>
|
||||
{
|
||||
Assert.Equal("config-file", evidence.Kind);
|
||||
Assert.Equal("/etc/profile", evidence.Value);
|
||||
Assert.Equal("deadbeef", evidence.Source);
|
||||
});
|
||||
Assert.Equal(new[] { "musl>=1.2.5-r0", "ssl_client" }, component.Dependencies);
|
||||
Assert.False(component.Usage.UsedByEntrypoint);
|
||||
Assert.NotNull(component.Metadata);
|
||||
Assert.Equal(new[] { "GPL-2.0-only" }, component.Metadata!.Licenses);
|
||||
Assert.Contains("stellaops.os.analyzer", component.Metadata.Properties!.Keys);
|
||||
Assert.Equal("apk", component.Metadata.Properties!["stellaops.os.analyzer"]);
|
||||
Assert.Equal("https://busybox.net/", component.Metadata.Properties!["vendor.homepage"]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,137 +1,137 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.OS;
using StellaOps.Scanner.Analyzers.OS.Apk;
using StellaOps.Scanner.Analyzers.OS.Dpkg;
using StellaOps.Scanner.Analyzers.OS.Rpm;
using StellaOps.Scanner.Analyzers.OS.Rpm.Internal;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities;
using Xunit;

namespace StellaOps.Scanner.Analyzers.OS.Tests;

public sealed class OsAnalyzerDeterminismTests
{
    [Fact]
    public async Task ApkAnalyzerMatchesGolden()
    {
        using var fixture = FixtureManager.UseFixture("apk", out var rootPath);
        var analyzer = new ApkPackageAnalyzer(NullLogger<ApkPackageAnalyzer>.Instance);
        var context = CreateContext(rootPath);

        var result = await analyzer.AnalyzeAsync(context, CancellationToken.None);
        var snapshot = SnapshotSerializer.Serialize(new[] { result });
        GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("apk.json"));
    }

    [Fact]
    public async Task DpkgAnalyzerMatchesGolden()
    {
        using var fixture = FixtureManager.UseFixture("dpkg", out var rootPath);
        var analyzer = new DpkgPackageAnalyzer(NullLogger<DpkgPackageAnalyzer>.Instance);
        var context = CreateContext(rootPath);

        var result = await analyzer.AnalyzeAsync(context, CancellationToken.None);
        var snapshot = SnapshotSerializer.Serialize(new[] { result });
        GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("dpkg.json"));
    }

    [Fact]
    public async Task RpmAnalyzerMatchesGolden()
    {
        var headers = new[]
        {
            CreateRpmHeader(
                name: "openssl-libs",
                version: "3.2.1",
                architecture: "x86_64",
                release: "8.el9",
                epoch: "1",
                license: "OpenSSL",
                sourceRpm: "openssl-3.2.1-8.el9.src.rpm",
                provides: new[] { "libcrypto.so.3()(64bit)", "openssl-libs" },
                requires: new[] { "glibc(x86-64) >= 2.34" },
                files: new[]
                {
                    new RpmFileEntry("/usr/lib64/libcrypto.so.3", false, new Dictionary<string, string> { ["sha256"] = "abc123" }),
                    new RpmFileEntry("/etc/pki/tls/openssl.cnf", true, new Dictionary<string, string> { ["md5"] = "c0ffee" })
                },
                changeLogs: new[] { "Resolves: CVE-2025-1234" },
                metadata: new Dictionary<string, string?> { ["summary"] = "TLS toolkit" })
        };

        var reader = new StubRpmDatabaseReader(headers);
        var analyzer = new RpmPackageAnalyzer(
            NullLogger<RpmPackageAnalyzer>.Instance,
            reader);

        var context = CreateContext("/tmp/nonexistent");
        var result = await analyzer.AnalyzeAsync(context, CancellationToken.None);
        var snapshot = SnapshotSerializer.Serialize(new[] { result });
        GoldenAssert.MatchSnapshot(snapshot, FixtureManager.GetGoldenPath("rpm.json"));
    }

    private static OSPackageAnalyzerContext CreateContext(string rootPath)
    {
        var metadata = new Dictionary<string, string>
        {
            [ScanMetadataKeys.RootFilesystemPath] = rootPath
        };

        return new OSPackageAnalyzerContext(rootPath, workspacePath: null, TimeProvider.System, NullLoggerFactory.Instance.CreateLogger("os-analyzer-tests"), metadata);
    }

    private static RpmHeader CreateRpmHeader(
        string name,
        string version,
        string architecture,
        string? release,
        string? epoch,
        string? license,
        string? sourceRpm,
        IReadOnlyList<string> provides,
        IReadOnlyList<string> requires,
        IReadOnlyList<RpmFileEntry> files,
        IReadOnlyList<string> changeLogs,
        IReadOnlyDictionary<string, string?> metadata)
    {
        return new RpmHeader(
            name,
            version,
            architecture,
            release,
            epoch,
            metadata.TryGetValue("summary", out var summary) ? summary : null,
            metadata.TryGetValue("description", out var description) ? description : null,
            license,
            sourceRpm,
            metadata.TryGetValue("url", out var url) ? url : null,
            metadata.TryGetValue("vendor", out var vendor) ? vendor : null,
            buildTime: null,
            installTime: null,
            provides,
            provideVersions: provides.Select(_ => string.Empty).ToArray(),
            requires,
            requireVersions: requires.Select(_ => string.Empty).ToArray(),
            files,
            changeLogs,
            metadata);
    }

    private sealed class StubRpmDatabaseReader : IRpmDatabaseReader
    {
        private readonly IReadOnlyList<RpmHeader> _headers;

        public StubRpmDatabaseReader(IReadOnlyList<RpmHeader> headers)
        {
            _headers = headers;
        }

        public IReadOnlyList<RpmHeader> ReadHeaders(string rootPath, CancellationToken cancellationToken)
            => _headers;
    }
}

@@ -1,6 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using Microsoft.Data.Sqlite;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.OS.Rpm;
using Xunit;
@@ -9,6 +10,42 @@ namespace StellaOps.Scanner.Analyzers.OS.Tests.Rpm;

public sealed class RpmDatabaseReaderTests
{
    [Theory]
    [InlineData("hdr")]
    [InlineData("header")]
    [InlineData("headerBlob")]
    public void ReadsHeaders_FromSqlite_WhenHeaderBlobColumnPresent(string headerColumnName)
    {
        var root = Directory.CreateTempSubdirectory("rpmdb-sqlite");
        try
        {
            var rpmPath = Path.Combine(root.FullName, "var", "lib", "rpm");
            Directory.CreateDirectory(rpmPath);

            var sqlitePath = Path.Combine(rpmPath, "rpmdb.sqlite");
            CreateSqliteRpmdb(sqlitePath, headerColumnName);

            var reader = new RpmDatabaseReader(NullLogger.Instance);
            var headers = reader.ReadHeaders(root.FullName, CancellationToken.None);

            Assert.Single(headers);
            var header = headers[0];
            Assert.Equal("sqlite-pkg", header.Name);
            Assert.Equal("2.0.0", header.Version);
            Assert.Equal("aarch64", header.Architecture);
        }
        finally
        {
            try
            {
                root.Delete(recursive: true);
            }
            catch
            {
            }
        }
    }

    [Fact]
    public void FallsBackToLegacyPackages_WhenSqliteMissing()
    {
@@ -42,6 +79,36 @@ public sealed class RpmDatabaseReaderTests
        }
    }

    private static void CreateSqliteRpmdb(string sqlitePath, string headerColumnName)
    {
        var connectionString = new SqliteConnectionStringBuilder
        {
            DataSource = sqlitePath,
            Mode = SqliteOpenMode.ReadWriteCreate,
        }.ToString();

        using var connection = new SqliteConnection(connectionString);
        connection.Open();

        using var create = connection.CreateCommand();
        create.CommandText = $@"CREATE TABLE Packages (
pkgKey INTEGER PRIMARY KEY,
pkgId BLOB,
""{headerColumnName}"" BLOB
);";
        create.ExecuteNonQuery();

        var header = CreateRpmHeader("sqlite-pkg", "2.0.0", "aarch64");

        using var insert = connection.CreateCommand();
        insert.CommandText = $@"INSERT INTO Packages (pkgKey, pkgId, ""{headerColumnName}"")
VALUES ($key, $pkgId, $hdr);";
        insert.Parameters.AddWithValue("$key", 1);
        insert.Parameters.AddWithValue("$pkgId", new byte[] { 0x01, 0x02, 0x03, 0x04 }); // not an RPM header
        insert.Parameters.AddWithValue("$hdr", header);
        insert.ExecuteNonQuery();
    }

    private static byte[] CreateLegacyPackagesFile()
    {
        const int pageSize = 4096;

@@ -1,75 +1,75 @@
using System;
using System.IO;

namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities;

internal static class FixtureManager
{
    public static IDisposable UseFixture(string name, out string rootPath)
    {
        var basePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", name);
        if (!Directory.Exists(basePath))
        {
            throw new DirectoryNotFoundException($"Fixture '{name}' was not found at '{basePath}'.");
        }

        var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-os-fixture", name, Guid.NewGuid().ToString("n"));
        CopyDirectory(basePath, tempRoot);
        rootPath = tempRoot;
        return new Disposable(() => DeleteDirectory(tempRoot));
    }

    public static string GetGoldenPath(string name)
        => Path.Combine(AppContext.BaseDirectory, "Fixtures", "goldens", name);

    private static void CopyDirectory(string source, string destination)
    {
        Directory.CreateDirectory(destination);
        foreach (var file in Directory.GetFiles(source, "*", SearchOption.AllDirectories))
        {
            var relative = Path.GetRelativePath(source, file);
            var target = Path.Combine(destination, relative);
            Directory.CreateDirectory(Path.GetDirectoryName(target)!);
            File.Copy(file, target);
        }
    }

    private static void DeleteDirectory(string path)
    {
        if (!Directory.Exists(path))
        {
            return;
        }

        try
        {
            Directory.Delete(path, recursive: true);
        }
        catch
        {
            // best-effort cleanup
        }
    }

    private sealed class Disposable : IDisposable
    {
        private readonly Action _dispose;
        private bool _disposed;

        public Disposable(Action dispose)
        {
            _dispose = dispose;
        }

        public void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;
            _dispose();
        }
    }
}

@@ -1,41 +1,41 @@
using System;
using System.IO;
using Xunit;

namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities;

internal static class GoldenAssert
{
    private const string UpdateEnvironmentVariable = "UPDATE_OS_ANALYZER_FIXTURES";

    public static void MatchSnapshot(string snapshot, string goldenPath)
    {
        var directory = Path.GetDirectoryName(goldenPath);
        if (!string.IsNullOrWhiteSpace(directory) && !Directory.Exists(directory))
        {
            Directory.CreateDirectory(directory);
        }

        snapshot = Normalize(snapshot);

        if (!File.Exists(goldenPath))
        {
            File.WriteAllText(goldenPath, snapshot);
            return;
        }

        if (ShouldUpdate())
        {
            File.WriteAllText(goldenPath, snapshot);
        }

        var expected = Normalize(File.ReadAllText(goldenPath));
        Assert.Equal(expected.TrimEnd(), snapshot.TrimEnd());
    }

    private static bool ShouldUpdate()
        => string.Equals(Environment.GetEnvironmentVariable(UpdateEnvironmentVariable), "1", StringComparison.OrdinalIgnoreCase);

    private static string Normalize(string value)
        => value.Replace("\r\n", "\n");
}

@@ -1,106 +1,106 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Analyzers.OS;

namespace StellaOps.Scanner.Analyzers.OS.Tests.TestUtilities;

internal static class SnapshotSerializer
{
    private static readonly JsonSerializerOptions Options = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
        Converters =
        {
            new JsonStringEnumConverter(JsonNamingPolicy.CamelCase)
        }
    };

    public static string Serialize(IEnumerable<OSPackageAnalyzerResult> results)
    {
        var ordered = results
            .OrderBy(r => r.AnalyzerId, StringComparer.OrdinalIgnoreCase)
            .Select(result => new AnalyzerSnapshot
            {
                AnalyzerId = result.AnalyzerId,
                PackageCount = result.Telemetry.PackageCount,
                FileEvidenceCount = result.Telemetry.FileEvidenceCount,
                DurationMilliseconds = 0,
                Warnings = result.Warnings.Select(w => new WarningSnapshot(w.Code, w.Message)).ToArray(),
                Packages = result.Packages
                    .OrderBy(p => p, Comparer<OSPackageRecord>.Default)
                    .Select(p => new PackageSnapshot
                    {
                        PackageUrl = p.PackageUrl,
                        Name = p.Name,
                        Version = p.Version,
                        Architecture = p.Architecture,
                        Epoch = p.Epoch,
                        Release = p.Release,
                        SourcePackage = p.SourcePackage,
                        License = p.License,
                        EvidenceSource = p.EvidenceSource.ToString(),
                        CveHints = p.CveHints,
                        Provides = p.Provides,
                        Depends = p.Depends,
                        Files = p.Files.Select(f => new FileSnapshot
                        {
                            Path = f.Path,
                            LayerDigest = f.LayerDigest,
                            Sha256 = f.Sha256,
                            SizeBytes = f.SizeBytes,
                            IsConfigFile = f.IsConfigFile,
                            Digests = f.Digests.OrderBy(kv => kv.Key, StringComparer.OrdinalIgnoreCase).ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.OrdinalIgnoreCase)
                        }).ToArray(),
                        VendorMetadata = p.VendorMetadata.OrderBy(kv => kv.Key, StringComparer.Ordinal).ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.Ordinal)
                    }).ToArray()
            })
            .ToArray();

        return JsonSerializer.Serialize(ordered, Options);
    }

    private sealed record AnalyzerSnapshot
    {
        public string AnalyzerId { get; init; } = string.Empty;
        public double DurationMilliseconds { get; init; }
        public int PackageCount { get; init; }
        public int FileEvidenceCount { get; init; }
        public IReadOnlyList<WarningSnapshot> Warnings { get; init; } = Array.Empty<WarningSnapshot>();
        public IReadOnlyList<PackageSnapshot> Packages { get; init; } = Array.Empty<PackageSnapshot>();
    }

    private sealed record WarningSnapshot(string Code, string Message);

    private sealed record PackageSnapshot
    {
        public string PackageUrl { get; init; } = string.Empty;
        public string Name { get; init; } = string.Empty;
        public string Version { get; init; } = string.Empty;
        public string Architecture { get; init; } = string.Empty;
        public string? Epoch { get; init; }
        public string? Release { get; init; }
        public string? SourcePackage { get; init; }
        public string? License { get; init; }
        public string EvidenceSource { get; init; } = string.Empty;
        public IReadOnlyList<string> CveHints { get; init; } = Array.Empty<string>();
        public IReadOnlyList<string> Provides { get; init; } = Array.Empty<string>();
        public IReadOnlyList<string> Depends { get; init; } = Array.Empty<string>();
        public IReadOnlyList<FileSnapshot> Files { get; init; } = Array.Empty<FileSnapshot>();
        public IReadOnlyDictionary<string, string?> VendorMetadata { get; init; } = new Dictionary<string, string?>();
    }

    private sealed record FileSnapshot
    {
        public string Path { get; init; } = string.Empty;
        public string? LayerDigest { get; init; }
        public string? Sha256 { get; init; }
        public long? SizeBytes { get; init; }
        public bool? IsConfigFile { get; init; }
        public IReadOnlyDictionary<string, string> Digests { get; init; } = new Dictionary<string, string>();
    }
}

@@ -1,140 +1,140 @@
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Cache;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Cache.FileCas;
using StellaOps.Scanner.Cache.LayerCache;
using Xunit;

namespace StellaOps.Scanner.Cache.Tests;

public sealed class LayerCacheRoundTripTests : IAsyncLifetime
{
    private readonly string _rootPath;
    private readonly FakeTimeProvider _timeProvider;
    private readonly IOptions<ScannerCacheOptions> _options;
    private readonly LayerCacheStore _layerCache;
    private readonly FileContentAddressableStore _fileCas;

    public LayerCacheRoundTripTests()
    {
        _rootPath = Path.Combine(Path.GetTempPath(), "stellaops-cache-tests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(_rootPath);

        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero));

        var optionsValue = new ScannerCacheOptions
        {
            RootPath = _rootPath,
            LayerTtl = TimeSpan.FromHours(1),
            FileTtl = TimeSpan.FromHours(2),
            MaxBytes = 512 * 1024, // 512 KiB
            WarmBytesThreshold = 256 * 1024,
            ColdBytesThreshold = 400 * 1024,
            MaintenanceInterval = TimeSpan.FromMinutes(5)
        };

        _options = Options.Create(optionsValue);
        _layerCache = new LayerCacheStore(_options, NullLogger<LayerCacheStore>.Instance, _timeProvider);
        _fileCas = new FileContentAddressableStore(_options, NullLogger<FileContentAddressableStore>.Instance, _timeProvider);
    }

    [Fact]
    public async Task RoundTrip_Succeeds_And_Respects_Ttl_And_ImportExport()
    {
        var layerDigest = "sha256:abcd1234";
        var metadata = new Dictionary<string, string>
        {
            ["image"] = "ghcr.io/stella/sample:1",
            ["schema"] = "1.0"
        };

        using var inventoryStream = CreateStream("inventory" + Environment.NewLine + "component:libfoo" + Environment.NewLine);
        using var usageStream = CreateStream("usage" + Environment.NewLine + "component:bin" + Environment.NewLine);

        var request = new LayerCachePutRequest(
            layerDigest,
            architecture: "linux/amd64",
            mediaType: "application/vnd.oci.image.layer.v1.tar",
            metadata,
            new List<LayerCacheArtifactContent>
            {
                new("inventory.cdx.json", inventoryStream, "application/json"),
                new("usage.cdx.json", usageStream, "application/json")
            });

        var stored = await _layerCache.PutAsync(request, CancellationToken.None);
        stored.LayerDigest.Should().Be(layerDigest);
        stored.Artifacts.Should().ContainKey("inventory.cdx.json");
        stored.TotalSizeBytes.Should().BeGreaterThan(0);

        var cached = await _layerCache.TryGetAsync(layerDigest, CancellationToken.None);
        cached.Should().NotBeNull();
        cached!.Metadata.Should().ContainKey("image");

        await using (var artifact = await _layerCache.OpenArtifactAsync(layerDigest, "inventory.cdx.json", CancellationToken.None))
        {
            artifact.Should().NotBeNull();
            using var reader = new StreamReader(artifact!, Encoding.UTF8);
            var content = await reader.ReadToEndAsync();
            content.Should().Contain("component:libfoo");
        }

        // Store file CAS entry and validate export/import lifecycle.
        var casHash = "sha256:" + new string('f', 64);
        using var casStream = CreateStream("some-cas-content");
        await _fileCas.PutAsync(new FileCasPutRequest(casHash, casStream), CancellationToken.None);

        var exportPath = Path.Combine(_rootPath, "export");
        var exportCount = await _fileCas.ExportAsync(exportPath, CancellationToken.None);
        exportCount.Should().Be(1);

        await _fileCas.RemoveAsync(casHash, CancellationToken.None);
        (await _fileCas.TryGetAsync(casHash, CancellationToken.None)).Should().BeNull();

        var importCount = await _fileCas.ImportAsync(exportPath, CancellationToken.None);
        importCount.Should().Be(1);
        var imported = await _fileCas.TryGetAsync(casHash, CancellationToken.None);
        imported.Should().NotBeNull();
        imported!.RelativePath.Should().EndWith("content.bin");

        // TTL eviction
        _timeProvider.Advance(TimeSpan.FromHours(2));
        await _layerCache.EvictExpiredAsync(CancellationToken.None);
        (await _layerCache.TryGetAsync(layerDigest, CancellationToken.None)).Should().BeNull();

        // Compaction removes CAS entry once over threshold.
        // Force compaction by writing a large entry.
        using var largeStream = CreateStream(new string('x', 400_000));
        var largeHash = "sha256:" + new string('e', 64);
        await _fileCas.PutAsync(new FileCasPutRequest(largeHash, largeStream), CancellationToken.None);
        _timeProvider.Advance(TimeSpan.FromMinutes(1));
        await _fileCas.CompactAsync(CancellationToken.None);
        (await _fileCas.TryGetAsync(casHash, CancellationToken.None)).Should().BeNull();
    }

    public Task InitializeAsync() => Task.CompletedTask;

    public Task DisposeAsync()
    {
        try
        {
            if (Directory.Exists(_rootPath))
            {
                Directory.Delete(_rootPath, recursive: true);
            }
        }
        catch
        {
            // Ignored – best effort cleanup.
        }

        return Task.CompletedTask;
    }

    private static MemoryStream CreateStream(string content)
        => new(Encoding.UTF8.GetBytes(content));
}

@@ -1,23 +1,23 @@
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Scanner.Core.Contracts;

namespace StellaOps.Scanner.Core.Tests.Contracts;

public sealed class ComponentGraphBuilderTests
{
    [Fact]
    public void Build_AggregatesComponentsAcrossLayers()
    {
        var layer1 = LayerComponentFragment.Create("sha256:layer1", new[]
        {
            new ComponentRecord
            {
                Identity = ComponentIdentity.Create("pkg:npm/a", "a", "1.0.0"),
                LayerDigest = "sha256:layer1",
                Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/package.json")),
                Dependencies = ImmutableArray.Create("pkg:npm/x"),
                Usage = ComponentUsage.Create(false),
                Metadata = new ComponentMetadata
                {
                    Scope = "runtime",
@@ -27,10 +27,10 @@ public sealed class ComponentGraphBuilderTests
        });

        var layer2 = LayerComponentFragment.Create("sha256:layer2", new[]
        {
            new ComponentRecord
            {
                Identity = ComponentIdentity.Create("pkg:npm/a", "a", "1.0.0"),
                LayerDigest = "sha256:layer2",
                Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/index.js")),
                Dependencies = ImmutableArray.Create("pkg:npm/y"),
@@ -44,55 +44,55 @@ public sealed class ComponentGraphBuilderTests
            {
                Identity = ComponentIdentity.Create("pkg:npm/b", "b", "2.0.0"),
                LayerDigest = "sha256:layer2",
                Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/b/package.json")),
            }
        });

        var graph = ComponentGraphBuilder.Build(new[] { layer1, layer2 });

        Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, graph.Layers.Select(layer => layer.LayerDigest));
        Assert.Equal(new[] { "pkg:npm/a", "pkg:npm/b" }, graph.Components.Select(component => component.Identity.Key));

        var componentA = graph.ComponentMap["pkg:npm/a"];
        Assert.Equal("sha256:layer1", componentA.FirstLayerDigest);
        Assert.Equal("sha256:layer2", componentA.LastLayerDigest);
        Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, componentA.LayerDigests);
        Assert.True(componentA.Usage.UsedByEntrypoint);
        Assert.Contains("/app/start.sh", componentA.Usage.Entrypoints);
        Assert.Equal(new[] { "pkg:npm/x", "pkg:npm/y" }, componentA.Dependencies);
        Assert.Equal("runtime", componentA.Metadata?.Scope);
        Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", componentA.Metadata?.BuildId);
        Assert.Equal(2, componentA.Evidence.Length);

        var componentB = graph.ComponentMap["pkg:npm/b"];
        Assert.Equal("sha256:layer2", componentB.FirstLayerDigest);
        Assert.Null(componentB.LastLayerDigest);
        Assert.Single(componentB.LayerDigests, "sha256:layer2");
        Assert.False(componentB.Usage.UsedByEntrypoint);
    }

    [Fact]
    public void Build_DeterministicOrdering()
    {
        var fragments = new[]
        {
            LayerComponentFragment.Create("sha256:layer1", new[]
            {
                new ComponentRecord
                {
                    Identity = ComponentIdentity.Create("pkg:npm/c", "c"),
                    LayerDigest = "sha256:layer1",
                },
                new ComponentRecord
                {
                    Identity = ComponentIdentity.Create("pkg:npm/a", "a"),
                    LayerDigest = "sha256:layer1",
                }
            })
        };

        var graph1 = ComponentGraphBuilder.Build(fragments);
        var graph2 = ComponentGraphBuilder.Build(fragments);

        Assert.Equal(graph1.Components.Select(c => c.Identity.Key), graph2.Components.Select(c => c.Identity.Key));
    }

@@ -1,67 +1,67 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Serialization;

namespace StellaOps.Scanner.Core.Tests.Contracts;

public sealed class ComponentModelsTests
{
    [Fact]
    public void ComponentIdentity_Create_Trimmed()
    {
        var identity = ComponentIdentity.Create(" pkg:npm/foo ", " Foo ", " 1.0.0 ", " pkg:npm/foo@1.0.0 ", " library ", " group ");

        Assert.Equal("pkg:npm/foo", identity.Key);
        Assert.Equal("Foo", identity.Name);
        Assert.Equal("1.0.0", identity.Version);
        Assert.Equal("pkg:npm/foo@1.0.0", identity.Purl);
        Assert.Equal("library", identity.ComponentType);
        Assert.Equal("group", identity.Group);
    }

    [Fact]
    public void ComponentUsage_Create_SortsEntrypoints()
    {
        var usage = ComponentUsage.Create(true, new[] { "/app/start.sh", "/app/start.sh", "/bin/init", " ", null! });

        Assert.True(usage.UsedByEntrypoint);
        Assert.Equal(new[] { "/app/start.sh", "/bin/init" }, usage.Entrypoints);
    }

    [Fact]
    public void LayerComponentFragment_Create_SortsComponents()
    {
        var compB = new ComponentRecord
        {
            Identity = ComponentIdentity.Create("pkg:npm/b", "b"),
            LayerDigest = "sha256:layer2",
        };

        var compA = new ComponentRecord
        {
            Identity = ComponentIdentity.Create("pkg:npm/a", "a"),
            LayerDigest = "sha256:layer2",
        };

        var fragment = LayerComponentFragment.Create("sha256:layer2", new[] { compB, compA });

        Assert.Equal("sha256:layer2", fragment.LayerDigest);
        Assert.Equal(new[] { compA.Identity.Key, compB.Identity.Key }, fragment.Components.Select(c => c.Identity.Key));
    }

    [Fact]
    public void ComponentRecord_Serializes_WithScannerDefaults()
    {
        var record = new ComponentRecord
        {
            Identity = ComponentIdentity.Create("pkg:npm/test", "test", "1.0.0"),
            LayerDigest = "sha256:layer",
            Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/package.json")),
            Dependencies = ImmutableArray.Create("pkg:npm/dep"),
            Usage = ComponentUsage.Create(true, new[] { "/app/start.sh" }),
            Metadata = new ComponentMetadata
            {
                Scope = "runtime",
@@ -76,9 +76,9 @@ public sealed class ComponentModelsTests

        var json = JsonSerializer.Serialize(record, ScannerJsonOptions.Default);
        var deserialized = JsonSerializer.Deserialize<ComponentRecord>(json, ScannerJsonOptions.Default);

        Assert.NotNull(deserialized);
        Assert.Equal(record.Identity.Key, deserialized!.Identity.Key);
        Assert.Equal(record.Metadata?.Scope, deserialized.Metadata?.Scope);
        Assert.Equal(record.Metadata?.BuildId, deserialized.Metadata?.BuildId);
        Assert.True(deserialized.Usage.UsedByEntrypoint);

@@ -1,81 +1,81 @@
using System.Text.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Serialization;
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Contracts;

public sealed class ScanJobTests
{
    [Fact]
    public void SerializeAndDeserialize_RoundTripsDeterministically()
    {
        var createdAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero);
        var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:ABCDEF", "tenant-a", "request-1");
        var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
        var error = new ScannerError(
            ScannerErrorCode.AnalyzerFailure,
            ScannerErrorSeverity.Error,
            "Analyzer crashed for layer sha256:abc",
            createdAt,
            retryable: false,
            details: new Dictionary<string, string>
            {
                ["stage"] = "analyze-os",
                ["layer"] = "sha256:abc"
            });

        var job = new ScanJob(
            jobId,
            ScanJobStatus.Running,
            "registry.example.com/stellaops/scanner:1.2.3",
            "SHA256:ABCDEF",
            createdAt,
            createdAt,
            correlationId,
            "tenant-a",
            new Dictionary<string, string>
            {
                ["requestId"] = "request-1"
            },
            error);

        var json = JsonSerializer.Serialize(job, ScannerJsonOptions.CreateDefault());
        var deserialized = JsonSerializer.Deserialize<ScanJob>(json, ScannerJsonOptions.CreateDefault());

        Assert.NotNull(deserialized);
        Assert.Equal(job.Id, deserialized!.Id);
        Assert.Equal(job.ImageDigest, deserialized.ImageDigest);
        Assert.Equal(job.CorrelationId, deserialized.CorrelationId);
        Assert.Equal(job.Metadata["requestId"], deserialized.Metadata["requestId"]);

        var secondJson = JsonSerializer.Serialize(deserialized, ScannerJsonOptions.CreateDefault());
        Assert.Equal(json, secondJson);
    }

    [Fact]
    public void WithStatus_UpdatesTimestampDeterministically()
    {
        var createdAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, 123, TimeSpan.Zero);
        var jobId = ScannerIdentifiers.CreateJobId("example/scanner:latest", "sha256:def", null, null);
        var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");

        var job = new ScanJob(
            jobId,
            ScanJobStatus.Pending,
            "example/scanner:latest",
            "sha256:def",
            createdAt,
            null,
            correlationId,
            null,
            null,
            null);

        var updated = job.WithStatus(ScanJobStatus.Running, createdAt.AddSeconds(5));

        Assert.Equal(ScanJobStatus.Running, updated.Status);
        Assert.Equal(ScannerTimestamps.Normalize(createdAt.AddSeconds(5)), updated.UpdatedAt);
    }
}

@@ -1,130 +1,130 @@
using System.Text.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Serialization;
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Contracts;

public sealed class ScannerCoreContractsTests
{
    private static readonly JsonSerializerOptions Options = ScannerJsonOptions.CreateDefault();
    private static readonly ScanJobId SampleJobId = ScanJobId.From(Guid.Parse("8f4cc9c5-8245-4b9d-9b4f-5ae049631b7d"));
    private static readonly DateTimeOffset SampleCreatedAt = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero).AddTicks(1_234_560);

    [Fact]
    public void ScanJob_RoundTripMatchesGoldenFixture()
    {
        var job = CreateSampleJob();

        var json = JsonSerializer.Serialize(job, Options);
        var expected = LoadFixture("scan-job.json");
        Assert.Equal(expected, json);

        var deserialized = JsonSerializer.Deserialize<ScanJob>(expected, Options);
        Assert.NotNull(deserialized);
        Assert.Equal(job.Id, deserialized!.Id);
        Assert.Equal(job.ImageDigest, deserialized.ImageDigest);
        Assert.Equal(job.CorrelationId, deserialized.CorrelationId);
        Assert.Equal(job.Metadata, deserialized.Metadata);
        Assert.Equal(job.Failure?.Message, deserialized.Failure?.Message);
        Assert.Equal(job.Failure?.Details, deserialized.Failure?.Details);
    }

    [Fact]
    public void ScanProgressEvent_RoundTripMatchesGoldenFixture()
    {
        var progress = CreateSampleProgressEvent();

        var json = JsonSerializer.Serialize(progress, Options);
        var expected = LoadFixture("scan-progress-event.json");
        Assert.Equal(expected, json);

        var deserialized = JsonSerializer.Deserialize<ScanProgressEvent>(expected, Options);
        Assert.NotNull(deserialized);
        Assert.Equal(progress.JobId, deserialized!.JobId);
        Assert.Equal(progress.Stage, deserialized.Stage);
        Assert.Equal(progress.Kind, deserialized.Kind);
        Assert.Equal(progress.Sequence, deserialized.Sequence);
        Assert.Equal(progress.Error?.Details, deserialized.Error?.Details);
    }

    [Fact]
    public void ScannerError_RoundTripMatchesGoldenFixture()
    {
        var error = CreateSampleError();

        var json = JsonSerializer.Serialize(error, Options);
        var expected = LoadFixture("scanner-error.json");
        Assert.Equal(expected, json);

        var deserialized = JsonSerializer.Deserialize<ScannerError>(expected, Options);
        Assert.NotNull(deserialized);
        Assert.Equal(error.Code, deserialized!.Code);
        Assert.Equal(error.Severity, deserialized.Severity);
        Assert.Equal(error.Details, deserialized.Details);
    }

    private static ScanJob CreateSampleJob()
    {
        var updatedAt = SampleCreatedAt.AddSeconds(5);
        var correlationId = ScannerIdentifiers.CreateCorrelationId(SampleJobId, nameof(ScanStage.AnalyzeOperatingSystem));

        return new ScanJob(
            SampleJobId,
            ScanJobStatus.Running,
            "registry.example.com/stellaops/scanner:1.2.3",
            "SHA256:ABCDEF",
            SampleCreatedAt,
            updatedAt,
            correlationId,
            "tenant-a",
            new Dictionary<string, string>
            {
                ["requestId"] = "req-1234",
                ["source"] = "ci"
            },
            CreateSampleError());
    }

    private static ScanProgressEvent CreateSampleProgressEvent()
    {
        return new ScanProgressEvent(
            SampleJobId,
            ScanStage.AnalyzeOperatingSystem,
            ScanProgressEventKind.Warning,
            sequence: 3,
            timestamp: SampleCreatedAt.AddSeconds(1),
            percentComplete: 42.5,
            message: "OS analyzer reported missing packages",
            attributes: new Dictionary<string, string>
            {
                ["package"] = "openssl",
                ["version"] = "1.1.1w"
            },
            error: CreateSampleError());
    }

    private static ScannerError CreateSampleError()
    {
        return new ScannerError(
            ScannerErrorCode.AnalyzerFailure,
            ScannerErrorSeverity.Error,
            "Analyzer failed to parse layer",
            SampleCreatedAt,
            retryable: false,
            details: new Dictionary<string, string>
            {
                ["layerDigest"] = "sha256:deadbeef",
                ["attempt"] = "1"
            },
            stage: nameof(ScanStage.AnalyzeOperatingSystem),
            component: "os-analyzer");
    }

    private static string LoadFixture(string fileName)
    {
        var path = Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName);
        return File.ReadAllText(path).Trim();
    }
}
@@ -1,103 +1,103 @@
using System.Collections.Generic;
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Observability;
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Observability;

public sealed class ScannerLogExtensionsPerformanceTests
{
    private const double ThresholdMicroseconds = 5.0;
    private const int WarmupIterations = 5_000;
    private const int MeasuredIterations = 200_000;
    private static readonly DateTimeOffset Timestamp = ScannerTimestamps.Normalize(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero));
    private static readonly string Stage = nameof(ScanStage.AnalyzeOperatingSystem);
    private static readonly string Component = "os-analyzer";

    [Fact]
    public void BeginScanScope_CompletesWithinThreshold()
    {
        using var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => false));
        var logger = factory.CreateLogger("ScannerPerformance");
        var job = CreateScanJob();

        var microseconds = Measure(() => logger.BeginScanScope(job, Stage, Component));

        Assert.True(microseconds <= ThresholdMicroseconds, $"Expected BeginScanScope to stay ≤ {ThresholdMicroseconds} µs but measured {microseconds:F3} µs.");
    }

    [Fact]
    public void BeginProgressScope_CompletesWithinThreshold()
    {
        using var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => false));
        var logger = factory.CreateLogger("ScannerPerformance");
        var progress = CreateProgressEvent();

        var microseconds = Measure(() => logger.BeginProgressScope(progress, Component));

        Assert.True(microseconds <= ThresholdMicroseconds, $"Expected BeginProgressScope to stay ≤ {ThresholdMicroseconds} µs but measured {microseconds:F3} µs.");
    }

    private static double Measure(Func<IDisposable> scopeFactory)
    {
        for (var i = 0; i < WarmupIterations; i++)
        {
            using var scope = scopeFactory();
        }

        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        var stopwatch = Stopwatch.StartNew();
        for (var i = 0; i < MeasuredIterations; i++)
        {
            using var scope = scopeFactory();
        }

        stopwatch.Stop();

        return stopwatch.Elapsed.TotalSeconds * 1_000_000 / MeasuredIterations;
    }

    private static ScanJob CreateScanJob()
    {
        var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "perf");
        var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, Stage, Component);

        return new ScanJob(
            jobId,
            ScanJobStatus.Running,
            "registry.example.com/stellaops/scanner:1.2.3",
            "sha256:abcdef",
            Timestamp,
            Timestamp,
            correlationId,
            "tenant-a",
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["requestId"] = "req-perf"
            });
    }

    private static ScanProgressEvent CreateProgressEvent()
    {
        var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "perf");

        return new ScanProgressEvent(
            jobId,
            ScanStage.AnalyzeOperatingSystem,
            ScanProgressEventKind.Progress,
            sequence: 42,
            Timestamp,
            percentComplete: 10.5,
            message: "performance check",
            attributes: new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["sample"] = "true"
            });
    }
}
@@ -1,39 +1,39 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Observability;
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Observability;

public sealed class ScannerLogExtensionsTests
{
    [Fact]
    public void BeginScanScope_PopulatesCorrelationContext()
    {
        using var factory = LoggerFactory.Create(builder => builder.AddFilter(_ => true));
        var logger = factory.CreateLogger("test");

        var jobId = ScannerIdentifiers.CreateJobId("example/scanner:1.0", "sha256:abc", null, null);
        var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, "enqueue");
        var job = new ScanJob(
            jobId,
            ScanJobStatus.Pending,
            "example/scanner:1.0",
            "sha256:abc",
            DateTimeOffset.UtcNow,
            null,
            correlationId,
            null,
            null,
            null);

        using (logger.BeginScanScope(job, "enqueue"))
        {
            Assert.True(ScannerCorrelationContextAccessor.TryGetCorrelationId(out var current));
            Assert.Equal(correlationId, current);
        }

        Assert.False(ScannerCorrelationContextAccessor.TryGetCorrelationId(out _));
    }
}
@@ -1,95 +1,95 @@
using System.Collections.Generic;
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Client;
using StellaOps.Scanner.Core.Security;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Security;

public sealed class AuthorityTokenSourceTests
{
    [Fact]
    public async Task GetAsync_ReusesCachedTokenUntilRefreshSkew()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var client = new FakeTokenClient(timeProvider);
        var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance);

        var token1 = await source.GetAsync("scanner", new[] { "scanner.read" });
        Assert.Equal(1, client.RequestCount);
        Assert.Null(client.LastAdditionalParameters);

        var token2 = await source.GetAsync("scanner", new[] { "scanner.read" });
        Assert.Equal(1, client.RequestCount);
        Assert.Equal(token1.AccessToken, token2.AccessToken);

        timeProvider.Advance(TimeSpan.FromMinutes(3));
        var token3 = await source.GetAsync("scanner", new[] { "scanner.read" });
        Assert.Equal(2, client.RequestCount);
        Assert.NotEqual(token1.AccessToken, token3.AccessToken);
    }

    [Fact]
    public async Task InvalidateAsync_RemovesCachedToken()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var client = new FakeTokenClient(timeProvider);
        var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance);

        _ = await source.GetAsync("scanner", new[] { "scanner.read" });
        Assert.Equal(1, client.RequestCount);
        Assert.Null(client.LastAdditionalParameters);

        await source.InvalidateAsync("scanner", new[] { "scanner.read" });
        _ = await source.GetAsync("scanner", new[] { "scanner.read" });

        Assert.Equal(2, client.RequestCount);
    }

    private sealed class FakeTokenClient : IStellaOpsTokenClient
    {
        private readonly FakeTimeProvider timeProvider;
        private int counter;

        public FakeTokenClient(FakeTimeProvider timeProvider)
        {
            this.timeProvider = timeProvider;
        }

        public int RequestCount => counter;

        public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; }

        public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default)
        {
            LastAdditionalParameters = additionalParameters;
            var access = $"token-{Interlocked.Increment(ref counter)}";
            var expires = timeProvider.GetUtcNow().AddMinutes(2);
            var scopes = scope is null
                ? Array.Empty<string>()
                : scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            return Task.FromResult(new StellaOpsTokenResult(access, "Bearer", expires, scopes));
        }

        public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default)
            => throw new NotSupportedException();

        public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default)
            => throw new NotSupportedException();

        public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default)
            => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null);

        public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default)
            => ValueTask.CompletedTask;

        public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default)
            => ValueTask.CompletedTask;
    }
}
@@ -1,117 +1,117 @@
using System.Collections.Generic;
using System.IdentityModel.Tokens.Jwt;
using System.Security.Cryptography;
using Microsoft.Extensions.Time.Testing;
using Microsoft.IdentityModel.Tokens;
using Microsoft.Extensions.Options;
using StellaOps.Auth.Security.Dpop;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Security;

public sealed class DpopProofValidatorTests
{
    [Fact]
    public async Task ValidateAsync_ReturnsSuccess_ForValidProof()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), new InMemoryDpopReplayCache(timeProvider), timeProvider);
        using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };

        var proof = CreateProof(timeProvider, securityKey, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
        var result = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));

        Assert.True(result.IsValid);
        Assert.NotNull(result.PublicKey);
        Assert.NotNull(result.JwtId);
    }

    [Fact]
    public async Task ValidateAsync_Fails_OnNonceMismatch()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), new InMemoryDpopReplayCache(timeProvider), timeProvider);
        using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };

        var proof = CreateProof(timeProvider, securityKey, "POST", new Uri("https://scanner.example.com/api/v1/scans"), nonce: "expected");
        var result = await validator.ValidateAsync(proof, "POST", new Uri("https://scanner.example.com/api/v1/scans"), nonce: "different");

        Assert.False(result.IsValid);
        Assert.Equal("invalid_token", result.ErrorCode);
    }

    [Fact]
    public async Task ValidateAsync_Fails_OnReplay()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var cache = new InMemoryDpopReplayCache(timeProvider);
        var validator = new DpopProofValidator(Options.Create(new DpopValidationOptions()), cache, timeProvider);
        using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var securityKey = new ECDsaSecurityKey(key) { KeyId = Guid.NewGuid().ToString("N") };
        var jti = Guid.NewGuid().ToString();

        var proof = CreateProof(timeProvider, securityKey, "GET", new Uri("https://scanner.example.com/api/v1/scans"), jti: jti);

        var first = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
        Assert.True(first.IsValid);

        var second = await validator.ValidateAsync(proof, "GET", new Uri("https://scanner.example.com/api/v1/scans"));
        Assert.False(second.IsValid);
        Assert.Equal("replay", second.ErrorCode);
    }

    private static string CreateProof(FakeTimeProvider timeProvider, ECDsaSecurityKey key, string method, Uri uri, string? nonce = null, string? jti = null)
    {
        var handler = new JwtSecurityTokenHandler();
        var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256);
        var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key);

        var header = new JwtHeader(signingCredentials)
        {
            ["typ"] = "dpop+jwt",
            ["jwk"] = new Dictionary<string, object?>
            {
                ["kty"] = jwk.Kty,
                ["crv"] = jwk.Crv,
                ["x"] = jwk.X,
                ["y"] = jwk.Y
            }
        };

        var payload = new JwtPayload
        {
            ["htm"] = method.ToUpperInvariant(),
            ["htu"] = Normalize(uri),
            ["iat"] = timeProvider.GetUtcNow().ToUnixTimeSeconds(),
            ["jti"] = jti ?? Guid.NewGuid().ToString()
        };

        if (nonce is not null)
        {
            payload["nonce"] = nonce;
        }

        var token = new JwtSecurityToken(header, payload);
        return handler.WriteToken(token);
    }

    private static string Normalize(Uri uri)
    {
        var builder = new UriBuilder(uri)
        {
            Fragment = string.Empty
        };

        builder.Host = builder.Host.ToLowerInvariant();
        builder.Scheme = builder.Scheme.ToLowerInvariant();

        if ((builder.Scheme == "http" && builder.Port == 80) || (builder.Scheme == "https" && builder.Port == 443))
        {
            builder.Port = -1;
        }

        return builder.Uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped);
    }
}
@@ -1,26 +1,26 @@
using System;
using StellaOps.Scanner.Core.Security;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Security;

public sealed class RestartOnlyPluginGuardTests
{
    [Fact]
    public void EnsureRegistrationAllowed_AllowsNewPluginsBeforeSeal()
    {
        var guard = new RestartOnlyPluginGuard();
        guard.EnsureRegistrationAllowed("./plugins/analyzer.dll");

        Assert.Contains(guard.KnownPlugins, path => path.EndsWith("analyzer.dll", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void EnsureRegistrationAllowed_ThrowsAfterSeal()
    {
        var guard = new RestartOnlyPluginGuard(new[] { "./plugins/a.dll" });
        guard.Seal();

        Assert.Throws<InvalidOperationException>(() => guard.EnsureRegistrationAllowed("./plugins/new.dll"));
    }
}
@@ -1,33 +1,33 @@
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Utility;

public sealed class ScannerIdentifiersTests
{
    [Fact]
    public void CreateJobId_IsDeterministicAndCaseInsensitive()
    {
        var first = ScannerIdentifiers.CreateJobId("registry.example.com/repo:latest", "SHA256:ABC", "Tenant-A", "salt");
        var second = ScannerIdentifiers.CreateJobId("REGISTRY.EXAMPLE.COM/REPO:latest", "sha256:abc", "tenant-a", "salt");

        Assert.Equal(first, second);
    }

    [Fact]
    public void CreateDeterministicHash_ProducesLowercaseHex()
    {
        var hash = ScannerIdentifiers.CreateDeterministicHash("scan", "abc", "123");

        Assert.Matches("^[0-9a-f]{64}$", hash);
        Assert.Equal(hash, hash.ToLowerInvariant());
    }

    [Fact]
    public void NormalizeImageReference_LowercasesRegistryAndRepository()
    {
        var normalized = ScannerIdentifiers.NormalizeImageReference("Registry.Example.com/StellaOps/Scanner:1.0");

        Assert.Equal("registry.example.com/stellaops/scanner:1.0", normalized);
    }
}
@@ -1,26 +1,26 @@
using StellaOps.Scanner.Core.Utility;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Utility;

public sealed class ScannerTimestampsTests
{
    [Fact]
    public void Normalize_TrimsToMicroseconds()
    {
        var value = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.Zero).AddTicks(7);
        var normalized = ScannerTimestamps.Normalize(value);

        var expectedTicks = value.UtcTicks - (value.UtcTicks % 10);
        Assert.Equal(expectedTicks, normalized.UtcTicks);
    }

    [Fact]
    public void ToIso8601_ProducesUtcString()
    {
        var value = new DateTimeOffset(2025, 10, 18, 14, 30, 15, TimeSpan.FromHours(-4));
        var iso = ScannerTimestamps.ToIso8601(value);

        Assert.Equal("2025-10-18T18:30:15.000000Z", iso);
    }
}
@@ -4,272 +4,272 @@ using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Globalization;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Diff;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Diff.Tests;
|
||||
|
||||
public sealed class ComponentDifferTests
|
||||
{
|
||||
[Fact]
|
||||
public void Compute_CapturesAddedRemovedAndChangedComponents()
|
||||
{
|
||||
var oldFragments = new[]
|
||||
{
|
||||
LayerComponentFragment.Create("sha256:layer1", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.0.0",
|
||||
layer: "sha256:layer1",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }),
|
||||
evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }),
|
||||
CreateComponent("pkg:npm/b", version: "2.0.0", layer: "sha256:layer1", scope: "runtime"),
|
||||
}),
|
||||
LayerComponentFragment.Create("sha256:layer1b", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.0.0",
|
||||
layer: "sha256:layer1b",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" })),
|
||||
CreateComponent("pkg:npm/d", version: "0.9.0", layer: "sha256:layer1b"),
|
||||
})
|
||||
};
|
||||
|
||||
var newFragments = new[]
|
||||
{
|
||||
LayerComponentFragment.Create("sha256:layer2", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.1.0",
|
||||
layer: "sha256:layer2",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }),
|
||||
evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }),
|
||||
}),
|
||||
LayerComponentFragment.Create("sha256:layer3", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/b",
|
||||
version: "2.0.0",
|
||||
layer: "sha256:layer3",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/init.sh" }),
|
||||
scope: "runtime"),
|
||||
CreateComponent("pkg:npm/c", version: "3.0.0", layer: "sha256:layer3"),
|
||||
})
|
||||
};
|
||||
|
||||
var oldGraph = ComponentGraphBuilder.Build(oldFragments);
|
||||
var newGraph = ComponentGraphBuilder.Build(newFragments);
|
||||
|
||||
var request = new ComponentDiffRequest
|
||||
{
|
||||
OldGraph = oldGraph,
|
||||
NewGraph = newGraph,
|
||||
GeneratedAt = new DateTimeOffset(2025, 10, 19, 10, 0, 0, TimeSpan.Zero),
|
||||
View = SbomView.Inventory,
|
||||
OldImageDigest = "sha256:old",
|
||||
NewImageDigest = "sha256:new",
|
||||
};
|
||||
|
||||
var differ = new ComponentDiffer();
|
||||
var document = differ.Compute(request);
|
||||
|
||||
Assert.Equal(SbomView.Inventory, document.View);
|
||||
Assert.Equal("sha256:old", document.OldImageDigest);
|
||||
Assert.Equal("sha256:new", document.NewImageDigest);
|
||||
Assert.Equal(1, document.Summary.Added);
|
||||
Assert.Equal(1, document.Summary.Removed);
|
||||
Assert.Equal(1, document.Summary.VersionChanged);
|
||||
Assert.Equal(1, document.Summary.MetadataChanged);
|
||||
|
||||
Assert.Equal(new[] { "sha256:layer2", "sha256:layer3", "sha256:layer1b" }, document.Layers.Select(layer => layer.LayerDigest));
|
||||
|
||||
var layerGroups = document.Layers.ToDictionary(layer => layer.LayerDigest);
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer2"), "Expected layer2 group present");
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer3"), "Expected layer3 group present");
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer1b"), "Expected layer1b group present");
|
||||
|
||||
var addedChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.Added);
|
||||
Assert.Equal("pkg:npm/c", addedChange.ComponentKey);
|
||||
Assert.NotNull(addedChange.NewComponent);
|
||||
|
||||
var versionChange = layerGroups["sha256:layer2"].Changes.Single(change => change.Kind == ComponentChangeKind.VersionChanged);
|
||||
Assert.Equal("pkg:npm/a", versionChange.ComponentKey);
|
||||
Assert.Equal("sha256:layer1b", versionChange.RemovingLayer);
|
||||
Assert.Equal("sha256:layer2", versionChange.IntroducingLayer);
|
||||
Assert.Equal("1.1.0", versionChange.NewComponent!.Identity.Version);
|
||||
|
||||
var metadataChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.MetadataChanged);
|
||||
Assert.True(metadataChange.NewComponent!.Usage.UsedByEntrypoint);
|
||||
Assert.False(metadataChange.OldComponent!.Usage.UsedByEntrypoint);
|
||||
Assert.Equal("sha256:layer3", metadataChange.IntroducingLayer);
|
||||
Assert.Equal("sha256:layer1", metadataChange.RemovingLayer);
|
||||
|
||||
var removedChange = layerGroups["sha256:layer1b"].Changes.Single(change => change.Kind == ComponentChangeKind.Removed);
|
||||
Assert.Equal("pkg:npm/d", removedChange.ComponentKey);
|
||||
Assert.Equal("sha256:layer1b", removedChange.RemovingLayer);
|
||||
Assert.Null(removedChange.IntroducingLayer);
|
||||
|
||||
var json = DiffJsonSerializer.Serialize(document);
|
||||
using var parsed = JsonDocument.Parse(json);
|
||||
var root = parsed.RootElement;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Diff;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Diff.Tests;
|
||||
|
||||
public sealed class ComponentDifferTests
|
||||
{
|
||||
[Fact]
|
||||
public void Compute_CapturesAddedRemovedAndChangedComponents()
|
||||
{
|
||||
var oldFragments = new[]
|
||||
{
|
||||
LayerComponentFragment.Create("sha256:layer1", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.0.0",
|
||||
layer: "sha256:layer1",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }),
|
||||
evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }),
|
||||
CreateComponent("pkg:npm/b", version: "2.0.0", layer: "sha256:layer1", scope: "runtime"),
|
||||
}),
|
||||
LayerComponentFragment.Create("sha256:layer1b", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.0.0",
|
||||
layer: "sha256:layer1b",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" })),
|
||||
CreateComponent("pkg:npm/d", version: "0.9.0", layer: "sha256:layer1b"),
|
||||
})
|
||||
};
|
||||
|
||||
var newFragments = new[]
|
||||
{
|
||||
LayerComponentFragment.Create("sha256:layer2", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/a",
|
||||
version: "1.1.0",
|
||||
layer: "sha256:layer2",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }),
|
||||
evidence: new[] { ComponentEvidence.FromPath("/app/package-lock.json") }),
|
||||
}),
|
||||
LayerComponentFragment.Create("sha256:layer3", new[]
|
||||
{
|
||||
CreateComponent(
|
||||
"pkg:npm/b",
|
||||
version: "2.0.0",
|
||||
layer: "sha256:layer3",
|
||||
usage: ComponentUsage.Create(true, new[] { "/app/init.sh" }),
|
||||
scope: "runtime"),
|
||||
CreateComponent("pkg:npm/c", version: "3.0.0", layer: "sha256:layer3"),
|
||||
})
|
||||
};
|
||||
|
||||
var oldGraph = ComponentGraphBuilder.Build(oldFragments);
|
||||
var newGraph = ComponentGraphBuilder.Build(newFragments);
|
||||
|
||||
var request = new ComponentDiffRequest
|
||||
{
|
||||
OldGraph = oldGraph,
|
||||
NewGraph = newGraph,
|
||||
GeneratedAt = new DateTimeOffset(2025, 10, 19, 10, 0, 0, TimeSpan.Zero),
|
||||
View = SbomView.Inventory,
|
||||
OldImageDigest = "sha256:old",
|
||||
NewImageDigest = "sha256:new",
|
||||
};
|
||||
|
||||
var differ = new ComponentDiffer();
|
||||
var document = differ.Compute(request);
|
||||
|
||||
Assert.Equal(SbomView.Inventory, document.View);
|
||||
Assert.Equal("sha256:old", document.OldImageDigest);
|
||||
Assert.Equal("sha256:new", document.NewImageDigest);
|
||||
Assert.Equal(1, document.Summary.Added);
|
||||
Assert.Equal(1, document.Summary.Removed);
|
||||
Assert.Equal(1, document.Summary.VersionChanged);
|
||||
Assert.Equal(1, document.Summary.MetadataChanged);
|
||||
|
||||
Assert.Equal(new[] { "sha256:layer2", "sha256:layer3", "sha256:layer1b" }, document.Layers.Select(layer => layer.LayerDigest));
|
||||
|
||||
var layerGroups = document.Layers.ToDictionary(layer => layer.LayerDigest);
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer2"), "Expected layer2 group present");
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer3"), "Expected layer3 group present");
|
||||
Assert.True(layerGroups.ContainsKey("sha256:layer1b"), "Expected layer1b group present");
|
||||
|
||||
        var addedChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.Added);
        Assert.Equal("pkg:npm/c", addedChange.ComponentKey);
        Assert.NotNull(addedChange.NewComponent);

        var versionChange = layerGroups["sha256:layer2"].Changes.Single(change => change.Kind == ComponentChangeKind.VersionChanged);
        Assert.Equal("pkg:npm/a", versionChange.ComponentKey);
        Assert.Equal("sha256:layer1b", versionChange.RemovingLayer);
        Assert.Equal("sha256:layer2", versionChange.IntroducingLayer);
        Assert.Equal("1.1.0", versionChange.NewComponent!.Identity.Version);

        var metadataChange = layerGroups["sha256:layer3"].Changes.Single(change => change.Kind == ComponentChangeKind.MetadataChanged);
        Assert.True(metadataChange.NewComponent!.Usage.UsedByEntrypoint);
        Assert.False(metadataChange.OldComponent!.Usage.UsedByEntrypoint);
        Assert.Equal("sha256:layer3", metadataChange.IntroducingLayer);
        Assert.Equal("sha256:layer1", metadataChange.RemovingLayer);

        var removedChange = layerGroups["sha256:layer1b"].Changes.Single(change => change.Kind == ComponentChangeKind.Removed);
        Assert.Equal("pkg:npm/d", removedChange.ComponentKey);
        Assert.Equal("sha256:layer1b", removedChange.RemovingLayer);
        Assert.Null(removedChange.IntroducingLayer);

        var json = DiffJsonSerializer.Serialize(document);
        using var parsed = JsonDocument.Parse(json);
        var root = parsed.RootElement;
        Assert.Equal("inventory", root.GetProperty("view").GetString());
        var generatedAt = DateTimeOffset.Parse(root.GetProperty("generatedAt").GetString()!, CultureInfo.InvariantCulture);
        Assert.Equal(request.GeneratedAt, generatedAt);
        Assert.Equal("sha256:old", root.GetProperty("oldImageDigest").GetString());
        Assert.Equal("sha256:new", root.GetProperty("newImageDigest").GetString());

        var summaryJson = root.GetProperty("summary");
        Assert.Equal(1, summaryJson.GetProperty("added").GetInt32());
        Assert.Equal(1, summaryJson.GetProperty("removed").GetInt32());
        Assert.Equal(1, summaryJson.GetProperty("versionChanged").GetInt32());
        Assert.Equal(1, summaryJson.GetProperty("metadataChanged").GetInt32());

        var layersJson = root.GetProperty("layers");
        Assert.Equal(3, layersJson.GetArrayLength());

        var layer2Json = layersJson[0];
        Assert.Equal("sha256:layer2", layer2Json.GetProperty("layerDigest").GetString());
        var layer2Changes = layer2Json.GetProperty("changes");
        Assert.Equal(1, layer2Changes.GetArrayLength());
        var versionChangeJson = layer2Changes.EnumerateArray().Single();
        Assert.Equal("versionChanged", versionChangeJson.GetProperty("kind").GetString());
        Assert.Equal("pkg:npm/a", versionChangeJson.GetProperty("componentKey").GetString());
        Assert.Equal("sha256:layer2", versionChangeJson.GetProperty("introducingLayer").GetString());
        Assert.Equal("sha256:layer1b", versionChangeJson.GetProperty("removingLayer").GetString());
        Assert.Equal("1.1.0", versionChangeJson.GetProperty("newComponent").GetProperty("identity").GetProperty("version").GetString());

        var layer3Json = layersJson[1];
        Assert.Equal("sha256:layer3", layer3Json.GetProperty("layerDigest").GetString());
        var layer3Changes = layer3Json.GetProperty("changes");
        Assert.Equal(2, layer3Changes.GetArrayLength());
        var layer3ChangeArray = layer3Changes.EnumerateArray().ToArray();
        var metadataChangeJson = layer3ChangeArray[0];
        Assert.Equal("metadataChanged", metadataChangeJson.GetProperty("kind").GetString());
        Assert.Equal("pkg:npm/b", metadataChangeJson.GetProperty("componentKey").GetString());
        Assert.Equal("sha256:layer3", metadataChangeJson.GetProperty("introducingLayer").GetString());
        Assert.Equal("sha256:layer1", metadataChangeJson.GetProperty("removingLayer").GetString());
        Assert.True(metadataChangeJson.GetProperty("newComponent").GetProperty("usage").GetProperty("usedByEntrypoint").GetBoolean());
        Assert.False(metadataChangeJson.GetProperty("oldComponent").GetProperty("usage").GetProperty("usedByEntrypoint").GetBoolean());

        var addedJson = layer3ChangeArray[1];
        Assert.Equal("added", addedJson.GetProperty("kind").GetString());
        Assert.Equal("pkg:npm/c", addedJson.GetProperty("componentKey").GetString());
        Assert.Equal("sha256:layer3", addedJson.GetProperty("introducingLayer").GetString());
        Assert.False(addedJson.TryGetProperty("removingLayer", out _));

        var removedLayerJson = layersJson[2];
        Assert.Equal("sha256:layer1b", removedLayerJson.GetProperty("layerDigest").GetString());
        var removedChanges = removedLayerJson.GetProperty("changes");
        Assert.Equal(1, removedChanges.GetArrayLength());
        var removedJson = removedChanges.EnumerateArray().Single();
        Assert.Equal("removed", removedJson.GetProperty("kind").GetString());
        Assert.Equal("pkg:npm/d", removedJson.GetProperty("componentKey").GetString());
        Assert.Equal("sha256:layer1b", removedJson.GetProperty("removingLayer").GetString());
        Assert.False(removedJson.TryGetProperty("introducingLayer", out _));
    }

    [Fact]
    public void Compute_UsageView_FiltersComponents()
    {
        var oldFragments = new[]
        {
            LayerComponentFragment.Create("sha256:base", new[]
            {
                CreateComponent("pkg:npm/a", "1", "sha256:base", usage: ComponentUsage.Create(false)),
            })
        };

        var newFragments = new[]
        {
            LayerComponentFragment.Create("sha256:new", new[]
            {
                CreateComponent("pkg:npm/a", "1", "sha256:new", usage: ComponentUsage.Create(false)),
                CreateComponent("pkg:npm/b", "1", "sha256:new", usage: ComponentUsage.Create(true, new[] { "/entry" })),
            })
        };

        var request = new ComponentDiffRequest
        {
            OldGraph = ComponentGraphBuilder.Build(oldFragments),
            NewGraph = ComponentGraphBuilder.Build(newFragments),
            View = SbomView.Usage,
            GeneratedAt = DateTimeOffset.UtcNow,
        };

        var differ = new ComponentDiffer();
        var document = differ.Compute(request);

        Assert.Single(document.Layers);
        var layer = document.Layers[0];
        Assert.Single(layer.Changes);
        Assert.Equal(ComponentChangeKind.Added, layer.Changes[0].Kind);
        Assert.Equal("pkg:npm/b", layer.Changes[0].ComponentKey);

        var json = DiffJsonSerializer.Serialize(document);
        using var parsed = JsonDocument.Parse(json);
        Assert.Equal("usage", parsed.RootElement.GetProperty("view").GetString());
        Assert.Equal(1, parsed.RootElement.GetProperty("summary").GetProperty("added").GetInt32());
        Assert.False(parsed.RootElement.TryGetProperty("oldImageDigest", out _));
        Assert.False(parsed.RootElement.TryGetProperty("newImageDigest", out _));
    }

    [Fact]
    public void Compute_MetadataChange_WhenEvidenceDiffers()
    {
        var oldFragments = new[]
        {
            LayerComponentFragment.Create("sha256:underlay", new[]
            {
                CreateComponent(
                    "pkg:npm/a",
                    version: "1.0.0",
                    layer: "sha256:underlay",
                    usage: ComponentUsage.Create(false),
                    evidence: new[] { ComponentEvidence.FromPath("/workspace/package-lock.json") }),
            }),
        };

        var newFragments = new[]
        {
            LayerComponentFragment.Create("sha256:overlay", new[]
            {
                CreateComponent(
                    "pkg:npm/a",
                    version: "1.0.0",
                    layer: "sha256:overlay",
                    usage: ComponentUsage.Create(false),
                    evidence: new[]
                    {
                        ComponentEvidence.FromPath("/workspace/package-lock.json"),
                        ComponentEvidence.FromPath("/workspace/yarn.lock"),
                    }),
            }),
        };

        var request = new ComponentDiffRequest
        {
            OldGraph = ComponentGraphBuilder.Build(oldFragments),
            NewGraph = ComponentGraphBuilder.Build(newFragments),
            GeneratedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero),
        };

        var differ = new ComponentDiffer();
        var document = differ.Compute(request);

        Assert.Equal(0, document.Summary.Added);
        Assert.Equal(0, document.Summary.Removed);
        Assert.Equal(0, document.Summary.VersionChanged);
        Assert.Equal(1, document.Summary.MetadataChanged);

        var layer = Assert.Single(document.Layers);
        Assert.Equal("sha256:overlay", layer.LayerDigest);

        var change = Assert.Single(layer.Changes);
        Assert.Equal(ComponentChangeKind.MetadataChanged, change.Kind);
        Assert.Equal("sha256:overlay", change.IntroducingLayer);
        Assert.Equal("sha256:underlay", change.RemovingLayer);
@@ -339,7 +339,7 @@ public sealed class ComponentDifferTests
        Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", changeJson.GetProperty("oldComponent").GetProperty("metadata").GetProperty("buildId").GetString());
        Assert.Equal("6e0d8f6aa1b2c3d4e5f60718293a4b5c6d7e8f90", changeJson.GetProperty("newComponent").GetProperty("metadata").GetProperty("buildId").GetString());
    }

    private static ComponentRecord CreateComponent(
        string key,
        string version,
@@ -1,26 +1,26 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Emit.Composition;
using Xunit;

namespace StellaOps.Scanner.Emit.Tests.Composition;

public sealed class CycloneDxComposerTests
{
    [Fact]
    public void Compose_ProducesInventoryAndUsageArtifacts()
    {
        var request = BuildRequest();
        var composer = new CycloneDxComposer();

        var result = composer.Compose(request);

        Assert.NotNull(result.Inventory);
        Assert.StartsWith("urn:uuid:", result.Inventory.SerialNumber, StringComparison.Ordinal);
        Assert.Equal("application/vnd.cyclonedx+json; version=1.6", result.Inventory.JsonMediaType);
        Assert.Equal("application/vnd.cyclonedx+protobuf; version=1.6", result.Inventory.ProtobufMediaType);
        Assert.Equal(2, result.Inventory.Components.Length);
@@ -64,17 +64,17 @@ public sealed class CycloneDxComposerTests
        var usageVulns = usageVulnerabilities.EnumerateArray().ToArray();
        Assert.Single(usageVulns);
        Assert.Equal("finding-a", usageVulns[0].GetProperty("bom-ref").GetString());
    }

    [Fact]
    public void Compose_IsDeterministic()
    {
        var request = BuildRequest();
        var composer = new CycloneDxComposer();

        var first = composer.Compose(request);
        var second = composer.Compose(request);

        Assert.Equal(first.Inventory.JsonSha256, second.Inventory.JsonSha256);
        Assert.Equal(first.Inventory.ContentHash, first.Inventory.JsonSha256);
        Assert.Equal(first.Inventory.ProtobufSha256, second.Inventory.ProtobufSha256);
@@ -99,20 +99,20 @@ public sealed class CycloneDxComposerTests
        Assert.Equal(result.CompositionRecipeSha256.Length, 64);
        Assert.NotEmpty(result.CompositionRecipeJson);
    }

    private static SbomCompositionRequest BuildRequest()
    {
        var fragments = new[]
        {
            LayerComponentFragment.Create("sha256:layer1", new[]
            {
                new ComponentRecord
                {
                    Identity = ComponentIdentity.Create("pkg:npm/a", "component-a", "1.0.0", "pkg:npm/a@1.0.0", "library"),
                    LayerDigest = "sha256:layer1",
                    Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/a/package.json")),
                    Dependencies = ImmutableArray.Create("pkg:npm/b"),
                    Usage = ComponentUsage.Create(true, new[] { "/app/start.sh" }),
                    Metadata = new ComponentMetadata
                    {
                        Scope = "runtime",
@@ -127,35 +127,35 @@ public sealed class CycloneDxComposerTests
                    },
                }
            }),
            LayerComponentFragment.Create("sha256:layer2", new[]
            {
                new ComponentRecord
                {
                    Identity = ComponentIdentity.Create("pkg:npm/b", "component-b", "2.0.0", "pkg:npm/b@2.0.0", "library"),
                    LayerDigest = "sha256:layer2",
                    Evidence = ImmutableArray.Create(ComponentEvidence.FromPath("/app/node_modules/b/package.json")),
                    Usage = ComponentUsage.Create(false),
                    Metadata = new ComponentMetadata
                    {
                        Scope = "development",
                        Properties = new Dictionary<string, string>
                        {
                            ["stellaops.os.analyzer"] = "language-node",
                        },
                    },
                }
            })
        };

        var image = new ImageArtifactDescriptor
        {
            ImageDigest = "sha256:1234567890abcdef",
            ImageReference = "registry.example.com/app/service:1.2.3",
            Repository = "registry.example.com/app/service",
            Tag = "1.2.3",
            Architecture = "amd64",
        };

        return SbomCompositionRequest.Create(
            image,
            fragments,
@@ -204,22 +204,22 @@ public sealed class CycloneDxComposerTests
                new KeyValuePair<string, double>("trustWeight", 0.85))
            }
        });
    }

    private static void ValidateJson(byte[] data, int expectedComponentCount, string expectedView)
    {
        using var document = JsonDocument.Parse(data);
        var root = document.RootElement;

        Assert.True(root.TryGetProperty("metadata", out var metadata), "metadata property missing");
        var properties = metadata.GetProperty("properties");
        var viewProperty = properties.EnumerateArray()
            .Single(prop => string.Equals(prop.GetProperty("name").GetString(), "stellaops:sbom.view", StringComparison.Ordinal));
        Assert.Equal(expectedView, viewProperty.GetProperty("value").GetString());

        var components = root.GetProperty("components").EnumerateArray().ToArray();
        Assert.Equal(expectedComponentCount, components.Length);

        var names = components.Select(component => component.GetProperty("name").GetString()!).ToArray();
        Assert.Equal(names, names.OrderBy(n => n, StringComparer.Ordinal).ToArray());
@@ -1,52 +1,52 @@
using System.Collections.Immutable;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Emit.Composition;
using Xunit;

namespace StellaOps.Scanner.Emit.Tests.Composition;

public class ScanAnalysisCompositionBuilderTests
{
    [Fact]
    public void FromAnalysis_BuildsRequest_WhenFragmentsPresent()
    {
        var analysis = new ScanAnalysisStore();
        var fragment = LayerComponentFragment.Create(
            "sha256:layer",
            new[]
            {
                new ComponentRecord
                {
                    Identity = ComponentIdentity.Create("pkg:test/a", "a", "1.0.0", "pkg:test/a@1.0.0", "library"),
                    LayerDigest = "sha256:layer",
                    Evidence = ImmutableArray<ComponentEvidence>.Empty,
                    Dependencies = ImmutableArray<string>.Empty,
                    Metadata = null,
                    Usage = ComponentUsage.Unused,
                }
            });

        analysis.AppendLayerFragments(new[] { fragment });

        var request = ScanAnalysisCompositionBuilder.FromAnalysis(
            analysis,
            new ImageArtifactDescriptor { ImageDigest = "sha256:image" },
            DateTimeOffset.UtcNow,
            generatorName: "test",
            generatorVersion: "1.0.0");

        Assert.Equal("sha256:image", request.Image.ImageDigest);
        Assert.Single(request.LayerFragments);
        Assert.Equal(fragment.LayerDigest, request.LayerFragments[0].LayerDigest);
    }

    [Fact]
    public void BuildComponentGraph_ReturnsEmpty_WhenNoFragments()
    {
        var analysis = new ScanAnalysisStore();
        var graph = ScanAnalysisCompositionBuilder.BuildComponentGraph(analysis);

        Assert.Empty(graph.Components);
        Assert.Empty(graph.Layers);
    }
}
@@ -1,141 +1,141 @@
using System;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using Collections.Special;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Emit.Index;

namespace StellaOps.Scanner.Emit.Tests.Index;

public sealed class BomIndexBuilderTests
{
    [Fact]
    public void Build_GeneratesDeterministicBinaryIndex_WithUsageBitmaps()
    {
        var graph = ComponentGraphBuilder.Build(new[]
        {
            LayerComponentFragment.Create("sha256:layer1", new[]
            {
                CreateComponent("pkg:npm/a", "1.0.0", "sha256:layer1", usageEntrypoints: new[] { "/app/start.sh" }),
                CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer1"),
            }),
            LayerComponentFragment.Create("sha256:layer2", new[]
            {
                CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer2"),
                CreateComponent("pkg:npm/c", "3.1.0", "sha256:layer2", usageEntrypoints: new[] { "/app/init.sh" }),
            }),
        });

        var request = new BomIndexBuildRequest
        {
            ImageDigest = "sha256:image",
            Graph = graph,
            GeneratedAt = new DateTimeOffset(2025, 10, 19, 9, 45, 0, TimeSpan.Zero),
        };

        var builder = new BomIndexBuilder();
        var artifact = builder.Build(request);
        var second = builder.Build(request);

        Assert.Equal(artifact.Sha256, second.Sha256);
        Assert.Equal(artifact.Bytes, second.Bytes);
        Assert.Equal(2, artifact.LayerCount);
        Assert.Equal(3, artifact.ComponentCount);
        Assert.Equal(2, artifact.EntrypointCount);

        using var reader = new BinaryReader(new MemoryStream(artifact.Bytes), System.Text.Encoding.UTF8, leaveOpen: false);
        ValidateHeader(reader, request);
        var layers = ReadTable(reader, artifact.LayerCount);
        Assert.Equal(new[] { "sha256:layer1", "sha256:layer2" }, layers);

        var purls = ReadTable(reader, artifact.ComponentCount);
        Assert.Equal(new[] { "pkg:npm/a", "pkg:npm/b", "pkg:npm/c" }, purls);

        var componentBitmaps = ReadBitmaps(reader, artifact.ComponentCount);
        Assert.Equal(new[] { new[] { 0 }, new[] { 0, 1 }, new[] { 1 } }, componentBitmaps);

        var entrypoints = ReadTable(reader, artifact.EntrypointCount);
        Assert.Equal(new[] { "/app/init.sh", "/app/start.sh" }, entrypoints);

        var usageBitmaps = ReadBitmaps(reader, artifact.ComponentCount);
        Assert.Equal(new[] { new[] { 1 }, Array.Empty<int>(), new[] { 0 } }, usageBitmaps);
    }

    private static void ValidateHeader(BinaryReader reader, BomIndexBuildRequest request)
    {
        var magic = reader.ReadBytes(7);
        Assert.Equal("BOMIDX1", System.Text.Encoding.ASCII.GetString(magic));

        var version = reader.ReadUInt16();
        Assert.Equal(1u, version);

        var flags = reader.ReadUInt16();
        Assert.Equal(0x1, flags);

        var digestLength = reader.ReadUInt16();
        var digestBytes = reader.ReadBytes(digestLength);
        Assert.Equal(request.ImageDigest, System.Text.Encoding.UTF8.GetString(digestBytes));

        var unixMicroseconds = reader.ReadInt64();
        var expectedMicroseconds = request.GeneratedAt.ToUniversalTime().ToUnixTimeMilliseconds() * 1000L;
        expectedMicroseconds += request.GeneratedAt.ToUniversalTime().Ticks % TimeSpan.TicksPerMillisecond / 10;
        Assert.Equal(expectedMicroseconds, unixMicroseconds);

        var layers = reader.ReadUInt32();
        var components = reader.ReadUInt32();
        var entrypoints = reader.ReadUInt32();

        Assert.Equal(2u, layers);
        Assert.Equal(3u, components);
        Assert.Equal(2u, entrypoints);
    }

    private static string[] ReadTable(BinaryReader reader, int count)
    {
        var values = new string[count];
        for (var i = 0; i < count; i++)
        {
            var length = reader.ReadUInt16();
            var bytes = reader.ReadBytes(length);
            values[i] = System.Text.Encoding.UTF8.GetString(bytes);
        }

        return values;
    }

    private static int[][] ReadBitmaps(BinaryReader reader, int count)
    {
        var result = new int[count][];
        for (var i = 0; i < count; i++)
        {
            var length = reader.ReadUInt32();
            if (length == 0)
            {
                result[i] = Array.Empty<int>();
                continue;
            }

            var bytes = reader.ReadBytes((int)length);
            using var ms = new MemoryStream(bytes, writable: false);
            var bitmap = RoaringBitmap.Deserialize(ms);
            result[i] = bitmap.ToArray();
        }

        return result;
    }

    private static ComponentRecord CreateComponent(string key, string version, string layerDigest, string[]? usageEntrypoints = null)
    {
        var usage = usageEntrypoints is null
            ? ComponentUsage.Unused
            : ComponentUsage.Create(true, usageEntrypoints);

        return new ComponentRecord
        {
            Identity = ComponentIdentity.Create(key, key.Split('/', 2)[^1], version, key, "library"),
            LayerDigest = layerDigest,
            Usage = usage,
        };
    }
}
@@ -1,69 +1,69 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Emit.Composition;
using StellaOps.Scanner.Emit.Index;
using StellaOps.Scanner.Emit.Packaging;

namespace StellaOps.Scanner.Emit.Tests.Packaging;

public sealed class ScannerArtifactPackageBuilderTests
{
    [Fact]
    public void BuildPackage_ProducesDescriptorsAndManifest()
    {
        var fragments = new[]
        {
            LayerComponentFragment.Create("sha256:layer1", new[]
            {
                CreateComponent(
                    "pkg:npm/a",
                    "1.0.0",
                    "sha256:layer1",
                    usage: ComponentUsage.Create(true, new[] { "/app/start.sh" }),
                    metadata: new Dictionary<string, string>
                    {
                        ["stellaops.os.analyzer"] = "apk",
                        ["stellaops.os.architecture"] = "x86_64",
                    }),
                CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer1"),
            }),
            LayerComponentFragment.Create("sha256:layer2", new[]
            {
                CreateComponent("pkg:npm/b", "2.0.0", "sha256:layer2"),
                CreateComponent("pkg:npm/c", "3.0.0", "sha256:layer2", usage: ComponentUsage.Create(true, new[] { "/app/init.sh" })),
            })
        };

        var request = SbomCompositionRequest.Create(
            new ImageArtifactDescriptor
            {
                ImageDigest = "sha256:image",
                ImageReference = "registry.example/app:latest",
                Repository = "registry.example/app",
                Tag = "latest",
            },
            fragments,
            new DateTimeOffset(2025, 10, 19, 12, 30, 0, TimeSpan.Zero),
            generatorName: "StellaOps.Scanner",
            generatorVersion: "0.10.0");

        var composer = new CycloneDxComposer();
        var composition = composer.Compose(request);

        var indexBuilder = new BomIndexBuilder();
        var bomIndex = indexBuilder.Build(new BomIndexBuildRequest
        {
            ImageDigest = request.Image.ImageDigest,
            Graph = composition.Graph,
            GeneratedAt = request.GeneratedAt,
        });

        var packageBuilder = new ScannerArtifactPackageBuilder();
        var package = packageBuilder.Build(request.Image.ImageDigest, request.GeneratedAt, composition, bomIndex);

        Assert.Equal(6, package.Artifacts.Length); // inventory JSON+PB, usage JSON+PB, index, composition recipe

        var kinds = package.Manifest.Artifacts.Select(entry => entry.Kind).ToArray();
@@ -74,24 +74,24 @@ public sealed class ScannerArtifactPackageBuilderTests
        var root = document.RootElement;
        Assert.Equal("sha256:image", root.GetProperty("imageDigest").GetString());
        Assert.Equal(6, root.GetProperty("artifacts").GetArrayLength());

        var usageEntry = root.GetProperty("artifacts").EnumerateArray().First(element => element.GetProperty("kind").GetString() == "sbom-usage");
Assert.Equal("application/vnd.cyclonedx+json; version=1.6; view=usage", usageEntry.GetProperty("mediaType").GetString());
|
||||
}
|
||||
|
||||
private static ComponentRecord CreateComponent(string key, string version, string layerDigest, ComponentUsage? usage = null, IReadOnlyDictionary<string, string>? metadata = null)
|
||||
{
|
||||
return new ComponentRecord
|
||||
{
|
||||
Identity = ComponentIdentity.Create(key, key.Split('/', 2)[^1], version, key, "library"),
|
||||
LayerDigest = layerDigest,
|
||||
Usage = usage ?? ComponentUsage.Unused,
|
||||
Metadata = metadata is null
|
||||
? null
|
||||
: new ComponentMetadata
|
||||
{
|
||||
Properties = metadata,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,86 +1,86 @@
using System.Collections.Immutable;
using System.IO;
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;

namespace StellaOps.Scanner.EntryTrace.Tests;

public sealed class EntryTraceImageContextFactoryTests
{
    [Fact]
    public void Create_UsesEnvironmentAndEntrypointFromConfig()
    {
        var json = """
        {
          "config": {
            "Env": ["PATH=/custom/bin:/usr/bin", "FOO=bar"],
            "Entrypoint": ["/bin/sh", "-c"],
            "Cmd": ["./start.sh"],
            "WorkingDir": "/srv/app",
            "User": "1000:1000"
          }
        }
        """;

        var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json)));
        var options = new EntryTraceAnalyzerOptions
        {
            DefaultPath = "/default/bin"
        };

        var fs = new TestRootFileSystem();
        var imageContext = EntryTraceImageContextFactory.Create(
            config,
            fs,
            options,
            "sha256:testimage",
            "scan-001",
            NullLogger.Instance);

        Assert.Equal("/bin/sh", imageContext.Entrypoint.Entrypoint[0]);
        Assert.Equal("./start.sh", imageContext.Entrypoint.Command[0]);

        Assert.Equal("/srv/app", imageContext.Context.WorkingDirectory);
        Assert.Equal("1000:1000", imageContext.Context.User);
        Assert.Equal("sha256:testimage", imageContext.Context.ImageDigest);
        Assert.Equal("scan-001", imageContext.Context.ScanId);

        Assert.True(imageContext.Context.Environment.ContainsKey("FOO"));
        Assert.Equal("bar", imageContext.Context.Environment["FOO"]);

        Assert.Equal("/custom/bin:/usr/bin", string.Join(":", imageContext.Context.Path));
    }

    [Fact]
    public void Create_FallsBackToDefaultPathWhenMissing()
    {
        var json = """
        {
          "config": {
            "Env": ["FOO=bar"],
            "Cmd": ["node", "server.js"]
          }
        }
        """;

        var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json)));
        var options = new EntryTraceAnalyzerOptions
        {
            DefaultPath = "/usr/local/sbin:/usr/local/bin"
        };

        var fs = new TestRootFileSystem();
        var imageContext = EntryTraceImageContextFactory.Create(
            config,
            fs,
            options,
            "sha256:abc",
            "scan-xyz",
            NullLogger.Instance);

        Assert.Equal("/usr/local/sbin:/usr/local/bin", string.Join(":", imageContext.Context.Path));
        Assert.Equal("root", imageContext.Context.User);
        Assert.Equal("/", imageContext.Context.WorkingDirectory);
    }
}
@@ -4,20 +4,20 @@ using System.IO;
using System.Text;
using StellaOps.Scanner.EntryTrace.FileSystem;
using Xunit;

namespace StellaOps.Scanner.EntryTrace.Tests;

public sealed class LayeredRootFileSystemTests : IDisposable
{
    private readonly string _tempRoot;

    public LayeredRootFileSystemTests()
    {
        _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-layerfs-{Guid.NewGuid():n}");
        Directory.CreateDirectory(_tempRoot);
    }

    [Fact]
    public void FromDirectories_HandlesWhiteoutsAndResolution()
    {
        var layer1 = CreateLayerDirectory("layer1");
@@ -33,32 +33,32 @@ public sealed class LayeredRootFileSystemTests : IDisposable
            UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
            UnixFileMode.OtherRead | UnixFileMode.OtherExecute);
#endif

        var optDirectory1 = Path.Combine(layer1, "opt");
        Directory.CreateDirectory(optDirectory1);
        File.WriteAllText(Path.Combine(optDirectory1, "setup.sh"), "echo setup\n");

        var optDirectory2 = Path.Combine(layer2, "opt");
        Directory.CreateDirectory(optDirectory2);
        File.WriteAllText(Path.Combine(optDirectory2, ".wh.setup.sh"), string.Empty);

        var fs = LayeredRootFileSystem.FromDirectories(new[]
        {
            new LayeredRootFileSystem.LayerDirectory("sha256:layer1", layer1),
            new LayeredRootFileSystem.LayerDirectory("sha256:layer2", layer2)
        });

        Assert.True(fs.TryResolveExecutable("entrypoint.sh", new[] { "/usr/bin" }, out var descriptor));
        Assert.Equal("/usr/bin/entrypoint.sh", descriptor.Path);
        Assert.Equal("sha256:layer1", descriptor.LayerDigest);

        Assert.True(fs.TryReadAllText("/usr/bin/entrypoint.sh", out var textDescriptor, out var content));
        Assert.Equal(descriptor.Path, textDescriptor.Path);
        Assert.Contains("echo layer1", content);

        Assert.False(fs.TryReadAllText("/opt/setup.sh", out _, out _));

        var optEntries = fs.EnumerateDirectory("/opt");
        Assert.DoesNotContain(optEntries, entry => entry.Path.EndsWith("setup.sh", StringComparison.Ordinal));
    }
@@ -81,122 +81,122 @@ public sealed class LayeredRootFileSystemTests : IDisposable
        Assert.Equal("abcd", Encoding.UTF8.GetString(preview.Span));
    }

    [Fact]
    public void FromArchives_ResolvesSymlinkAndWhiteout()
    {
        var layer1Path = Path.Combine(_tempRoot, "layer1.tar");
        var layer2Path = Path.Combine(_tempRoot, "layer2.tar");

        CreateArchive(layer1Path, writer =>
        {
            var scriptEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/local/bin/start.sh");
            scriptEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute |
                UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
            scriptEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho start\n"));
            writer.WriteEntry(scriptEntry);

            var oldScript = new PaxTarEntry(TarEntryType.RegularFile, "opt/old.sh");
            oldScript.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute |
                UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
            oldScript.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("echo old\n"));
            writer.WriteEntry(oldScript);
        });

        CreateArchive(layer2Path, writer =>
        {
            var symlinkEntry = new PaxTarEntry(TarEntryType.SymbolicLink, "usr/bin/start.sh");
            symlinkEntry.LinkName = "/usr/local/bin/start.sh";
            writer.WriteEntry(symlinkEntry);

            var whiteout = new PaxTarEntry(TarEntryType.RegularFile, "opt/.wh.old.sh");
            whiteout.DataStream = new MemoryStream(Array.Empty<byte>());
            writer.WriteEntry(whiteout);
        });

        var fs = LayeredRootFileSystem.FromArchives(new[]
        {
            new LayeredRootFileSystem.LayerArchive("sha256:base", layer1Path),
            new LayeredRootFileSystem.LayerArchive("sha256:update", layer2Path)
        });

        Assert.True(fs.TryResolveExecutable("start.sh", new[] { "/usr/bin" }, out var descriptor));
        Assert.Equal("/usr/local/bin/start.sh", descriptor.Path);
        Assert.Equal("sha256:base", descriptor.LayerDigest);

        Assert.True(fs.TryReadAllText("/usr/bin/start.sh", out var resolvedDescriptor, out var content));
        Assert.Equal(descriptor.Path, resolvedDescriptor.Path);
        Assert.Contains("echo start", content);

        Assert.False(fs.TryReadAllText("/opt/old.sh", out _, out _));
    }

    [Fact]
    public void FromArchives_ResolvesHardLinkContent()
    {
        var baseLayer = Path.Combine(_tempRoot, "base.tar");
        var hardLinkLayer = Path.Combine(_tempRoot, "hardlink.tar");

        CreateArchive(baseLayer, writer =>
        {
            var baseEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/bin/tool.sh");
            baseEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute |
                UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
            baseEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho tool\n"));
            writer.WriteEntry(baseEntry);
        });

        CreateArchive(hardLinkLayer, writer =>
        {
            var hardLink = new PaxTarEntry(TarEntryType.HardLink, "bin/tool.sh")
            {
                LinkName = "/usr/bin/tool.sh",
                Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute |
                    UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                    UnixFileMode.OtherRead | UnixFileMode.OtherExecute
            };
            writer.WriteEntry(hardLink);
        });

        var fs = LayeredRootFileSystem.FromArchives(new[]
        {
            new LayeredRootFileSystem.LayerArchive("sha256:base", baseLayer),
            new LayeredRootFileSystem.LayerArchive("sha256:hardlink", hardLinkLayer)
        });

        Assert.True(fs.TryReadAllText("/bin/tool.sh", out var descriptor, out var content));
        Assert.Equal("/usr/bin/tool.sh", descriptor.Path);
        Assert.Contains("echo tool", content);
    }

    private string CreateLayerDirectory(string name)
    {
        var path = Path.Combine(_tempRoot, name);
        Directory.CreateDirectory(path);
        return path;
    }

    private static void CreateArchive(string path, Action<TarWriter> writerAction)
    {
        using var stream = File.Create(path);
        using var writer = new TarWriter(stream, leaveOpen: false);
        writerAction(writer);
    }

    public void Dispose()
    {
        try
        {
            if (Directory.Exists(_tempRoot))
            {
                Directory.Delete(_tempRoot, recursive: true);
            }
        }
        catch
        {
            // ignore cleanup failures
        }
    }
}
|
||||
@@ -1,33 +1,33 @@
using StellaOps.Scanner.EntryTrace.Parsing;
using Xunit;

namespace StellaOps.Scanner.EntryTrace.Tests;

public sealed class ShellParserTests
{
    [Fact]
    public void Parse_ProducesDeterministicNodes()
    {
        const string script = """
            #!/bin/sh
            source /opt/init.sh
            if [ -f /etc/profile ]; then
            . /etc/profile
            fi

            run-parts /etc/entry.d
            exec python -m app.main --flag
            """;

        var first = ShellParser.Parse(script);
        var second = ShellParser.Parse(script);

        Assert.Equal(first.Nodes.Length, second.Nodes.Length);
        var actual = first.Nodes.Select(n => n.GetType().Name).ToArray();
        var expected = new[] { nameof(ShellIncludeNode), nameof(ShellIfNode), nameof(ShellRunPartsNode), nameof(ShellExecNode) };
        Assert.Equal(expected, actual);

        var actualSecond = second.Nodes.Select(n => n.GetType().Name).ToArray();
        Assert.Equal(expected, actualSecond);
    }
}
@@ -1,21 +1,21 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Text;
using StellaOps.Scanner.EntryTrace.FileSystem;

namespace StellaOps.Scanner.EntryTrace.Tests;

internal sealed class TestRootFileSystem : IRootFileSystem
{
    private readonly Dictionary<string, FileEntry> _entries = new(StringComparer.Ordinal);
    private readonly HashSet<string> _directories = new(StringComparer.Ordinal);

    public TestRootFileSystem()
    {
        _directories.Add("/");
    }

    public void AddFile(string path, string content, bool executable = true, string? layer = "sha256:layer-a")
    {
        var normalized = Normalize(path);
@@ -40,42 +40,42 @@ internal sealed class TestRootFileSystem : IRootFileSystem

        _entries[normalized] = FileEntry.Create(normalized, content, text: null, executable, layer, isDirectory: false);
    }

    public void AddDirectory(string path)
    {
        var normalized = Normalize(path);
        EnsureDirectoryChain(normalized);
    }

    public bool TryResolveExecutable(string name, IReadOnlyList<string> searchPaths, out RootFileDescriptor descriptor)
    {
        if (name.Contains('/', StringComparison.Ordinal))
        {
            var normalized = Normalize(name);
            if (_entries.TryGetValue(normalized, out var file) && file.IsExecutable)
            {
                descriptor = file.ToDescriptor();
                return true;
            }

            descriptor = null!;
            return false;
        }

        foreach (var prefix in searchPaths)
        {
            var candidate = Combine(prefix, name);
            if (_entries.TryGetValue(candidate, out var file) && file.IsExecutable)
            {
                descriptor = file.ToDescriptor();
                return true;
            }
        }

        descriptor = null!;
        return false;
    }

    public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content)
    {
        var normalized = Normalize(path);
@@ -103,7 +103,7 @@ internal sealed class TestRootFileSystem : IRootFileSystem
        content = default;
        return false;
    }

    public ImmutableArray<RootFileDescriptor> EnumerateDirectory(string path)
    {
        var normalized = Normalize(path);
@@ -136,57 +136,57 @@ internal sealed class TestRootFileSystem : IRootFileSystem
        entries.Sort(static (left, right) => string.CompareOrdinal(left.Path, right.Path));
        return entries.ToImmutableArray();
    }

    public bool DirectoryExists(string path)
    {
        var normalized = Normalize(path);
        return _directories.Contains(normalized);
    }

    private static string Combine(string prefix, string name)
    {
        var normalizedPrefix = Normalize(prefix);
        if (normalizedPrefix == "/")
        {
            return Normalize("/" + name);
        }

        return Normalize($"{normalizedPrefix}/{name}");
    }

    private static string Normalize(string path)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            return "/";
        }

        var text = path.Replace('\\', '/').Trim();
        if (!text.StartsWith("/", StringComparison.Ordinal))
        {
            text = "/" + text;
        }

        var parts = new List<string>();
        foreach (var part in text.Split('/', StringSplitOptions.RemoveEmptyEntries))
        {
            if (part == ".")
            {
                continue;
            }

            if (part == "..")
            {
                if (parts.Count > 0)
                {
                    parts.RemoveAt(parts.Count - 1);
                }
                continue;
            }

            parts.Add(part);
        }

        return "/" + string.Join('/', parts);
    }

@@ -207,7 +207,7 @@ internal sealed class TestRootFileSystem : IRootFileSystem
            _directories.Add(current);
        }
    }

    private sealed class FileEntry
    {
        private readonly byte[] _content;
@@ -300,4 +300,4 @@ internal sealed class TestRootFileSystem : IRootFileSystem

            return null;
        }
    }
}
@@ -1,390 +1,390 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Queue;
using Xunit;

namespace StellaOps.Scanner.Queue.Tests;

public sealed class QueueLeaseIntegrationTests
{
    private readonly ScannerQueueOptions _options = new()
    {
        MaxDeliveryAttempts = 3,
        RetryInitialBackoff = TimeSpan.FromMilliseconds(1),
        RetryMaxBackoff = TimeSpan.FromMilliseconds(5),
        DefaultLeaseDuration = TimeSpan.FromSeconds(5)
    };

    [Fact]
    public async Task Enqueue_ShouldDeduplicate_ByIdempotencyKey()
    {
        var clock = new FakeTimeProvider();
        var queue = new InMemoryScanQueue(_options, clock);

        var payload = new byte[] { 1, 2, 3 };
        var message = new ScanQueueMessage("job-1", payload)
        {
            IdempotencyKey = "idem-1"
        };

        var first = await queue.EnqueueAsync(message);
        first.Deduplicated.Should().BeFalse();

        var second = await queue.EnqueueAsync(message);
        second.Deduplicated.Should().BeTrue();
    }

    [Fact]
    public async Task Lease_ShouldExposeTraceId_FromQueuedMessage()
    {
        var clock = new FakeTimeProvider();
        var queue = new InMemoryScanQueue(_options, clock);

        var payload = new byte[] { 9 };
        var message = new ScanQueueMessage("job-trace", payload)
        {
            TraceId = "trace-123"
        };

        await queue.EnqueueAsync(message);

        var lease = await LeaseSingleAsync(queue, consumer: "worker-trace");
        lease.Should().NotBeNull();
        lease!.TraceId.Should().Be("trace-123");
    }

    [Fact]
    public async Task Lease_Acknowledge_ShouldRemoveFromQueue()
    {
        var clock = new FakeTimeProvider();
        var queue = new InMemoryScanQueue(_options, clock);

        var message = new ScanQueueMessage("job-ack", new byte[] { 42 });
        await queue.EnqueueAsync(message);

        var lease = await LeaseSingleAsync(queue, consumer: "worker-1");
        lease.Should().NotBeNull();

        await lease!.AcknowledgeAsync();

        var afterAck = await queue.LeaseAsync(new QueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(1)));
        afterAck.Should().BeEmpty();
    }

    [Fact]
    public async Task Release_WithRetry_ShouldDeadLetterAfterMaxAttempts()
    {
        var clock = new FakeTimeProvider();
        var queue = new InMemoryScanQueue(_options, clock);

        var message = new ScanQueueMessage("job-retry", new byte[] { 5 });
        await queue.EnqueueAsync(message);

        for (var attempt = 1; attempt <= _options.MaxDeliveryAttempts; attempt++)
        {
            var lease = await LeaseSingleAsync(queue, consumer: $"worker-{attempt}");
            lease.Should().NotBeNull();

            await lease!.ReleaseAsync(QueueReleaseDisposition.Retry);
        }

        queue.DeadLetters.Should().ContainSingle(dead => dead.JobId == "job-retry");
    }

    [Fact]
    public async Task Retry_ShouldIncreaseAttemptOnNextLease()
    {
        var clock = new FakeTimeProvider();
        var queue = new InMemoryScanQueue(_options, clock);

        await queue.EnqueueAsync(new ScanQueueMessage("job-retry-attempt", new byte[] { 77 }));

        var firstLease = await LeaseSingleAsync(queue, "worker-retry");
        firstLease.Should().NotBeNull();
        firstLease!.Attempt.Should().Be(1);

        await firstLease.ReleaseAsync(QueueReleaseDisposition.Retry);

        var secondLease = await LeaseSingleAsync(queue, "worker-retry");
        secondLease.Should().NotBeNull();
        secondLease!.Attempt.Should().Be(2);
    }

    private static async Task<IScanQueueLease?> LeaseSingleAsync(InMemoryScanQueue queue, string consumer)
    {
        var leases = await queue.LeaseAsync(new QueueLeaseRequest(consumer, 1, TimeSpan.FromSeconds(1)));
        return leases.FirstOrDefault();
    }

    private sealed class InMemoryScanQueue : IScanQueue
    {
        private readonly ScannerQueueOptions _options;
        private readonly TimeProvider _timeProvider;
        private readonly ConcurrentQueue<QueueEntry> _ready = new();
        private readonly ConcurrentDictionary<string, QueueEntry> _idempotency = new(StringComparer.Ordinal);
        private readonly ConcurrentDictionary<string, QueueEntry> _inFlight = new(StringComparer.Ordinal);
        private readonly List<QueueEntry> _deadLetters = new();
        private long _sequence;

        public InMemoryScanQueue(ScannerQueueOptions options, TimeProvider timeProvider)
        {
            _options = options;
            _timeProvider = timeProvider;
        }

        public IReadOnlyList<QueueEntry> DeadLetters => _deadLetters;

        public ValueTask<QueueEnqueueResult> EnqueueAsync(ScanQueueMessage message, CancellationToken cancellationToken = default)
        {
            var token = message.IdempotencyKey ?? message.JobId;
            if (_idempotency.TryGetValue(token, out var existing))
            {
                return ValueTask.FromResult(new QueueEnqueueResult(existing.SequenceId, true));
            }

            var entry = new QueueEntry(
                sequenceId: Interlocked.Increment(ref _sequence).ToString(),
                jobId: message.JobId,
                payload: message.Payload.ToArray(),
                idempotencyKey: token,
                attempt: 1,
                enqueuedAt: _timeProvider.GetUtcNow(),
                traceId: message.TraceId,
                attributes: message.Attributes is null
                    ? new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.Ordinal))
                    : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(message.Attributes, StringComparer.Ordinal)));

            _idempotency[token] = entry;
            _ready.Enqueue(entry);
            return ValueTask.FromResult(new QueueEnqueueResult(entry.SequenceId, false));
        }

        public ValueTask<IReadOnlyList<IScanQueueLease>> LeaseAsync(QueueLeaseRequest request, CancellationToken cancellationToken = default)
        {
            var now = _timeProvider.GetUtcNow();
            var leases = new List<IScanQueueLease>(request.BatchSize);

            while (leases.Count < request.BatchSize && _ready.TryDequeue(out var entry))
            {
                entry.Attempt = Math.Max(entry.Attempt, entry.Deliveries + 1);
                entry.Deliveries = entry.Attempt;
                entry.LastLeaseAt = now;
                _inFlight[entry.SequenceId] = entry;

                var lease = new InMemoryLease(
                    this,
                    entry,
                    request.Consumer,
                    now,
                    request.LeaseDuration);
                leases.Add(lease);
            }

            return ValueTask.FromResult<IReadOnlyList<IScanQueueLease>>(leases);
        }

        public ValueTask<IReadOnlyList<IScanQueueLease>> ClaimExpiredLeasesAsync(QueueClaimOptions options, CancellationToken cancellationToken = default)
        {
            var now = _timeProvider.GetUtcNow();
            var leases = _inFlight.Values
                .Where(entry => now - entry.LastLeaseAt >= options.MinIdleTime)
                .Take(options.BatchSize)
                .Select(entry => new InMemoryLease(this, entry, options.ClaimantConsumer, now, _options.DefaultLeaseDuration))
                .Cast<IScanQueueLease>()
                .ToList();

            return ValueTask.FromResult<IReadOnlyList<IScanQueueLease>>(leases);
        }

        internal Task AcknowledgeAsync(QueueEntry entry)
        {
            _inFlight.TryRemove(entry.SequenceId, out _);
            _idempotency.TryRemove(entry.IdempotencyKey, out _);
            return Task.CompletedTask;
        }

        internal Task<DateTimeOffset> RenewAsync(QueueEntry entry, TimeSpan leaseDuration)
        {
            var expires = _timeProvider.GetUtcNow().Add(leaseDuration);
            entry.LeaseExpiresAt = expires;
            return Task.FromResult(expires);
        }

        internal Task ReleaseAsync(QueueEntry entry, QueueReleaseDisposition disposition)
        {
            if (disposition == QueueReleaseDisposition.Retry && entry.Attempt >= _options.MaxDeliveryAttempts)
            {
                return DeadLetterAsync(entry, $"max-delivery-attempts:{entry.Attempt}");
            }

            if (disposition == QueueReleaseDisposition.Retry)
            {
                entry.Attempt++;
                _ready.Enqueue(entry);
            }
            else
            {
                _idempotency.TryRemove(entry.IdempotencyKey, out _);
            }

            _inFlight.TryRemove(entry.SequenceId, out _);
            return Task.CompletedTask;
        }

        internal Task DeadLetterAsync(QueueEntry entry, string reason)
        {
            entry.DeadLetterReason = reason;
            _inFlight.TryRemove(entry.SequenceId, out _);
            _idempotency.TryRemove(entry.IdempotencyKey, out _);
            _deadLetters.Add(entry);
            return Task.CompletedTask;
        }

        private sealed class InMemoryLease : IScanQueueLease
        {
            private readonly InMemoryScanQueue _owner;
            private readonly QueueEntry _entry;
            private int _completed;

            public InMemoryLease(
                InMemoryScanQueue owner,
                QueueEntry entry,
                string consumer,
                DateTimeOffset now,
                TimeSpan leaseDuration)
            {
                _owner = owner;
                _entry = entry;
                Consumer = consumer;
                MessageId = entry.SequenceId;
                JobId = entry.JobId;
                Payload = entry.Payload;
                Attempt = entry.Attempt;
                EnqueuedAt = entry.EnqueuedAt;
                LeaseExpiresAt = now.Add(leaseDuration);
                IdempotencyKey = entry.IdempotencyKey;
                TraceId = entry.TraceId;
                Attributes = entry.Attributes;
            }

            public string MessageId { get; }

            public string JobId { get; }

            public ReadOnlyMemory<byte> Payload { get; }

            public int Attempt { get; }

            public DateTimeOffset EnqueuedAt { get; }

            public DateTimeOffset LeaseExpiresAt { get; private set; }

            public string Consumer { get; }

            public string? IdempotencyKey { get; }

            public string? TraceId { get; }

            public IReadOnlyDictionary<string, string> Attributes { get; }

            public Task AcknowledgeAsync(CancellationToken cancellationToken = default)
            {
                if (TryComplete())
                {
                    return _owner.AcknowledgeAsync(_entry);
                }

                return Task.CompletedTask;
            }

            public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default)
            {
                return RenewInternalAsync(leaseDuration);
            }

            public Task ReleaseAsync(QueueReleaseDisposition disposition, CancellationToken cancellationToken = default)
            {
                if (TryComplete())
                {
                    return _owner.ReleaseAsync(_entry, disposition);
                }

                return Task.CompletedTask;
            }

            public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default)
            {
                if (TryComplete())
                {
                    return _owner.DeadLetterAsync(_entry, reason);
                }

                return Task.CompletedTask;
            }

            private async Task RenewInternalAsync(TimeSpan leaseDuration)
            {
                var expires = await _owner.RenewAsync(_entry, leaseDuration).ConfigureAwait(false);
                LeaseExpiresAt = expires;
            }

            private bool TryComplete()
                => Interlocked.CompareExchange(ref _completed, 1, 0) == 0;
        }

        internal sealed class QueueEntry
        {
            public QueueEntry(
                string sequenceId,
                string jobId,
                byte[] payload,
                string idempotencyKey,
                int attempt,
                DateTimeOffset enqueuedAt,
                string? traceId,
                IReadOnlyDictionary<string, string> attributes)
            {
                SequenceId = sequenceId;
                JobId = jobId;
                Payload = payload;
                IdempotencyKey = idempotencyKey;
                Attempt = attempt;
                EnqueuedAt = enqueuedAt;
                LastLeaseAt = enqueuedAt;
                TraceId = traceId;
                Attributes = attributes;
            }

            public string SequenceId { get; }

            public string JobId { get; }

            public byte[] Payload { get; }

            public string IdempotencyKey { get; }

            public int Attempt { get; set; }

            public int Deliveries { get; set; }

            public DateTimeOffset EnqueuedAt { get; }

            public DateTimeOffset LeaseExpiresAt { get; set; }

            public DateTimeOffset LastLeaseAt { get; set; }

            public string? TraceId { get; }

            public IReadOnlyDictionary<string, string> Attributes { get; }

            public string? DeadLetterReason { get; set; }
        }
    }
}
@@ -1,4 +1,6 @@
using System.Buffers.Binary;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Reachability;
@@ -132,6 +134,39 @@ public class BinaryReachabilityLifterTests
        Assert.DoesNotContain(graph.Nodes, n => n.Kind == "entry_point");
    }

    [Fact]
    public async Task EmitsUnknownsForElfUndefinedDynsymSymbols()
    {
        using var temp = new TempDir();
        var binaryPath = System.IO.Path.Combine(temp.Path, "sample.so");
        var bytes = CreateElfWithDynsymUndefinedSymbol("puts");
        await System.IO.File.WriteAllBytesAsync(binaryPath, bytes);

        var context = new ReachabilityLifterContext
        {
            RootPath = temp.Path,
            AnalysisId = "analysis-unknowns"
        };

        var builder = new ReachabilityGraphBuilder();
        var lifter = new BinaryReachabilityLifter();

        await lifter.LiftAsync(context, builder, CancellationToken.None);
        var graph = builder.ToUnionGraph(SymbolId.Lang.Binary);

        var binaryNode = Assert.Single(graph.Nodes, n => n.Kind == "binary");
        var unknownNode = Assert.Single(graph.Nodes, n => n.Kind == "unknown" && n.Display == "?puts");

        Assert.NotNull(unknownNode.Attributes);
        Assert.Equal("true", unknownNode.Attributes!["is_unknown"]);
        Assert.Equal("elf-dynsym-undef", unknownNode.Attributes["reason"]);

        Assert.Contains(graph.Edges, e =>
            e.EdgeType == EdgeTypes.Call &&
            e.From == binaryNode.SymbolId &&
            e.To == unknownNode.SymbolId);
    }

    private static byte[] CreateMinimalElf()
    {
        var data = new byte[64];
@@ -165,4 +200,111 @@ public class BinaryReachabilityLifterTests
        BitConverter.TryWriteBytes(data.AsSpan(24, 8), entryAddr);
        return data;
    }

    private static byte[] CreateElfWithDynsymUndefinedSymbol(string symbolName)
    {
        var shstr = Encoding.ASCII.GetBytes("\0.shstrtab\0.dynstr\0.dynsym\0");
        var dynstr = Encoding.ASCII.GetBytes("\0" + symbolName + "\0");

        const int elfHeaderSize = 64;
        const int shEntrySize = 64;
        const int dynsymEntrySize = 24;
        const int dynsymEntries = 2;

        var offset = elfHeaderSize;
        var shstrOffset = offset;
        offset = Align(offset + shstr.Length, 8);

        var dynstrOffset = offset;
        offset = Align(offset + dynstr.Length, 8);

        var dynsymOffset = offset;
        var dynsymSize = dynsymEntrySize * dynsymEntries;
        offset = Align(offset + dynsymSize, 8);

        var shoff = offset;
        const int shnum = 4;
        var totalSize = shoff + shnum * shEntrySize;

        var buffer = new byte[totalSize];

        // ELF header (64-bit LE) with section headers.
        buffer[0] = 0x7F;
        buffer[1] = (byte)'E';
        buffer[2] = (byte)'L';
        buffer[3] = (byte)'F';
        buffer[4] = 2; // 64-bit
        buffer[5] = 1; // little endian
        buffer[6] = 1; // version
        buffer[7] = 0; // System V ABI

        WriteU16LE(buffer, 16, 3); // e_type = ET_DYN
        WriteU16LE(buffer, 18, 0x3E); // e_machine = EM_X86_64
        WriteU32LE(buffer, 20, 1); // e_version
        WriteU64LE(buffer, 24, 0); // e_entry
        WriteU64LE(buffer, 32, 0); // e_phoff
        WriteU64LE(buffer, 40, (ulong)shoff); // e_shoff
        WriteU32LE(buffer, 48, 0); // e_flags
        WriteU16LE(buffer, 52, elfHeaderSize); // e_ehsize
        WriteU16LE(buffer, 54, 0); // e_phentsize
        WriteU16LE(buffer, 56, 0); // e_phnum
        WriteU16LE(buffer, 58, shEntrySize); // e_shentsize
        WriteU16LE(buffer, 60, shnum); // e_shnum
        WriteU16LE(buffer, 62, 1); // e_shstrndx

        shstr.CopyTo(buffer, shstrOffset);
        dynstr.CopyTo(buffer, dynstrOffset);

        // .dynsym with one undefined global function symbol.
        var sym1 = dynsymOffset + dynsymEntrySize;
        WriteU32LE(buffer, sym1 + 0, 1u); // st_name (offset into dynstr)
        buffer[sym1 + 4] = 0x12; // st_info = STB_GLOBAL(1) | STT_FUNC(2)
        buffer[sym1 + 5] = 0x00; // st_other
        WriteU16LE(buffer, sym1 + 6, 0); // st_shndx = SHN_UNDEF

        // Section headers.
        // Section 1: .shstrtab
        var sh1 = shoff + shEntrySize;
        WriteU32LE(buffer, sh1 + 0, 1u); // sh_name
        WriteU32LE(buffer, sh1 + 4, 3u); // sh_type = SHT_STRTAB
        WriteU64LE(buffer, sh1 + 24, (ulong)shstrOffset); // sh_offset
        WriteU64LE(buffer, sh1 + 32, (ulong)shstr.Length); // sh_size
        WriteU64LE(buffer, sh1 + 48, 1u); // sh_addralign

        // Section 2: .dynstr
        var sh2 = shoff + shEntrySize * 2;
        WriteU32LE(buffer, sh2 + 0, 11u); // sh_name
        WriteU32LE(buffer, sh2 + 4, 3u); // sh_type = SHT_STRTAB
        WriteU64LE(buffer, sh2 + 24, (ulong)dynstrOffset); // sh_offset
        WriteU64LE(buffer, sh2 + 32, (ulong)dynstr.Length); // sh_size
        WriteU64LE(buffer, sh2 + 48, 1u); // sh_addralign

        // Section 3: .dynsym
        var sh3 = shoff + shEntrySize * 3;
        WriteU32LE(buffer, sh3 + 0, 19u); // sh_name
        WriteU32LE(buffer, sh3 + 4, 11u); // sh_type = SHT_DYNSYM
        WriteU64LE(buffer, sh3 + 24, (ulong)dynsymOffset); // sh_offset
        WriteU64LE(buffer, sh3 + 32, (ulong)dynsymSize); // sh_size
        WriteU32LE(buffer, sh3 + 40, 2u); // sh_link = dynstr
        WriteU32LE(buffer, sh3 + 44, 1u); // sh_info (one local symbol)
        WriteU64LE(buffer, sh3 + 48, 8u); // sh_addralign
        WriteU64LE(buffer, sh3 + 56, dynsymEntrySize); // sh_entsize

        return buffer;
    }

    private static int Align(int value, int alignment)
        => (value + (alignment - 1)) / alignment * alignment;

    private static void WriteU16LE(byte[] buffer, int offset, int value)
        => BinaryPrimitives.WriteUInt16LittleEndian(buffer.AsSpan(offset, 2), (ushort)value);

    private static void WriteU16LE(byte[] buffer, int offset, ushort value)
        => BinaryPrimitives.WriteUInt16LittleEndian(buffer.AsSpan(offset, 2), value);

    private static void WriteU32LE(byte[] buffer, int offset, uint value)
        => BinaryPrimitives.WriteUInt32LittleEndian(buffer.AsSpan(offset, 4), value);

    private static void WriteU64LE(byte[] buffer, int offset, ulong value)
        => BinaryPrimitives.WriteUInt64LittleEndian(buffer.AsSpan(offset, 8), value);
}
@@ -1,82 +1,82 @@
using System;
using System.Net;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Sbomer.BuildXPlugin;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Attestation;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Descriptor;
using Xunit;

namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Attestation;

public sealed class AttestorClientTests
{
    [Fact]
    public async Task SendPlaceholderAsync_PostsJsonPayload()
    {
        var handler = new RecordingHandler(new HttpResponseMessage(HttpStatusCode.Accepted));
        using var httpClient = new HttpClient(handler);
        var client = new AttestorClient(httpClient);

        var document = BuildDescriptorDocument();
        var attestorUri = new Uri("https://attestor.example.com/api/v1/provenance");

        await client.SendPlaceholderAsync(attestorUri, document, CancellationToken.None);

        Assert.NotNull(handler.CapturedRequest);
        Assert.Equal(HttpMethod.Post, handler.CapturedRequest!.Method);
        Assert.Equal(attestorUri, handler.CapturedRequest.RequestUri);

        var content = await handler.CapturedRequest.Content!.ReadAsStringAsync();
        var json = JsonDocument.Parse(content);
        Assert.Equal(document.Subject.Digest, json.RootElement.GetProperty("imageDigest").GetString());
        Assert.Equal(document.Artifact.Digest, json.RootElement.GetProperty("sbomDigest").GetString());
        Assert.Equal(document.Provenance.ExpectedDsseSha256, json.RootElement.GetProperty("expectedDsseSha256").GetString());
    }

    [Fact]
    public async Task SendPlaceholderAsync_ThrowsOnFailure()
    {
        var handler = new RecordingHandler(new HttpResponseMessage(HttpStatusCode.BadRequest)
        {
            Content = new StringContent("invalid")
        });
        using var httpClient = new HttpClient(handler);
        var client = new AttestorClient(httpClient);

        var document = BuildDescriptorDocument();
        var attestorUri = new Uri("https://attestor.example.com/api/v1/provenance");

        await Assert.ThrowsAsync<BuildxPluginException>(() => client.SendPlaceholderAsync(attestorUri, document, CancellationToken.None));
    }

    private static DescriptorDocument BuildDescriptorDocument()
    {
        var subject = new DescriptorSubject("application/vnd.oci.image.manifest.v1+json", "sha256:img");
        var artifact = new DescriptorArtifact("application/vnd.cyclonedx+json", "sha256:sbom", 42, new System.Collections.Generic.Dictionary<string, string>());
        var provenance = new DescriptorProvenance("pending", "sha256:dsse", "nonce", "https://attestor.example.com/api/v1/provenance", "https://slsa.dev/provenance/v1");
        var generatorMetadata = new DescriptorGeneratorMetadata("generator", "1.0.0");
        var metadata = new System.Collections.Generic.Dictionary<string, string>();
        return new DescriptorDocument("schema", DateTimeOffset.UtcNow, generatorMetadata, subject, artifact, provenance, metadata);
    }

    private sealed class RecordingHandler : HttpMessageHandler
    {
        private readonly HttpResponseMessage response;

        public RecordingHandler(HttpResponseMessage response)
        {
            this.response = response;
        }

        public HttpRequestMessage? CapturedRequest { get; private set; }

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            CapturedRequest = request;
            return Task.FromResult(response);
        }
    }
}
@@ -1,154 +1,154 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Cryptography;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Descriptor;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
using Xunit;

namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Descriptor;

public sealed class DescriptorGeneratorTests
{
    [Fact]
    public async Task CreateAsync_BuildsDeterministicDescriptor()
    {
        await using var temp = new TempDirectory();
        var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
        await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}");

        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var generator = CreateGenerator(fakeTime);

        var request = new DescriptorRequest
        {
            ImageDigest = "sha256:0123456789abcdef",
            SbomPath = sbomPath,
            SbomMediaType = "application/vnd.cyclonedx+json",
            SbomFormat = "cyclonedx-json",
            SbomKind = "inventory",
            SbomArtifactType = "application/vnd.stellaops.sbom.layer+json",
            SubjectMediaType = "application/vnd.oci.image.manifest.v1+json",
            GeneratorVersion = "1.2.3",
            GeneratorName = "StellaOps.Scanner.Sbomer.BuildXPlugin",
            LicenseId = "lic-123",
            SbomName = "sample.cdx.json",
            Repository = "git.stella-ops.org/stellaops",
            BuildRef = "refs/heads/main",
            AttestorUri = "https://attestor.local/api/v1/provenance"
        }.Validate();

        var document = await generator.CreateAsync(request, CancellationToken.None);

        Assert.Equal(DescriptorGenerator.Schema, document.Schema);
        Assert.Equal(fakeTime.GetUtcNow(), document.GeneratedAt);
        Assert.Equal(request.ImageDigest, document.Subject.Digest);
        Assert.Equal(request.SbomMediaType, document.Artifact.MediaType);
        Assert.Equal(request.SbomName, document.Artifact.Annotations["org.opencontainers.image.title"]);
        Assert.Equal("pending", document.Provenance.Status);
        Assert.Equal(request.AttestorUri, document.Provenance.AttestorUri);
        Assert.Equal(request.PredicateType, document.Provenance.PredicateType);

        var expectedSbomDigest = ComputeSha256File(sbomPath);
        Assert.Equal(expectedSbomDigest, document.Artifact.Digest);
        Assert.Equal(expectedSbomDigest, document.Metadata["sbomDigest"]);

        var expectedDsse = ComputeExpectedDsse(request.ImageDigest, expectedSbomDigest, document.Provenance.Nonce);
        Assert.Equal(expectedDsse, document.Provenance.ExpectedDsseSha256);
        Assert.Equal(expectedDsse, document.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"]);
        Assert.Equal(document.Provenance.Nonce, document.Artifact.Annotations["org.stellaops.provenance.nonce"]);
    }

    [Fact]
    public async Task CreateAsync_RepeatedInvocationsReuseDeterministicNonce()
    {
        await using var temp = new TempDirectory();
        var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
        await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}");

        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var generator = CreateGenerator(fakeTime);

        var request = new DescriptorRequest
        {
            ImageDigest = "sha256:0123456789abcdef",
            SbomPath = sbomPath,
            SbomMediaType = "application/vnd.cyclonedx+json",
            SbomFormat = "cyclonedx-json",
            SbomKind = "inventory",
            SbomArtifactType = "application/vnd.stellaops.sbom.layer+json",
            SubjectMediaType = "application/vnd.oci.image.manifest.v1+json",
            GeneratorVersion = "1.2.3",
            GeneratorName = "StellaOps.Scanner.Sbomer.BuildXPlugin",
            LicenseId = "lic-123",
            SbomName = "sample.cdx.json",
|
||||
Repository = "git.stella-ops.org/stellaops",
|
||||
BuildRef = "refs/heads/main",
|
||||
AttestorUri = "https://attestor.local/api/v1/provenance"
|
||||
}.Validate();
|
||||
|
||||
var first = await generator.CreateAsync(request, CancellationToken.None);
|
||||
var second = await generator.CreateAsync(request, CancellationToken.None);
|
||||
|
||||
Assert.Equal(first.Provenance.Nonce, second.Provenance.Nonce);
|
||||
Assert.Equal(first.Provenance.ExpectedDsseSha256, second.Provenance.ExpectedDsseSha256);
|
||||
Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.nonce"], second.Artifact.Annotations["org.stellaops.provenance.nonce"]);
|
||||
Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"], second.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAsync_MetadataDifferencesYieldDistinctNonce()
|
||||
{
|
||||
await using var temp = new TempDirectory();
|
||||
var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
|
||||
await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}");
|
||||
|
||||
var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
|
||||
var request = new DescriptorRequest
|
||||
{
|
||||
ImageDigest = "sha256:0123456789abcdef",
|
||||
SbomPath = sbomPath,
|
||||
SbomMediaType = "application/vnd.cyclonedx+json",
|
||||
SbomFormat = "cyclonedx-json",
|
||||
SbomKind = "inventory",
|
||||
SbomArtifactType = "application/vnd.stellaops.sbom.layer+json",
|
||||
SubjectMediaType = "application/vnd.oci.image.manifest.v1+json",
|
||||
GeneratorVersion = "1.2.3",
|
||||
GeneratorName = "StellaOps.Scanner.Sbomer.BuildXPlugin",
|
||||
LicenseId = "lic-123",
|
||||
SbomName = "sample.cdx.json",
|
||||
Repository = "git.stella-ops.org/stellaops",
|
||||
BuildRef = "refs/heads/main",
|
||||
AttestorUri = "https://attestor.local/api/v1/provenance"
|
||||
}.Validate();
|
||||
|
||||
var first = await generator.CreateAsync(request, CancellationToken.None);
|
||||
var second = await generator.CreateAsync(request, CancellationToken.None);
|
||||
|
||||
Assert.Equal(first.Provenance.Nonce, second.Provenance.Nonce);
|
||||
Assert.Equal(first.Provenance.ExpectedDsseSha256, second.Provenance.ExpectedDsseSha256);
|
||||
Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.nonce"], second.Artifact.Annotations["org.stellaops.provenance.nonce"]);
|
||||
Assert.Equal(first.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"], second.Artifact.Annotations["org.stellaops.provenance.dsse.sha256"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAsync_MetadataDifferencesYieldDistinctNonce()
|
||||
{
|
||||
await using var temp = new TempDirectory();
|
||||
var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
|
||||
await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}");
|
||||
|
||||
var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
|
||||
var generator = CreateGenerator(fakeTime);
|
||||
|
||||
var baseline = new DescriptorRequest
|
||||
{
|
||||
ImageDigest = "sha256:0123456789abcdef",
|
||||
SbomPath = sbomPath,
|
||||
Repository = "git.stella-ops.org/stellaops",
|
||||
BuildRef = "refs/heads/main"
|
||||
}.Validate();
|
||||
|
||||
var variant = baseline with
|
||||
{
|
||||
BuildRef = "refs/heads/feature",
|
||||
Repository = "git.stella-ops.org/stellaops/feature"
|
||||
};
|
||||
variant = variant.Validate();
|
||||
|
||||
var baselineDocument = await generator.CreateAsync(baseline, CancellationToken.None);
|
||||
var variantDocument = await generator.CreateAsync(variant, CancellationToken.None);
|
||||
|
||||
Assert.NotEqual(baselineDocument.Provenance.Nonce, variantDocument.Provenance.Nonce);
|
||||
Assert.NotEqual(baselineDocument.Provenance.ExpectedDsseSha256, variantDocument.Provenance.ExpectedDsseSha256);
|
||||
}
|
||||
|
||||
|
||||
var baseline = new DescriptorRequest
|
||||
{
|
||||
ImageDigest = "sha256:0123456789abcdef",
|
||||
SbomPath = sbomPath,
|
||||
Repository = "git.stella-ops.org/stellaops",
|
||||
BuildRef = "refs/heads/main"
|
||||
}.Validate();
|
||||
|
||||
var variant = baseline with
|
||||
{
|
||||
BuildRef = "refs/heads/feature",
|
||||
Repository = "git.stella-ops.org/stellaops/feature"
|
||||
};
|
||||
variant = variant.Validate();
|
||||
|
||||
var baselineDocument = await generator.CreateAsync(baseline, CancellationToken.None);
|
||||
var variantDocument = await generator.CreateAsync(variant, CancellationToken.None);
|
||||
|
||||
Assert.NotEqual(baselineDocument.Provenance.Nonce, variantDocument.Provenance.Nonce);
|
||||
Assert.NotEqual(baselineDocument.Provenance.ExpectedDsseSha256, variantDocument.Provenance.ExpectedDsseSha256);
|
||||
}
|
||||
|
||||
private static DescriptorGenerator CreateGenerator(TimeProvider timeProvider)
|
||||
=> new(timeProvider, CryptoHashFactory.CreateDefault());
|
||||
|
||||
private static string ComputeSha256File(string path)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
var hash = SHA256.HashData(stream);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLower(CultureInfo.InvariantCulture)}";
|
||||
}
|
||||
|
||||
private static string ComputeExpectedDsse(string imageDigest, string sbomDigest, string nonce)
|
||||
{
|
||||
var payload = $"{imageDigest}\n{sbomDigest}\n{nonce}";
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
SHA256.HashData(Encoding.UTF8.GetBytes(payload), hash);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLower(CultureInfo.InvariantCulture)}";
|
||||
}
|
||||
}
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
var hash = SHA256.HashData(stream);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLower(CultureInfo.InvariantCulture)}";
|
||||
}
|
||||
|
||||
private static string ComputeExpectedDsse(string imageDigest, string sbomDigest, string nonce)
|
||||
{
|
||||
var payload = $"{imageDigest}\n{sbomDigest}\n{nonce}";
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
SHA256.HashData(Encoding.UTF8.GetBytes(payload), hash);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLower(CultureInfo.InvariantCulture)}";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,135 +1,135 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Cryptography;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Descriptor;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
using Xunit;

namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Descriptor;

public sealed class DescriptorGoldenTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    [Fact]
    public async Task DescriptorMatchesBaselineFixture()
    {
        await using var temp = new TempDirectory();
        var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
        await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.5\"}");

        var request = new DescriptorRequest
        {
            ImageDigest = "sha256:0123456789abcdef",
            SbomPath = sbomPath,
            SbomMediaType = "application/vnd.cyclonedx+json",
            SbomFormat = "cyclonedx-json",
            SbomKind = "inventory",
            SbomArtifactType = "application/vnd.stellaops.sbom.layer+json",
            SubjectMediaType = "application/vnd.oci.image.manifest.v1+json",
            GeneratorVersion = "1.2.3",
            GeneratorName = "StellaOps.Scanner.Sbomer.BuildXPlugin",
            LicenseId = "lic-123",
            SbomName = "sample.cdx.json",
            Repository = "git.stella-ops.org/stellaops",
            BuildRef = "refs/heads/main",
            AttestorUri = "https://attestor.local/api/v1/provenance"
        }.Validate();

        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero));
        var generator = CreateGenerator(fakeTime);
        var document = await generator.CreateAsync(request, CancellationToken.None);
        var actualJson = JsonSerializer.Serialize(document, SerializerOptions);
        var normalizedJson = NormalizeDescriptorJson(actualJson, Path.GetFileName(sbomPath));

        var projectRoot = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", ".."));
        var fixturePath = Path.Combine(projectRoot, "Fixtures", "descriptor.baseline.json");
        var updateRequested = string.Equals(Environment.GetEnvironmentVariable("UPDATE_BUILDX_FIXTURES"), "1", StringComparison.OrdinalIgnoreCase);

        if (updateRequested)
        {
            Directory.CreateDirectory(Path.GetDirectoryName(fixturePath)!);
            await File.WriteAllTextAsync(fixturePath, normalizedJson);
            return;
        }

        if (!File.Exists(fixturePath))
        {
            throw new InvalidOperationException($"Baseline fixture '{fixturePath}' is missing. Set UPDATE_BUILDX_FIXTURES=1 and re-run the tests to generate it.");
        }

        var baselineJson = await File.ReadAllTextAsync(fixturePath);

        using var baselineDoc = JsonDocument.Parse(baselineJson);
        using var actualDoc = JsonDocument.Parse(normalizedJson);

        AssertJsonEquivalent(baselineDoc.RootElement, actualDoc.RootElement);
    }

    private static string NormalizeDescriptorJson(string json, string sbomFileName)
    {
        var node = JsonNode.Parse(json)?.AsObject()
            ?? throw new InvalidOperationException("Failed to parse descriptor JSON for normalization.");

        if (node["metadata"] is JsonObject metadata)
        {
            metadata["sbomPath"] = sbomFileName;
        }

        return node.ToJsonString(SerializerOptions);
    }

    private static void AssertJsonEquivalent(JsonElement expected, JsonElement actual)
    {
        if (expected.ValueKind != actual.ValueKind)
        {
            throw new Xunit.Sdk.XunitException($"Value kind mismatch. Expected '{expected.ValueKind}' but found '{actual.ValueKind}'.");
        }

        switch (expected.ValueKind)
        {
            case JsonValueKind.Object:
                var expectedProperties = expected.EnumerateObject().ToDictionary(p => p.Name, p => p.Value, StringComparer.Ordinal);
                var actualProperties = actual.EnumerateObject().ToDictionary(p => p.Name, p => p.Value, StringComparer.Ordinal);

                Assert.Equal(
                    expectedProperties.Keys.OrderBy(static name => name).ToArray(),
                    actualProperties.Keys.OrderBy(static name => name).ToArray());

                foreach (var propertyName in expectedProperties.Keys)
                {
                    AssertJsonEquivalent(expectedProperties[propertyName], actualProperties[propertyName]);
                }

                break;
            case JsonValueKind.Array:
                var expectedItems = expected.EnumerateArray().ToArray();
                var actualItems = actual.EnumerateArray().ToArray();

                Assert.Equal(expectedItems.Length, actualItems.Length);
                for (var i = 0; i < expectedItems.Length; i++)
                {
                    AssertJsonEquivalent(expectedItems[i], actualItems[i]);
                }

                break;
            default:
                Assert.Equal(expected.ToString(), actual.ToString());
                break;
        }
    }

    private static DescriptorGenerator CreateGenerator(TimeProvider timeProvider)
        => new(timeProvider, CryptoHashFactory.CreateDefault());
}

@@ -1,80 +1,80 @@
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Sbomer.BuildXPlugin;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Manifest;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
using Xunit;

namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Manifest;

public sealed class BuildxPluginManifestLoaderTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    [Fact]
    public async Task LoadAsync_ReturnsManifestWithSourceInformation()
    {
        await using var temp = new TempDirectory();
        var manifestPath = System.IO.Path.Combine(temp.Path, "stellaops.manifest.json");
        await File.WriteAllTextAsync(manifestPath, BuildSampleManifestJson("stellaops.sbom-indexer"));

        var loader = new BuildxPluginManifestLoader(temp.Path);
        var manifests = await loader.LoadAsync(CancellationToken.None);

        var manifest = Assert.Single(manifests);
        Assert.Equal("stellaops.sbom-indexer", manifest.Id);
        Assert.Equal("0.1.0", manifest.Version);
        Assert.Equal(manifestPath, manifest.SourcePath);
        Assert.Equal(Path.GetDirectoryName(manifestPath), manifest.SourceDirectory);
    }

    [Fact]
    public async Task LoadDefaultAsync_ThrowsWhenNoManifests()
    {
        await using var temp = new TempDirectory();
        var loader = new BuildxPluginManifestLoader(temp.Path);

        await Assert.ThrowsAsync<BuildxPluginException>(() => loader.LoadDefaultAsync(CancellationToken.None));
    }

    [Fact]
    public async Task LoadAsync_ThrowsWhenRestartRequiredMissing()
    {
        await using var temp = new TempDirectory();
        var manifestPath = Path.Combine(temp.Path, "failure.manifest.json");
        await File.WriteAllTextAsync(manifestPath, BuildSampleManifestJson("stellaops.failure", requiresRestart: false));

        var loader = new BuildxPluginManifestLoader(temp.Path);

        await Assert.ThrowsAsync<BuildxPluginException>(() => loader.LoadAsync(CancellationToken.None));
    }

    private static string BuildSampleManifestJson(string id, bool requiresRestart = true)
    {
        var manifest = new BuildxPluginManifest
        {
            SchemaVersion = BuildxPluginManifest.CurrentSchemaVersion,
            Id = id,
            DisplayName = "Sample",
            Version = "0.1.0",
            RequiresRestart = requiresRestart,
            EntryPoint = new BuildxPluginEntryPoint
            {
                Type = "dotnet",
                Executable = "StellaOps.Scanner.Sbomer.BuildXPlugin.dll"
            },
            Cas = new BuildxPluginCas
            {
                Protocol = "filesystem",
                DefaultRoot = "cas"
            }
        };

        return JsonSerializer.Serialize(manifest, SerializerOptions);
    }
}

@@ -1,44 +1,44 @@
using System;
using System.IO;
using System.Threading.Tasks;

namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;

internal sealed class TempDirectory : IDisposable, IAsyncDisposable
{
    public string Path { get; }

    public TempDirectory()
    {
        Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"stellaops-buildx-{Guid.NewGuid():N}");
        Directory.CreateDirectory(Path);
    }

    public void Dispose()
    {
        Cleanup();
        GC.SuppressFinalize(this);
    }

    public ValueTask DisposeAsync()
    {
        Cleanup();
        GC.SuppressFinalize(this);
        return ValueTask.CompletedTask;
    }

    private void Cleanup()
    {
        try
        {
            if (Directory.Exists(Path))
            {
                Directory.Delete(Path, recursive: true);
            }
        }
        catch
        {
            // Best effort cleanup only.
        }
    }
}

@@ -1,34 +1,34 @@
using System.Collections.Concurrent;
using StellaOps.Scanner.Storage.ObjectStore;

namespace StellaOps.Scanner.Storage.Tests;

internal sealed class InMemoryArtifactObjectStore : IArtifactObjectStore
{
    private readonly ConcurrentDictionary<(string Bucket, string Key), byte[]> _objects = new();

    public IReadOnlyDictionary<(string Bucket, string Key), byte[]> Objects => _objects;

    public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
    {
        _objects.TryRemove((descriptor.Bucket, descriptor.Key), out _);
        return Task.CompletedTask;
    }

    public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
    {
        if (_objects.TryGetValue((descriptor.Bucket, descriptor.Key), out var bytes))
        {
            return Task.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
        }

        return Task.FromResult<Stream?>(null);
    }

    public async Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
    {
        using var buffer = new MemoryStream();
        await content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        _objects[(descriptor.Bucket, descriptor.Key)] = buffer.ToArray();
    }
}

@@ -1,116 +1,116 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.Services;
using Xunit;

namespace StellaOps.Scanner.Storage.Tests;

[Collection("scanner-postgres")]
public sealed class StorageDualWriteFixture
{
    private readonly ScannerPostgresFixture _fixture;

    public StorageDualWriteFixture(ScannerPostgresFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task StoreArtifactAsync_DualWrite_WritesToMirrorAndCatalog()
    {
        await _fixture.TruncateAllTablesAsync();

        var options = BuildOptions(dualWrite: true, mirrorBucket: "mirror-bucket");
        var objectStore = new InMemoryArtifactObjectStore();
        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero));

        var dataSource = new ScannerDataSource(Options.Create(options), NullLogger<ScannerDataSource>.Instance);
        await using var _ = dataSource;
        var artifactRepository = new ArtifactRepository(dataSource, NullLogger<ArtifactRepository>.Instance, fakeTime);
        var lifecycleRepository = new LifecycleRuleRepository(dataSource, NullLogger<LifecycleRuleRepository>.Instance, fakeTime);
        var service = new ArtifactStorageService(
            artifactRepository,
            lifecycleRepository,
            objectStore,
            Options.Create(options),
            NullLogger<ArtifactStorageService>.Instance,
            fakeTime);

        var bytes = System.Text.Encoding.UTF8.GetBytes("test artifact payload");
        using var stream = new MemoryStream(bytes);
        var expiresAt = DateTime.UtcNow.AddHours(6);
        var expectedTimestamp = fakeTime.GetUtcNow().UtcDateTime;

        var document = await service.StoreArtifactAsync(
            ArtifactDocumentType.LayerBom,
            ArtifactDocumentFormat.CycloneDxJson,
            mediaType: "application/vnd.cyclonedx+json",
            content: stream,
            immutable: true,
            ttlClass: "compliance",
            expiresAtUtc: expiresAt,
            cancellationToken: CancellationToken.None);

        var digest = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
        var expectedKey = $"{options.ObjectStore.RootPrefix.TrimEnd('/')}/layers/{digest}/sbom.cdx.json";
        Assert.Contains(objectStore.Objects.Keys, key => key.Bucket == options.ObjectStore.BucketName && key.Key == expectedKey);
        Assert.Contains(objectStore.Objects.Keys, key => key.Bucket == options.DualWrite.MirrorBucket && key.Key == expectedKey);

        var artifact = await artifactRepository.GetAsync(document.Id, CancellationToken.None);
        Assert.NotNull(artifact);
        Assert.Equal($"sha256:{digest}", artifact!.BytesSha256);
        Assert.Equal(1, artifact.RefCount);
        Assert.Equal("compliance", artifact.TtlClass);
        Assert.True(artifact.Immutable);
        Assert.Equal(expectedTimestamp, artifact.CreatedAtUtc);
        Assert.Equal(expectedTimestamp, artifact.UpdatedAtUtc);

        var lifecycle = await lifecycleRepository.ListExpiredAsync(DateTime.MaxValue, CancellationToken.None);
        var lifecycleEntry = lifecycle.SingleOrDefault(x => x.ArtifactId == document.Id);
        Assert.NotNull(lifecycleEntry);
        Assert.Equal("compliance", lifecycleEntry!.Class);
        Assert.True(lifecycleEntry.ExpiresAtUtc.HasValue);
        Assert.True(lifecycleEntry.ExpiresAtUtc.Value <= expiresAt.AddSeconds(5));
        Assert.Equal(expectedTimestamp, lifecycleEntry.CreatedAtUtc);
    }

    [Fact]
    public async Task Migrations_ApplySuccessfully()
    {
        await _fixture.TruncateAllTablesAsync();

        await using var connection = new Npgsql.NpgsqlConnection(_fixture.ConnectionString);
        await connection.OpenAsync();
        await using var command = new Npgsql.NpgsqlCommand(
            "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = @schema AND table_name = 'artifacts');",
            connection);
        command.Parameters.AddWithValue("schema", _fixture.SchemaName);
        var exists = (bool)(await command.ExecuteScalarAsync() ?? false);
        Assert.True(exists);
    }

    private ScannerStorageOptions BuildOptions(bool dualWrite, string? mirrorBucket)
    {
        var options = new ScannerStorageOptions
        {
            Postgres = _fixture.Fixture.CreateOptions(),
            ObjectStore =
            {
                BucketName = "primary-bucket",
                RootPrefix = "scanner",
                EnableObjectLock = true,
            },
        };

        options.DualWrite.Enabled = dualWrite;
        options.DualWrite.MirrorBucket = mirrorBucket;
        return options;
    }
}

@@ -1,25 +1,25 @@
using System.Net;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class AuthorizationTests
{
    [Fact]
    public async Task ApiRoutesRequireAuthenticationWhenAuthorityEnabled()
    {
        using var factory = new ScannerApplicationFactory(configuration =>
        {
            configuration["scanner:authority:enabled"] = "true";
            configuration["scanner:authority:allowAnonymousFallback"] = "false";
            configuration["scanner:authority:issuer"] = "https://authority.local";
            configuration["scanner:authority:audiences:0"] = "scanner-api";
            configuration["scanner:authority:clientId"] = "scanner-web";
            configuration["scanner:authority:clientSecret"] = "secret";
        });

        using var client = factory.CreateClient();
        var response = await client.GetAsync("/api/v1/__auth-probe");

        Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode);
    }
}

@@ -1,49 +1,49 @@
using System.Net.Http.Json;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class HealthEndpointsTests
{
    [Fact]
    public async Task HealthAndReadyEndpointsRespond()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var healthResponse = await client.GetAsync("/healthz");
        Assert.True(healthResponse.IsSuccessStatusCode, $"Expected 200 from /healthz, received {(int)healthResponse.StatusCode}.");

        var readyResponse = await client.GetAsync("/readyz");
        Assert.True(readyResponse.IsSuccessStatusCode, $"Expected 200 from /readyz, received {(int)readyResponse.StatusCode}.");

        var healthDocument = await healthResponse.Content.ReadFromJsonAsync<HealthDocument>();
        Assert.NotNull(healthDocument);
        Assert.Equal("healthy", healthDocument!.Status);
        Assert.True(healthDocument.UptimeSeconds >= 0);
        Assert.NotNull(healthDocument.Telemetry);

        var readyDocument = await readyResponse.Content.ReadFromJsonAsync<ReadyDocument>();
        Assert.NotNull(readyDocument);
        Assert.Equal("ready", readyDocument!.Status);
        Assert.Null(readyDocument.Error);
    }

    private sealed record HealthDocument(
        string Status,
        DateTimeOffset StartedAt,
        DateTimeOffset CapturedAt,
        double UptimeSeconds,
        TelemetryDocument Telemetry);

    private sealed record TelemetryDocument(
        bool Enabled,
        bool Logging,
        bool Metrics,
        bool Tracing);

    private sealed record ReadyDocument(
        string Status,
        DateTimeOffset CheckedAt,
        double? LatencyMs,
        string? Error);
}

@@ -1,71 +1,71 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.WebService.Options;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class PlatformEventPublisherRegistrationTests
{
    [Fact]
    public void NullPublisherRegisteredWhenEventsDisabled()
    {
        using var factory = new ScannerApplicationFactory(configuration =>
        {
            configuration["scanner:events:enabled"] = "false";
            configuration["scanner:events:dsn"] = string.Empty;
        });
        using var scope = factory.Services.CreateScope();

        var publisher = scope.ServiceProvider.GetRequiredService<IPlatformEventPublisher>();
        Assert.IsType<NullPlatformEventPublisher>(publisher);
    }

    [Fact]
    public void RedisPublisherRegisteredWhenEventsEnabled()
    {
        var originalEnabled = Environment.GetEnvironmentVariable("SCANNER__EVENTS__ENABLED");
        var originalDriver = Environment.GetEnvironmentVariable("SCANNER__EVENTS__DRIVER");
        var originalDsn = Environment.GetEnvironmentVariable("SCANNER__EVENTS__DSN");
        var originalStream = Environment.GetEnvironmentVariable("SCANNER__EVENTS__STREAM");
        var originalTimeout = Environment.GetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS");
        var originalMax = Environment.GetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH");

        Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", "true");
        Environment.SetEnvironmentVariable("SCANNER__EVENTS__DRIVER", "redis");
        Environment.SetEnvironmentVariable("SCANNER__EVENTS__DSN", "localhost:6379");
        Environment.SetEnvironmentVariable("SCANNER__EVENTS__STREAM", "stella.events.tests");
        Environment.SetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS", "1");
        Environment.SetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH", "100");

        try
        {
            using var factory = new ScannerApplicationFactory(configuration =>
            {
                configuration["scanner:events:enabled"] = "true";
                configuration["scanner:events:driver"] = "redis";
                configuration["scanner:events:dsn"] = "localhost:6379";
                configuration["scanner:events:stream"] = "stella.events.tests";
                configuration["scanner:events:publishTimeoutSeconds"] = "1";
                configuration["scanner:events:maxStreamLength"] = "100";
            });
            using var scope = factory.Services.CreateScope();

            var options = scope.ServiceProvider.GetRequiredService<IOptions<ScannerWebServiceOptions>>().Value;
            Assert.True(options.Events.Enabled);
            Assert.Equal("redis", options.Events.Driver);

            var publisher = scope.ServiceProvider.GetRequiredService<IPlatformEventPublisher>();
            Assert.IsType<RedisPlatformEventPublisher>(publisher);
        }
        finally
        {
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", originalEnabled);
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__DRIVER", originalDriver);
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__DSN", originalDsn);
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__STREAM", originalStream);
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS", originalTimeout);
            Environment.SetEnvironmentVariable("SCANNER__EVENTS__MAXSTREAMLENGTH", originalMax);
        }
    }
}

@@ -9,18 +9,18 @@ using System.Text.Json.Serialization;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Serialization;
using Xunit.Sdk;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class PlatformEventSamplesTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter() }
    };

    [Theory]
    [InlineData("scanner.event.report.ready@1.sample.json", OrchestratorEventKinds.ScannerReportReady)]
    [InlineData("scanner.event.scan.completed@1.sample.json", OrchestratorEventKinds.ScannerScanCompleted)]
    public void PlatformEventSamplesStayCanonical(string fileName, string expectedKind)
@@ -174,5 +174,5 @@ public sealed class PlatformEventSamplesTests
        var path = Path.Combine(AppContext.BaseDirectory, fileName);
        Assert.True(File.Exists(path), $"Sample file not found at '{path}'.");
        return File.ReadAllText(path);
    }
}

@@ -1,108 +1,108 @@
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using System.Threading.Tasks;
using StellaOps.Policy;
using StellaOps.Scanner.WebService.Contracts;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class PolicyEndpointsTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    [Fact]
    public async Task PolicySchemaReturnsEmbeddedSchema()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var response = await client.GetAsync("/api/v1/policy/schema");
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
        Assert.Equal("application/schema+json", response.Content.Headers.ContentType?.MediaType);

        var payload = await response.Content.ReadAsStringAsync();
        Assert.Contains("\"$schema\"", payload);
        Assert.Contains("\"properties\"", payload);
    }

    [Fact]
    public async Task PolicyDiagnosticsReturnsRecommendations()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new PolicyDiagnosticsRequestDto
        {
            Policy = new PolicyPreviewPolicyDto
            {
                Content = "version: \"1.0\"\nrules: []\n",
                Format = "yaml",
                Actor = "tester",
                Description = "empty ruleset"
            }
        };

        var response = await client.PostAsJsonAsync("/api/v1/policy/diagnostics", request);
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var diagnostics = await response.Content.ReadFromJsonAsync<PolicyDiagnosticsResponseDto>(SerializerOptions);
        Assert.NotNull(diagnostics);
        Assert.False(diagnostics!.Success);
        Assert.True(diagnostics.ErrorCount >= 0);
        Assert.NotEmpty(diagnostics.Recommendations);
    }

    [Fact]
    public async Task PolicyPreviewUsesProposedPolicy()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        const string policyYaml = """
            version: "1.0"
            rules:
              - name: Block Critical
                severity: [Critical]
                action: block
            """;

        var request = new PolicyPreviewRequestDto
        {
            ImageDigest = "sha256:abc123",
            Findings = new[]
            {
                new PolicyPreviewFindingDto
                {
                    Id = "finding-1",
                    Severity = "Critical",
                    Source = "NVD",
                    Tags = new[] { "reachability:runtime" }
                }
            },
            Policy = new PolicyPreviewPolicyDto
            {
                Content = policyYaml,
                Format = "yaml",
                Actor = "preview",
                Description = "test policy"
            }
        };

        var response = await client.PostAsJsonAsync("/api/v1/policy/preview", request);
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var preview = await response.Content.ReadFromJsonAsync<PolicyPreviewResponseDto>(SerializerOptions);
        Assert.NotNull(preview);
        Assert.True(preview!.Success);
        Assert.Equal(1, preview.Changed);
        var diff = Assert.Single(preview.Diffs);
        Assert.Equal("finding-1", diff.Projected?.FindingId);
        Assert.Equal("Blocked", diff.Projected?.Status);
        Assert.Equal(PolicyScoringConfig.Default.Version, diff.Projected?.ConfigVersion);
        Assert.NotNull(diff.Projected?.Inputs);
        Assert.True(diff.Projected!.Inputs!.ContainsKey("severityWeight"));
        Assert.Equal("NVD", diff.Projected.SourceTrust);
        Assert.Equal("runtime", diff.Projected.Reachability);
    }
}

@@ -1,35 +1,35 @@
using System;
using System.IO;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using StellaOps.Scanner.WebService.Contracts;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class ReportSamplesTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter() }
    };

    [Fact]
    public async Task ReportSampleEnvelope_RemainsCanonical()
    {
        var baseDirectory = AppContext.BaseDirectory;
        var repoRoot = Path.GetFullPath(Path.Combine(baseDirectory, "..", "..", "..", "..", ".."));
        var path = Path.Combine(repoRoot, "samples", "api", "reports", "report-sample.dsse.json");
        Assert.True(File.Exists(path), $"Sample file not found at {path}.");
        await using var stream = File.OpenRead(path);
        var response = await JsonSerializer.DeserializeAsync<ReportResponseDto>(stream, SerializerOptions);
        Assert.NotNull(response);
        Assert.NotNull(response!.Report);
        Assert.NotNull(response.Dsse);

        var reportBytes = JsonSerializer.SerializeToUtf8Bytes(response.Report, SerializerOptions);
        var expectedPayload = Convert.ToBase64String(reportBytes);
        Assert.Equal(expectedPayload, response.Dsse!.Payload);
    }
}
@@ -1,5 +1,5 @@
using System.Net;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -12,113 +12,113 @@ using StellaOps.Policy;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Services;
using System.Linq;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class ReportsEndpointsTests
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    [Fact]
    public async Task ReportsEndpointReturnsSignedEnvelope()
    {
        const string policyYaml = """
            version: "1.0"
            rules:
              - name: Block Critical
                severity: [Critical]
                action: block
            """;

        var hmacKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("scanner-report-hmac-key-2025!"));

        using var factory = new ScannerApplicationFactory(configuration =>
        {
            configuration["scanner:signing:enabled"] = "true";
            configuration["scanner:signing:keyId"] = "scanner-report-signing";
            configuration["scanner:signing:algorithm"] = "hs256";
            configuration["scanner:signing:keyPem"] = hmacKey;
            configuration["scanner:features:enableSignedReports"] = "true";
        });

        var store = factory.Services.GetRequiredService<PolicySnapshotStore>();
        await store.SaveAsync(
            new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "seed", "initial"),
            CancellationToken.None);

        using var client = factory.CreateClient();

        var request = new ReportRequestDto
        {
            ImageDigest = "sha256:deadbeef",
            Findings = new[]
            {
                new PolicyPreviewFindingDto
                {
                    Id = "finding-1",
                    Severity = "Critical",
                    Source = "NVD",
                    Tags = new[] { "reachability:runtime" }
                }
            }
        };

        var response = await client.PostAsJsonAsync("/api/v1/reports", request);
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var raw = await response.Content.ReadAsStringAsync();
        Assert.False(string.IsNullOrWhiteSpace(raw), raw);
        var payload = JsonSerializer.Deserialize<ReportResponseDto>(raw, SerializerOptions);
        Assert.NotNull(payload);
        Assert.NotNull(payload!.Report);
        Assert.NotNull(payload.Dsse);
        Assert.StartsWith("report-", payload.Report.ReportId, StringComparison.Ordinal);
        Assert.Equal("blocked", payload.Report.Verdict);

        var dsse = payload.Dsse!;
        Assert.Equal("application/vnd.stellaops.report+json", dsse.PayloadType);
        var decodedPayload = Convert.FromBase64String(dsse.Payload);
        var canonicalPayload = JsonSerializer.SerializeToUtf8Bytes(payload.Report, SerializerOptions);
        var expectedBase64 = Convert.ToBase64String(canonicalPayload);
        Assert.Equal(expectedBase64, dsse.Payload);

        var reportVerdict = Assert.Single(payload.Report.Verdicts);
        Assert.Equal("NVD", reportVerdict.SourceTrust);
        Assert.Equal("runtime", reportVerdict.Reachability);
        Assert.NotNull(reportVerdict.Inputs);
        Assert.True(reportVerdict.Inputs!.ContainsKey("severityWeight"));
        Assert.Equal(PolicyScoringConfig.Default.Version, reportVerdict.ConfigVersion);

        var signature = Assert.Single(dsse.Signatures);
        Assert.Equal("scanner-report-signing", signature.KeyId);
        Assert.Equal("hs256", signature.Algorithm, ignoreCase: true);

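        // Recompute the HMAC-SHA256 over the decoded DSSE payload with the same key the factory was
        // configured with; the envelope's signature should match byte for byte.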
        using var hmac = new System.Security.Cryptography.HMACSHA256(Convert.FromBase64String(hmacKey));
        var expectedSig = Convert.ToBase64String(hmac.ComputeHash(decodedPayload));
        var actualSig = signature.Signature;
        Assert.True(expectedSig == actualSig, $"expected:{expectedSig}, actual:{actualSig}");
    }

    [Fact]
    public async Task ReportsEndpointValidatesDigest()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new ReportRequestDto
        {
            ImageDigest = "",
            Findings = Array.Empty<PolicyPreviewFindingDto>()
        };

        var response = await client.PostAsJsonAsync("/api/v1/reports", request);
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task ReportsEndpointReturnsServiceUnavailableWhenPolicyMissing()
    {
        using var factory = new ScannerApplicationFactory();

@@ -1,357 +1,357 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Scanner.WebService.Tests;

public sealed class RuntimeEndpointsTests
{
    [Fact]
    public async Task RuntimeEventsEndpointPersistsEvents()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new RuntimeEventsIngestRequestDto
        {
            BatchId = "batch-1",
            Events = new[]
            {
                CreateEnvelope("evt-001", buildId: "ABCDEF1234567890ABCDEF1234567890ABCDEF12"),
                CreateEnvelope("evt-002", buildId: "abcdef1234567890abcdef1234567890abcdef12")
            }
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request);
        Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);

        var payload = await response.Content.ReadFromJsonAsync<RuntimeEventsIngestResponseDto>();
        Assert.NotNull(payload);
        Assert.Equal(2, payload!.Accepted);
        Assert.Equal(0, payload.Duplicates);

        using var scope = factory.Services.CreateScope();
        var repository = scope.ServiceProvider.GetRequiredService<RuntimeEventRepository>();
        var stored = await repository.ListAsync(CancellationToken.None);
        Assert.Equal(2, stored.Count);
        Assert.Contains(stored, doc => doc.EventId == "evt-001");
        Assert.All(stored, doc =>
        {
            Assert.Equal("tenant-alpha", doc.Tenant);
            Assert.True(doc.ExpiresAt > doc.ReceivedAt);
            Assert.Equal("sha256:deadbeef", doc.ImageDigest);
            Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", doc.BuildId);
        });
    }

    [Fact]
    public async Task RuntimeEventsEndpointRejectsUnsupportedSchema()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var envelope = CreateEnvelope("evt-100", schemaVersion: "zastava.runtime.event@v2.0");

        var request = new RuntimeEventsIngestRequestDto
        {
            Events = new[] { envelope }
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request);
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

    [Fact]
    public async Task RuntimeEventsEndpointEnforcesRateLimit()
    {
        using var factory = new ScannerApplicationFactory(configuration =>
        {
            configuration["scanner:runtime:perNodeBurst"] = "1";
            configuration["scanner:runtime:perNodeEventsPerSecond"] = "1";
            configuration["scanner:runtime:perTenantBurst"] = "1";
            configuration["scanner:runtime:perTenantEventsPerSecond"] = "1";
        });
        using var client = factory.CreateClient();

        var request = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelope("evt-500"),
                CreateEnvelope("evt-501")
            }
        };

        var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request);
        Assert.Equal((HttpStatusCode)StatusCodes.Status429TooManyRequests, response.StatusCode);
        Assert.NotNull(response.Headers.RetryAfter);

        using var scope = factory.Services.CreateScope();
        var repository = scope.ServiceProvider.GetRequiredService<RuntimeEventRepository>();
        var count = await repository.CountAsync(CancellationToken.None);
        Assert.Equal(0, count);
    }

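    // Seeds an SBOM artifact, a Rekor-backed attestation, and their image links before calling the
    // runtime policy endpoint, so the decision asserted below comes back signed and with SBOM referrers.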
    [Fact]
    public async Task RuntimePolicyEndpointReturnsDecisions()
    {
        using var factory = new ScannerApplicationFactory(configuration =>
        {
            configuration["scanner:runtime:policyCacheTtlSeconds"] = "600";
        });

        const string imageDigest = "sha256:deadbeef";

        using var client = factory.CreateClient();

        using (var scope = factory.Services.CreateScope())
        {
            var artifacts = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
            var links = scope.ServiceProvider.GetRequiredService<LinkRepository>();
            var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>();
            var runtimeRepository = scope.ServiceProvider.GetRequiredService<RuntimeEventRepository>();
            await runtimeRepository.TruncateAsync(CancellationToken.None);

            const string policyYaml = """
                version: "1.0"
                rules:
                  - name: Block Critical
                    severity: [Critical]
                    action: block
                """;
            var saveResult = await policyStore.SaveAsync(
                new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "seed"),
                CancellationToken.None);
            Assert.True(saveResult.Success);

            var snapshot = await policyStore.GetLatestAsync(CancellationToken.None);
            Assert.NotNull(snapshot);

            var sbomArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, "sha256:sbomdigest");
            var attestationArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.Attestation, "sha256:attdigest");

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = sbomArtifactId,
                Type = ArtifactDocumentType.ImageBom,
                Format = ArtifactDocumentFormat.CycloneDxJson,
                MediaType = "application/json",
                BytesSha256 = "sha256:sbomdigest",
                RefCount = 1
            }, CancellationToken.None);

            await artifacts.UpsertAsync(new ArtifactDocument
            {
                Id = attestationArtifactId,
                Type = ArtifactDocumentType.Attestation,
                Format = ArtifactDocumentFormat.DsseJson,
                MediaType = "application/vnd.dsse.envelope+json",
                BytesSha256 = "sha256:attdigest",
                RefCount = 1,
                Rekor = new RekorReference { Uuid = "rekor-uuid", Url = "https://rekor.example/uuid/rekor-uuid", Index = 7 }
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = imageDigest,
                ArtifactId = sbomArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);

            await links.UpsertAsync(new LinkDocument
            {
                Id = Guid.NewGuid().ToString("N"),
                FromType = LinkSourceType.Image,
                FromDigest = imageDigest,
                ArtifactId = attestationArtifactId,
                CreatedAtUtc = DateTime.UtcNow
            }, CancellationToken.None);
        }

        var ingestRequest = new RuntimeEventsIngestRequestDto
        {
            Events = new[]
            {
                CreateEnvelope("evt-210", imageDigest: imageDigest, buildId: "1122aabbccddeeff00112233445566778899aabb"),
                CreateEnvelope("evt-211", imageDigest: imageDigest, buildId: "1122AABBCCDDEEFF00112233445566778899AABB")
            }
        };
        var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest);
        Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode);

        var request = new RuntimePolicyRequestDto
        {
            Namespace = "payments",
            Images = new[] { imageDigest, imageDigest },
            Labels = new Dictionary<string, string> { ["app"] = "api" }
        };

        var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request);
        Assert.Equal(HttpStatusCode.OK, response.StatusCode);

        var raw = await response.Content.ReadAsStringAsync();
        Assert.False(string.IsNullOrWhiteSpace(raw), "Runtime policy response body was empty.");
        var payload = JsonSerializer.Deserialize<RuntimePolicyResponseDto>(raw);
        Assert.True(payload is not null, $"Runtime policy response: {raw}");
        Assert.Equal(600, payload!.TtlSeconds);
        Assert.NotNull(payload.PolicyRevision);
        Assert.True(payload.ExpiresAtUtc > DateTimeOffset.UtcNow);

        var decision = payload.Results[imageDigest];
        Assert.Equal("pass", decision.PolicyVerdict);
        Assert.True(decision.Signed);
        Assert.True(decision.HasSbomReferrers);
        Assert.True(decision.HasSbomLegacy);
        Assert.Empty(decision.Reasons);
        Assert.NotNull(decision.Rekor);
        Assert.Equal("rekor-uuid", decision.Rekor!.Uuid);
        Assert.True(decision.Rekor.Verified);
        Assert.NotNull(decision.Confidence);
        Assert.InRange(decision.Confidence!.Value, 0.0, 1.0);
        Assert.False(decision.Quieted.GetValueOrDefault());
        Assert.Null(decision.QuietedBy);
        Assert.NotNull(decision.BuildIds);
        Assert.Contains("1122aabbccddeeff00112233445566778899aabb", decision.BuildIds!);
        var metadataString = decision.Metadata;
        Console.WriteLine($"Runtime policy metadata: {metadataString ?? "<null>"}");
        Assert.False(string.IsNullOrWhiteSpace(metadataString));
        using var metadataDocument = JsonDocument.Parse(decision.Metadata!);
        Assert.True(metadataDocument.RootElement.TryGetProperty("heuristics", out _));
    }

    [Fact]
    public async Task RuntimePolicyEndpointFlagsUnsignedAndMissingSbom()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        const string imageDigest = "sha256:feedface";

        using (var scope = factory.Services.CreateScope())
        {
            var runtimeRepository = scope.ServiceProvider.GetRequiredService<RuntimeEventRepository>();
            var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>();

            const string policyYaml = """
                version: "1.0"
                rules: []
                """;
            await policyStore.SaveAsync(
                new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "baseline"),
                CancellationToken.None);

            // Intentionally skip artifacts/links to simulate missing metadata.
            await runtimeRepository.TruncateAsync(CancellationToken.None);
        }

        var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", new RuntimePolicyRequestDto
        {
            Namespace = "payments",
            Images = new[] { imageDigest }
        });

        Assert.Equal(HttpStatusCode.OK, response.StatusCode);
        var payload = await response.Content.ReadFromJsonAsync<RuntimePolicyResponseDto>();
        Assert.NotNull(payload);
        var decision = payload!.Results[imageDigest];

        Assert.Equal("fail", decision.PolicyVerdict);
        Assert.False(decision.Signed);
        Assert.False(decision.HasSbomReferrers);
        Assert.Contains("image.metadata.missing", decision.Reasons);
        Assert.Contains("unsigned", decision.Reasons);
        Assert.Contains("missing SBOM", decision.Reasons);
        Assert.NotNull(decision.Confidence);
        Assert.InRange(decision.Confidence!.Value, 0.0, 1.0);
        if (!string.IsNullOrWhiteSpace(decision.Metadata))
        {
            using var failureMetadata = JsonDocument.Parse(decision.Metadata!);
            if (failureMetadata.RootElement.TryGetProperty("heuristics", out var heuristicsElement))
            {
                var heuristics = heuristicsElement.EnumerateArray().Select(item => item.GetString()).ToArray();
                Assert.Contains("image.metadata.missing", heuristics);
                Assert.Contains("unsigned", heuristics);
            }
        }
    }

    [Fact]
    public async Task RuntimePolicyEndpointValidatesRequest()
    {
        using var factory = new ScannerApplicationFactory();
        using var client = factory.CreateClient();

        var request = new RuntimePolicyRequestDto
        {
            Images = Array.Empty<string>()
        };

        var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request);
        Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
    }

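    // Builds a minimal runtime event envelope; passing schemaVersion constructs the envelope by hand so
    // tests can exercise the unsupported-schema rejection path.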
    private static RuntimeEventEnvelope CreateEnvelope(
        string eventId,
        string? schemaVersion = null,
        string? imageDigest = null,
        string? buildId = null)
    {
        var digest = string.IsNullOrWhiteSpace(imageDigest) ? "sha256:deadbeef" : imageDigest;
        var runtimeEvent = new RuntimeEvent
        {
            EventId = eventId,
            When = DateTimeOffset.UtcNow,
            Kind = RuntimeEventKind.ContainerStart,
            Tenant = "tenant-alpha",
            Node = "node-a",
            Runtime = new RuntimeEngine
            {
                Engine = "containerd",
                Version = "1.7.0"
            },
            Workload = new RuntimeWorkload
            {
                Platform = "kubernetes",
                Namespace = "default",
                Pod = "api-123",
                Container = "api",
                ContainerId = "containerd://abc",
                ImageRef = $"ghcr.io/example/api@{digest}"
            },
            Delta = new RuntimeDelta
            {
                BaselineImageDigest = digest
            },
            Process = new RuntimeProcess
            {
                Pid = 123,
                Entrypoint = new[] { "/bin/start" },
                EntryTrace = Array.Empty<RuntimeEntryTrace>(),
                BuildId = buildId
            }
        };

        if (schemaVersion is null)
        {
            return RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent);
        }

        return new RuntimeEventEnvelope
        {
            SchemaVersion = schemaVersion,
            Event = runtimeEvent
        };
    }
}
@@ -1,5 +1,5 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Diagnostics.Metrics;
@@ -13,6 +13,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Analyzers.OS;
using StellaOps.Scanner.Analyzers.OS.Abstractions;
using StellaOps.Scanner.Analyzers.OS.Plugin;
using StellaOps.Scanner.Core.Contracts;
@@ -24,12 +25,12 @@ using StellaOps.Scanner.Worker.Diagnostics;
using StellaOps.Scanner.Worker.Processing;
using StellaOps.Scanner.Worker.Tests.TestInfrastructure;
using Xunit;
using WorkerOptions = StellaOps.Scanner.Worker.Options.ScannerWorkerOptions;

namespace StellaOps.Scanner.Worker.Tests;

public sealed class CompositeScanAnalyzerDispatcherTests
{
    [Fact]
    public async Task ExecuteAsync_RunsLanguageAnalyzers_StoresResults()
    {
@@ -67,6 +68,7 @@ public sealed class CompositeScanAnalyzerDispatcherTests
        serviceCollection.AddSingleton(TimeProvider.System);
        serviceCollection.AddSurfaceEnvironment(options => options.ComponentName = "Scanner.Worker");
        serviceCollection.AddSurfaceValidation();
        serviceCollection.AddSingleton<ISurfaceValidatorRunner, NoopSurfaceValidatorRunner>();
        serviceCollection.AddSurfaceFileCache(options => options.RootDirectory = cacheRoot.Path);
        serviceCollection.AddSurfaceSecrets();

@@ -145,42 +147,203 @@ public sealed class CompositeScanAnalyzerDispatcherTests
            services?.Dispose();
        }
    }

    private sealed class FakeOsCatalog : IOSAnalyzerPluginCatalog
    {
        public IReadOnlyCollection<IOSAnalyzerPlugin> Plugins => Array.Empty<IOSAnalyzerPlugin>();

        public void LoadFromDirectory(string directory, bool seal = true)
        {
        }

        public IReadOnlyList<IOSPackageAnalyzer> CreateAnalyzers(IServiceProvider services) => Array.Empty<IOSPackageAnalyzer>();
    }

    private sealed class FakeLanguageCatalog : ILanguageAnalyzerPluginCatalog
    {
        private readonly IReadOnlyList<ILanguageAnalyzer> _analyzers;

        public FakeLanguageCatalog(params ILanguageAnalyzer[] analyzers)
        {
            _analyzers = analyzers ?? Array.Empty<ILanguageAnalyzer>();
        }

        public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => Array.Empty<ILanguageAnalyzerPlugin>();

        public void LoadFromDirectory(string directory, bool seal = true)
        {
        }

        public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) => _analyzers;
    }

    private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer
    {
        public string Id => "lang.fake";

        public string DisplayName => "Fake Language Analyzer";

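    // Exercises the OS analyzer path end to end: seeds a dpkg status file, runs the dispatcher twice,
    // and reads the surface cache hit/miss counters published by the worker metrics meter.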
    [Fact]
    public async Task ExecuteAsync_RunsOsAnalyzers_UsesSurfaceCache()
    {
        using var rootfs = new TempDirectory();
        using var cacheRoot = new TempDirectory();

        Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", "https://surface.test");
        Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_BUCKET", "unit-test-bucket");
        Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_ROOT", cacheRoot.Path);
        Environment.SetEnvironmentVariable("SCANNER_SURFACE_SECRETS_PROVIDER", "inline");
        Environment.SetEnvironmentVariable("SCANNER_SURFACE_SECRETS_TENANT", "testtenant");
        Environment.SetEnvironmentVariable(
            "SURFACE_SECRET_TESTTENANT_SCANNERWORKEROSANALYZERS_REGISTRY_DEFAULT",
            Convert.ToBase64String(Encoding.UTF8.GetBytes("token-placeholder")));

        var dpkgStatusPath = Path.Combine(rootfs.Path, "var", "lib", "dpkg", "status");
        Directory.CreateDirectory(Path.GetDirectoryName(dpkgStatusPath)!);
        await File.WriteAllTextAsync(dpkgStatusPath, "Package: demo\nStatus: install ok installed\n", CancellationToken.None);

        var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            { ScanMetadataKeys.RootFilesystemPath, rootfs.Path },
            { ScanMetadataKeys.WorkspacePath, rootfs.Path },
        };

        var analyzer = new FakeOsAnalyzer();
        var osCatalog = new FakeOsCatalog(analyzer);
        var languageCatalog = new FakeLanguageCatalog();

        long hits = 0;
        long misses = 0;
        MeterListener? meterListener = null;
        ServiceProvider? services = null;
        try
        {
            var serviceCollection = new ServiceCollection();
            serviceCollection.AddSingleton<IConfiguration>(new ConfigurationBuilder().Build());
            serviceCollection.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug));
            serviceCollection.AddSingleton(TimeProvider.System);
            serviceCollection.AddSurfaceEnvironment(options => options.ComponentName = "Scanner.Worker");
            serviceCollection.AddSurfaceValidation();
            serviceCollection.AddSingleton<ISurfaceValidatorRunner, NoopSurfaceValidatorRunner>();
            serviceCollection.AddSurfaceFileCache(options => options.RootDirectory = cacheRoot.Path);
            serviceCollection.AddSurfaceSecrets();

            var metrics = new ScannerWorkerMetrics();
            serviceCollection.AddSingleton(metrics);

            meterListener = new MeterListener();

            meterListener.InstrumentPublished = (instrument, listener) =>
            {
                if (instrument.Meter.Name == ScannerWorkerInstrumentation.MeterName &&
                    (instrument.Name == "scanner_worker_os_cache_hits_total" || instrument.Name == "scanner_worker_os_cache_misses_total"))
                {
                    listener.EnableMeasurementEvents(instrument);
                }
            };

            meterListener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
            {
                if (instrument.Name == "scanner_worker_os_cache_hits_total")
                {
                    Interlocked.Add(ref hits, measurement);
                }
                else if (instrument.Name == "scanner_worker_os_cache_misses_total")
                {
                    Interlocked.Add(ref misses, measurement);
                }
            });

            meterListener.Start();

            services = serviceCollection.BuildServiceProvider();

            var scopeFactory = services.GetRequiredService<IServiceScopeFactory>();
            var loggerFactory = services.GetRequiredService<ILoggerFactory>();
            var options = Microsoft.Extensions.Options.Options.Create(new WorkerOptions());
            var dispatcher = new CompositeScanAnalyzerDispatcher(
                scopeFactory,
                osCatalog,
                languageCatalog,
                options,
                loggerFactory.CreateLogger<CompositeScanAnalyzerDispatcher>(),
                metrics,
                new TestCryptoHash());

            var lease = new TestJobLease(metadata);
            var context = new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);

            await dispatcher.ExecuteAsync(context, CancellationToken.None);

            var leaseSecond = new TestJobLease(metadata);
            var contextSecond = new ScanJobContext(leaseSecond, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);
            await dispatcher.ExecuteAsync(contextSecond, CancellationToken.None);

            Assert.Equal(1, analyzer.InvocationCount);
            Assert.True(context.Analysis.TryGet<Dictionary<string, OSPackageAnalyzerResult>>(ScanAnalysisKeys.OsPackageAnalyzers, out var results));
            Assert.Single(results);
            Assert.True(context.Analysis.TryGet<ImmutableArray<LayerComponentFragment>>(ScanAnalysisKeys.OsComponentFragments, out var fragments));
            Assert.False(fragments.IsDefaultOrEmpty);

            Assert.Equal(1, hits);
            Assert.Equal(1, misses);
        }
        finally
        {
            Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", null);
            Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_BUCKET", null);
            Environment.SetEnvironmentVariable("SCANNER_SURFACE_CACHE_ROOT", null);
            Environment.SetEnvironmentVariable("SCANNER_SURFACE_SECRETS_PROVIDER", null);
            Environment.SetEnvironmentVariable("SCANNER_SURFACE_SECRETS_TENANT", null);
            Environment.SetEnvironmentVariable("SURFACE_SECRET_TESTTENANT_SCANNERWORKEROSANALYZERS_REGISTRY_DEFAULT", null);
            meterListener?.Dispose();
            services?.Dispose();
        }
    }

    private sealed class FakeOsCatalog : IOSAnalyzerPluginCatalog
    {
        private readonly IReadOnlyList<IOSPackageAnalyzer> _analyzers;

        public FakeOsCatalog(params IOSPackageAnalyzer[] analyzers)
        {
            _analyzers = analyzers ?? Array.Empty<IOSPackageAnalyzer>();
        }

        public IReadOnlyCollection<IOSAnalyzerPlugin> Plugins => Array.Empty<IOSAnalyzerPlugin>();

        public void LoadFromDirectory(string directory, bool seal = true)
        {
        }

        public IReadOnlyList<IOSPackageAnalyzer> CreateAnalyzers(IServiceProvider services) => _analyzers;
    }

    private sealed class FakeLanguageCatalog : ILanguageAnalyzerPluginCatalog
    {
        private readonly IReadOnlyList<ILanguageAnalyzer> _analyzers;

        public FakeLanguageCatalog(params ILanguageAnalyzer[] analyzers)
        {
            _analyzers = analyzers ?? Array.Empty<ILanguageAnalyzer>();
        }

        public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => Array.Empty<ILanguageAnalyzerPlugin>();

        public void LoadFromDirectory(string directory, bool seal = true)
        {
        }

        public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) => _analyzers;
    }

    private sealed class NoopSurfaceValidatorRunner : ISurfaceValidatorRunner
    {
        public ValueTask<SurfaceValidationResult> RunAllAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default)
            => ValueTask.FromResult(SurfaceValidationResult.Success());

        public ValueTask EnsureAsync(SurfaceValidationContext context, CancellationToken cancellationToken = default)
            => ValueTask.CompletedTask;
    }

private sealed class FakeOsAnalyzer : IOSPackageAnalyzer
|
||||
{
|
||||
public string AnalyzerId => "dpkg";
|
||||
|
||||
public int InvocationCount { get; private set; }
|
||||
|
||||
private int _invocationCount;
|
||||
|
||||
public ValueTask<OSPackageAnalyzerResult> AnalyzeAsync(OSPackageAnalyzerContext context, CancellationToken cancellationToken)
|
||||
{
|
||||
Interlocked.Increment(ref _invocationCount);
|
||||
InvocationCount = _invocationCount;
|
||||
|
||||
var package = new OSPackageRecord(
|
||||
analyzerId: AnalyzerId,
|
||||
packageUrl: "pkg:deb/debian/demo@1.0?arch=amd64",
|
||||
name: "demo",
|
||||
version: "1.0",
|
||||
architecture: "amd64",
|
||||
evidenceSource: PackageEvidenceSource.DpkgStatus,
|
||||
files: new[] { new OSPackageFileEvidence("/usr/bin/demo") });
|
||||
|
||||
var telemetry = new OSAnalyzerTelemetry(TimeSpan.Zero, 1, 1);
|
||||
return ValueTask.FromResult(new OSPackageAnalyzerResult(AnalyzerId, new[] { package }, telemetry));
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer
|
||||
{
|
||||
public string Id => "lang.fake";
|
||||
|
||||
public string DisplayName => "Fake Language Analyzer";
|
||||
|
||||
public int InvocationCount { get; private set; }
|
||||
|
||||
public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken)
|
||||
@@ -198,69 +361,69 @@ public sealed class CompositeScanAnalyzerDispatcherTests
|
||||
|
||||
private int _invocationCount;
|
||||
}
|
||||
|
||||
private sealed class TestJobLease : IScanJobLease
|
||||
{
|
||||
private readonly Dictionary<string, string> _metadata;
|
||||
|
||||
public TestJobLease(Dictionary<string, string> metadata)
|
||||
{
|
||||
_metadata = metadata;
|
||||
JobId = Guid.NewGuid().ToString("n");
|
||||
ScanId = $"scan-{Guid.NewGuid():n}";
|
||||
Attempt = 1;
|
||||
EnqueuedAtUtc = DateTimeOffset.UtcNow.AddSeconds(-1);
|
||||
LeasedAtUtc = DateTimeOffset.UtcNow;
|
||||
LeaseDuration = TimeSpan.FromMinutes(5);
|
||||
}
|
||||
|
||||
public string JobId { get; }
|
||||
|
||||
public string ScanId { get; }
|
||||
|
||||
public int Attempt { get; }
|
||||
|
||||
public DateTimeOffset EnqueuedAtUtc { get; }
|
||||
|
||||
public DateTimeOffset LeasedAtUtc { get; }
|
||||
|
||||
public TimeSpan LeaseDuration { get; }
|
||||
|
||||
public IReadOnlyDictionary<string, string> Metadata => _metadata;
|
||||
|
||||
public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class TempDirectory : IDisposable
|
||||
{
|
||||
public TempDirectory()
|
||||
{
|
||||
Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"stellaops-test-{Guid.NewGuid():n}");
|
||||
Directory.CreateDirectory(Path);
|
||||
}
|
||||
|
||||
public string Path { get; }
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(Path))
|
||||
{
|
||||
Directory.Delete(Path, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
}
@@ -6,6 +6,7 @@ using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Processing.Surface;
|
||||
using Xunit;
|
||||
@@ -24,12 +25,16 @@ public sealed class HmacDsseEnvelopeSignerTests
|
||||
signing.KeyId = "scanner-hmac";
|
||||
});
|
||||
|
||||
var signer = new HmacDsseEnvelopeSigner(options, NullLogger<HmacDsseEnvelopeSigner>.Instance, new ServiceCollection().BuildServiceProvider());
|
||||
var signer = new HmacDsseEnvelopeSigner(
|
||||
options,
|
||||
DefaultCryptoHmac.CreateForTests(),
|
||||
NullLogger<HmacDsseEnvelopeSigner>.Instance,
|
||||
new ServiceCollection().BuildServiceProvider());
|
||||
var payload = Encoding.UTF8.GetBytes("{\"hello\":\"world\"}");
|
||||
|
||||
var envelope = await signer.SignAsync("application/json", payload, "test.kind", "root", view: null, CancellationToken.None);
|
||||
|
||||
var json = JsonDocument.Parse(envelope.Content.Span);
|
||||
var json = JsonDocument.Parse(envelope.Content);
|
||||
var sig = json.RootElement.GetProperty("signatures")[0].GetProperty("sig").GetString();
|
||||
|
||||
var expectedSig = ComputeExpectedSignature("application/json", payload, "a2V5LXNlY3JldA==");
|
||||
@@ -49,11 +54,15 @@ public sealed class HmacDsseEnvelopeSignerTests
|
||||
signing.AllowDeterministicFallback = true;
|
||||
});
|
||||
|
||||
var signer = new HmacDsseEnvelopeSigner(options, NullLogger<HmacDsseEnvelopeSigner>.Instance, new ServiceCollection().BuildServiceProvider());
|
||||
var signer = new HmacDsseEnvelopeSigner(
|
||||
options,
|
||||
DefaultCryptoHmac.CreateForTests(),
|
||||
NullLogger<HmacDsseEnvelopeSigner>.Instance,
|
||||
new ServiceCollection().BuildServiceProvider());
|
||||
var payload = Encoding.UTF8.GetBytes("abc");
|
||||
|
||||
var envelope = await signer.SignAsync("text/plain", payload, "kind", "root", view: null, CancellationToken.None);
|
||||
var json = JsonDocument.Parse(envelope.Content.Span);
|
||||
var json = JsonDocument.Parse(envelope.Content);
|
||||
var sig = json.RootElement.GetProperty("signatures")[0].GetProperty("sig").GetString();
|
||||
|
||||
// Deterministic signer encodes sha256 hex of payload as signature.
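// A minimal sketch of the comparison implied here (assumed; the actual assertion sits outside this hunk):
//   Assert.Equal(Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant(), sig);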
|
||||
|
||||
@@ -1,121 +1,128 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class LeaseHeartbeatServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task RunAsync_RespectsSafetyFactorBudget()
|
||||
{
|
||||
var options = new ScannerWorkerOptions
|
||||
{
|
||||
MaxConcurrentJobs = 1,
|
||||
};
|
||||
options.Queue.HeartbeatSafetyFactor = 3.0;
|
||||
options.Queue.MinHeartbeatInterval = TimeSpan.FromSeconds(5);
|
||||
options.Queue.MaxHeartbeatInterval = TimeSpan.FromSeconds(60);
|
||||
options.Queue.SetHeartbeatRetryDelays(Array.Empty<TimeSpan>());
|
||||
options.Queue.MaxHeartbeatJitterMilliseconds = 750;
|
||||
|
||||
var optionsMonitor = new StaticOptionsMonitor<ScannerWorkerOptions>(options);
|
||||
using var cts = new CancellationTokenSource();
|
||||
var scheduler = new RecordingDelayScheduler(cts);
|
||||
var lease = new TestJobLease(TimeSpan.FromSeconds(90));
|
||||
|
||||
var service = new LeaseHeartbeatService(TimeProvider.System, scheduler, optionsMonitor, NullLogger<LeaseHeartbeatService>.Instance);
|
||||
|
||||
await service.RunAsync(lease, cts.Token);
|
||||
|
||||
var delay = Assert.Single(scheduler.Delays);
|
||||
var expectedMax = TimeSpan.FromTicks((long)(lease.LeaseDuration.Ticks / Math.Max(3.0, options.Queue.HeartbeatSafetyFactor)));
|
||||
Assert.True(delay <= expectedMax, $"Heartbeat delay {delay} should stay within safety factor budget {expectedMax}.");
|
||||
Assert.True(delay >= options.Queue.MinHeartbeatInterval, $"Heartbeat delay {delay} should respect minimum interval {options.Queue.MinHeartbeatInterval}.");
|
||||
}
|
||||
|
||||
private sealed class RecordingDelayScheduler : IDelayScheduler
|
||||
{
|
||||
private readonly CancellationTokenSource _cts;
|
||||
|
||||
public RecordingDelayScheduler(CancellationTokenSource cts)
|
||||
{
|
||||
_cts = cts ?? throw new ArgumentNullException(nameof(cts));
|
||||
}
|
||||
|
||||
public List<TimeSpan> Delays { get; } = new();
|
||||
|
||||
public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken)
|
||||
{
|
||||
Delays.Add(delay);
|
||||
_cts.Cancel();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TestJobLease : IScanJobLease
|
||||
{
|
||||
public TestJobLease(TimeSpan leaseDuration)
|
||||
{
|
||||
LeaseDuration = leaseDuration;
|
||||
EnqueuedAtUtc = DateTimeOffset.UtcNow - leaseDuration;
|
||||
LeasedAtUtc = DateTimeOffset.UtcNow;
|
||||
}
|
||||
|
||||
public string JobId { get; } = Guid.NewGuid().ToString("n");
|
||||
|
||||
public string ScanId { get; } = $"scan-{Guid.NewGuid():n}";
|
||||
|
||||
public int Attempt { get; } = 1;
|
||||
|
||||
public DateTimeOffset EnqueuedAtUtc { get; }
|
||||
|
||||
public DateTimeOffset LeasedAtUtc { get; }
|
||||
|
||||
public TimeSpan LeaseDuration { get; }
|
||||
|
||||
public IReadOnlyDictionary<string, string> Metadata { get; } = new Dictionary<string, string>();
|
||||
|
||||
public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class StaticOptionsMonitor<TOptions> : IOptionsMonitor<TOptions>
|
||||
where TOptions : class
|
||||
{
|
||||
private readonly TOptions _value;
|
||||
|
||||
public StaticOptionsMonitor(TOptions value)
|
||||
{
|
||||
_value = value ?? throw new ArgumentNullException(nameof(value));
|
||||
}
|
||||
|
||||
public TOptions CurrentValue => _value;
|
||||
|
||||
public TOptions Get(string? name) => _value;
|
||||
|
||||
public IDisposable OnChange(Action<TOptions, string?> listener) => NullDisposable.Instance;
|
||||
|
||||
private sealed class NullDisposable : IDisposable
|
||||
{
|
||||
public static readonly NullDisposable Instance = new();
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Determinism;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class LeaseHeartbeatServiceTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task RunAsync_RespectsSafetyFactorBudget()
|
||||
{
|
||||
var options = new ScannerWorkerOptions
|
||||
{
|
||||
MaxConcurrentJobs = 1,
|
||||
};
|
||||
options.Queue.HeartbeatSafetyFactor = 3.0;
|
||||
options.Queue.MinHeartbeatInterval = TimeSpan.FromSeconds(5);
|
||||
options.Queue.MaxHeartbeatInterval = TimeSpan.FromSeconds(60);
|
||||
options.Queue.SetHeartbeatRetryDelays(Array.Empty<TimeSpan>());
|
||||
options.Queue.MaxHeartbeatJitterMilliseconds = 750;
|
||||
|
||||
var optionsMonitor = new StaticOptionsMonitor<ScannerWorkerOptions>(options);
|
||||
using var cts = new CancellationTokenSource();
|
||||
var scheduler = new RecordingDelayScheduler(cts);
|
||||
var lease = new TestJobLease(TimeSpan.FromSeconds(90));
|
||||
var randomProvider = new DeterministicRandomProvider(seed: 1337);
|
||||
|
||||
var service = new LeaseHeartbeatService(
|
||||
TimeProvider.System,
|
||||
scheduler,
|
||||
optionsMonitor,
|
||||
randomProvider,
|
||||
NullLogger<LeaseHeartbeatService>.Instance);
|
||||
|
||||
await service.RunAsync(lease, cts.Token);
|
||||
|
||||
var delay = Assert.Single(scheduler.Delays);
|
||||
var expectedMax = TimeSpan.FromTicks((long)(lease.LeaseDuration.Ticks / Math.Max(3.0, options.Queue.HeartbeatSafetyFactor)));
|
||||
Assert.True(delay <= expectedMax, $"Heartbeat delay {delay} should stay within safety factor budget {expectedMax}.");
|
||||
Assert.True(delay >= options.Queue.MinHeartbeatInterval, $"Heartbeat delay {delay} should respect minimum interval {options.Queue.MinHeartbeatInterval}.");
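// Concrete numbers behind these assertions: the lease above is 90 seconds and the safety factor is clamped
// to at least 3, so expectedMax works out to 30 seconds, while MinHeartbeatInterval is 5 seconds; the single
// recorded delay therefore has to land in the 5-30 second window.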
|
||||
}
|
||||
|
||||
private sealed class RecordingDelayScheduler : IDelayScheduler
|
||||
{
|
||||
private readonly CancellationTokenSource _cts;
|
||||
|
||||
public RecordingDelayScheduler(CancellationTokenSource cts)
|
||||
{
|
||||
_cts = cts ?? throw new ArgumentNullException(nameof(cts));
|
||||
}
|
||||
|
||||
public List<TimeSpan> Delays { get; } = new();
|
||||
|
||||
public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken)
|
||||
{
|
||||
Delays.Add(delay);
|
||||
_cts.Cancel();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TestJobLease : IScanJobLease
|
||||
{
|
||||
public TestJobLease(TimeSpan leaseDuration)
|
||||
{
|
||||
LeaseDuration = leaseDuration;
|
||||
EnqueuedAtUtc = DateTimeOffset.UtcNow - leaseDuration;
|
||||
LeasedAtUtc = DateTimeOffset.UtcNow;
|
||||
}
|
||||
|
||||
public string JobId { get; } = Guid.NewGuid().ToString("n");
|
||||
|
||||
public string ScanId { get; } = $"scan-{Guid.NewGuid():n}";
|
||||
|
||||
public int Attempt { get; } = 1;
|
||||
|
||||
public DateTimeOffset EnqueuedAtUtc { get; }
|
||||
|
||||
public DateTimeOffset LeasedAtUtc { get; }
|
||||
|
||||
public TimeSpan LeaseDuration { get; }
|
||||
|
||||
public IReadOnlyDictionary<string, string> Metadata { get; } = new Dictionary<string, string>();
|
||||
|
||||
public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
|
||||
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class StaticOptionsMonitor<TOptions> : IOptionsMonitor<TOptions>
|
||||
where TOptions : class
|
||||
{
|
||||
private readonly TOptions _value;
|
||||
|
||||
public StaticOptionsMonitor(TOptions value)
|
||||
{
|
||||
_value = value ?? throw new ArgumentNullException(nameof(value));
|
||||
}
|
||||
|
||||
public TOptions CurrentValue => _value;
|
||||
|
||||
public TOptions Get(string? name) => _value;
|
||||
|
||||
public IDisposable OnChange(Action<TOptions, string?> listener) => NullDisposable.Instance;
|
||||
|
||||
private sealed class NullDisposable : IDisposable
|
||||
{
|
||||
public static readonly NullDisposable Instance = new();
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,245 +1,245 @@
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Queue;
|
||||
using StellaOps.Scanner.Worker.Diagnostics;
|
||||
using StellaOps.Scanner.Worker.Hosting;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using StellaOps.Scanner.Worker.Tests.TestInfrastructure;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class RedisWorkerSmokeTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task Worker_CompletesJob_ViaRedisQueue()
|
||||
{
|
||||
var flag = Environment.GetEnvironmentVariable("STELLAOPS_REDIS_SMOKE");
|
||||
if (string.IsNullOrWhiteSpace(flag))
|
||||
{
|
||||
return;
|
||||
}
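// Opt-in smoke test: it returns early unless STELLAOPS_REDIS_SMOKE is set to a non-empty value, and it
// connects to STELLAOPS_REDIS_CONNECTION, defaulting to localhost:6379 when that variable is unset
// (for example, exporting STELLAOPS_REDIS_SMOKE=1 before running the test suite; shown only as an illustration).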
|
||||
|
||||
var redisConnection = Environment.GetEnvironmentVariable("STELLAOPS_REDIS_CONNECTION") ?? "localhost:6379";
|
||||
var streamName = $"scanner:jobs:{Guid.NewGuid():n}";
|
||||
var consumerGroup = $"worker-smoke-{Guid.NewGuid():n}";
|
||||
var configuration = BuildQueueConfiguration(redisConnection, streamName, consumerGroup);
|
||||
|
||||
var queueOptions = new ScannerQueueOptions();
|
||||
configuration.GetSection("scanner:queue").Bind(queueOptions);
|
||||
|
||||
var workerOptions = new ScannerWorkerOptions
|
||||
{
|
||||
MaxConcurrentJobs = 1,
|
||||
};
|
||||
workerOptions.Queue.HeartbeatSafetyFactor = 3.0;
|
||||
workerOptions.Queue.MinHeartbeatInterval = TimeSpan.FromSeconds(2);
|
||||
workerOptions.Queue.MaxHeartbeatInterval = TimeSpan.FromSeconds(8);
|
||||
workerOptions.Queue.SetHeartbeatRetryDelays(new[]
|
||||
{
|
||||
TimeSpan.FromMilliseconds(200),
|
||||
TimeSpan.FromMilliseconds(500),
|
||||
TimeSpan.FromSeconds(1),
|
||||
});
|
||||
|
||||
var services = new ServiceCollection();
|
||||
services.AddLogging(builder =>
|
||||
{
|
||||
builder.SetMinimumLevel(LogLevel.Debug);
|
||||
builder.AddConsole();
|
||||
});
|
||||
services.AddSingleton(TimeProvider.System);
|
||||
services.AddScannerQueue(configuration, "scanner:queue");
|
||||
services.AddSingleton<IScanJobSource, QueueBackedScanJobSource>();
|
||||
services.AddSingleton<QueueBackedScanJobSourceDependencies>();
|
||||
services.AddSingleton(queueOptions);
|
||||
services.AddSingleton<IScanAnalyzerDispatcher, SmokeAnalyzerDispatcher>();
|
||||
services.AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>();
|
||||
services.AddSingleton<ScannerWorkerMetrics>();
|
||||
services.AddSingleton<ScanProgressReporter>();
|
||||
services.AddSingleton<ScanJobProcessor>();
|
||||
services.AddSingleton<LeaseHeartbeatService>();
|
||||
services.AddSingleton<IDelayScheduler, SystemDelayScheduler>();
|
||||
services.AddSingleton<IOptionsMonitor<ScannerWorkerOptions>>(new StaticOptionsMonitor<ScannerWorkerOptions>(workerOptions));
|
||||
services.AddSingleton<ScannerWorkerHostedService>();
|
||||
|
||||
using var provider = services.BuildServiceProvider();
|
||||
var queue = provider.GetRequiredService<IScanQueue>();
|
||||
|
||||
var jobId = $"job-{Guid.NewGuid():n}";
|
||||
var scanId = $"scan-{Guid.NewGuid():n}";
|
||||
await queue.EnqueueAsync(new ScanQueueMessage(jobId, Encoding.UTF8.GetBytes("smoke"))
|
||||
{
|
||||
Attributes = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||
{
|
||||
["scanId"] = scanId,
|
||||
["queue"] = "redis",
|
||||
}
|
||||
});
|
||||
|
||||
var hostedService = provider.GetRequiredService<ScannerWorkerHostedService>();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
|
||||
|
||||
await hostedService.StartAsync(cts.Token);
|
||||
|
||||
var smokeObserver = provider.GetRequiredService<QueueBackedScanJobSourceDependencies>();
|
||||
await smokeObserver.JobCompleted.Task.WaitAsync(TimeSpan.FromSeconds(20));
|
||||
|
||||
await hostedService.StopAsync(CancellationToken.None);
|
||||
}
|
||||
|
||||
private static IConfiguration BuildQueueConfiguration(string connection, string stream, string consumerGroup)
|
||||
{
|
||||
return new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["scanner:queue:kind"] = "redis",
|
||||
["scanner:queue:defaultLeaseDuration"] = "00:00:30",
|
||||
["scanner:queue:redis:connectionString"] = connection,
|
||||
["scanner:queue:redis:streamName"] = stream,
|
||||
["scanner:queue:redis:consumerGroup"] = consumerGroup,
|
||||
["scanner:queue:redis:idempotencyKeyPrefix"] = $"{stream}:idemp:",
|
||||
["scanner:queue:redis:initializationTimeout"] = "00:00:10",
|
||||
})
|
||||
.Build();
|
||||
}
|
||||
|
||||
private sealed class SmokeAnalyzerDispatcher : IScanAnalyzerDispatcher
|
||||
{
|
||||
public ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
|
||||
{
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class QueueBackedScanJobSourceDependencies
|
||||
{
|
||||
public QueueBackedScanJobSourceDependencies()
|
||||
{
|
||||
JobCompleted = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
|
||||
}
|
||||
|
||||
public TaskCompletionSource JobCompleted { get; }
|
||||
}
|
||||
|
||||
private sealed class QueueBackedScanJobSource : IScanJobSource
|
||||
{
|
||||
private readonly IScanQueue _queue;
|
||||
private readonly ScannerQueueOptions _queueOptions;
|
||||
private readonly QueueBackedScanJobSourceDependencies _deps;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly string _consumerName = $"worker-smoke-{Guid.NewGuid():n}";
|
||||
|
||||
public QueueBackedScanJobSource(
|
||||
IScanQueue queue,
|
||||
ScannerQueueOptions queueOptions,
|
||||
QueueBackedScanJobSourceDependencies deps,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_queue = queue ?? throw new ArgumentNullException(nameof(queue));
|
||||
_queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions));
|
||||
_deps = deps ?? throw new ArgumentNullException(nameof(deps));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public async Task<IScanJobLease?> TryAcquireAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
var request = new QueueLeaseRequest(_consumerName, 1, _queueOptions.DefaultLeaseDuration);
|
||||
var leases = await _queue.LeaseAsync(request, cancellationToken);
if (leases.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new QueueBackedScanJobLease(
|
||||
leases[0],
|
||||
_queueOptions,
|
||||
_deps,
|
||||
_timeProvider.GetUtcNow());
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class QueueBackedScanJobLease : IScanJobLease
|
||||
{
|
||||
private readonly IScanQueueLease _lease;
|
||||
private readonly ScannerQueueOptions _options;
|
||||
private readonly QueueBackedScanJobSourceDependencies _deps;
|
||||
private readonly DateTimeOffset _leasedAt;
|
||||
private readonly IReadOnlyDictionary<string, string> _metadata;
|
||||
|
||||
public QueueBackedScanJobLease(
|
||||
IScanQueueLease lease,
|
||||
ScannerQueueOptions options,
|
||||
QueueBackedScanJobSourceDependencies deps,
|
||||
DateTimeOffset leasedAt)
|
||||
{
|
||||
_lease = lease ?? throw new ArgumentNullException(nameof(lease));
|
||||
_options = options ?? throw new ArgumentNullException(nameof(options));
|
||||
_deps = deps ?? throw new ArgumentNullException(nameof(deps));
|
||||
_leasedAt = leasedAt;
|
||||
|
||||
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||
{
|
||||
["queue"] = _options.Kind.ToString(),
|
||||
["queue.consumer"] = lease.Consumer,
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(lease.IdempotencyKey))
|
||||
{
|
||||
metadata["job.idempotency"] = lease.IdempotencyKey;
|
||||
}
|
||||
|
||||
foreach (var attribute in lease.Attributes)
|
||||
{
|
||||
metadata[attribute.Key] = attribute.Value;
|
||||
}
|
||||
|
||||
_metadata = metadata;
|
||||
}
|
||||
|
||||
public string JobId => _lease.JobId;
|
||||
|
||||
public string ScanId => _metadata.TryGetValue("scanId", out var scanId) ? scanId : _lease.JobId;
|
||||
|
||||
public int Attempt => _lease.Attempt;
|
||||
|
||||
public DateTimeOffset EnqueuedAtUtc => _lease.EnqueuedAt;
|
||||
|
||||
public DateTimeOffset LeasedAtUtc => _leasedAt;
|
||||
|
||||
public TimeSpan LeaseDuration => _lease.LeaseExpiresAt - _leasedAt;
|
||||
|
||||
public IReadOnlyDictionary<string, string> Metadata => _metadata;
|
||||
|
||||
public async ValueTask RenewAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
await _lease.RenewAsync(_options.DefaultLeaseDuration, cancellationToken);
|
||||
}
|
||||
|
||||
public async ValueTask CompleteAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
public async ValueTask CompleteAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
await _lease.AcknowledgeAsync(cancellationToken);
|
||||
_deps.JobCompleted.TrySetResult();
|
||||
}
|
||||
|
||||
public async ValueTask AbandonAsync(string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
_deps.JobCompleted.TrySetResult();
|
||||
}
|
||||
|
||||
public async ValueTask AbandonAsync(string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
await _lease.ReleaseAsync(QueueReleaseDisposition.Retry, cancellationToken);
|
||||
}
|
||||
|
||||
public async ValueTask PoisonAsync(string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
}
|
||||
|
||||
public async ValueTask PoisonAsync(string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
await _lease.DeadLetterAsync(reason, cancellationToken);
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync() => ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using StellaOps.Scanner.Worker.Processing.Replay;
|
||||
using Xunit;
|
||||
|
||||
@@ -14,27 +15,27 @@ public sealed class ReplaySealedBundleStageExecutorTests
|
||||
public async Task ExecuteAsync_SetsMetadata_WhenUriAndHashProvided()
|
||||
{
|
||||
var executor = new ReplaySealedBundleStageExecutor(NullLogger<ReplaySealedBundleStageExecutor>.Instance);
|
||||
var context = TestContexts.Create();
|
||||
context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
|
||||
context.Lease.Metadata["replay.bundle.sha256"] = "abc123";
|
||||
context.Lease.Metadata["determinism.policy"] = "rev-1";
|
||||
context.Lease.Metadata["determinism.feed"] = "feed-2";
|
||||
var context = TestContexts.Create(out var metadata);
|
||||
metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
|
||||
metadata["replay.bundle.sha256"] = "abc123";
|
||||
metadata["determinism.policy"] = "rev-1";
|
||||
metadata["determinism.feed"] = "feed-2";
|
||||
|
||||
await executor.ExecuteAsync(context, CancellationToken.None);
|
||||
|
||||
Assert.True(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var metadata));
|
||||
Assert.Equal("abc123", metadata.ManifestHash);
|
||||
Assert.Equal("cas://replay/input.tar.zst", metadata.BundleUri);
|
||||
Assert.Equal("rev-1", metadata.PolicySnapshotId);
|
||||
Assert.Equal("feed-2", metadata.FeedSnapshotId);
|
||||
Assert.True(context.Analysis.TryGet<ReplaySealedBundleMetadata>(ScanAnalysisKeys.ReplaySealedBundleMetadata, out var sealedMetadata));
|
||||
Assert.Equal("abc123", sealedMetadata.ManifestHash);
|
||||
Assert.Equal("cas://replay/input.tar.zst", sealedMetadata.BundleUri);
|
||||
Assert.Equal("rev-1", sealedMetadata.PolicySnapshotId);
|
||||
Assert.Equal("feed-2", sealedMetadata.FeedSnapshotId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteAsync_Skips_WhenHashMissing()
|
||||
{
|
||||
var executor = new ReplaySealedBundleStageExecutor(NullLogger<ReplaySealedBundleStageExecutor>.Instance);
|
||||
var context = TestContexts.Create();
|
||||
context.Lease.Metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
|
||||
var context = TestContexts.Create(out var metadata);
|
||||
metadata["replay.bundle.uri"] = "cas://replay/input.tar.zst";
|
||||
|
||||
await executor.ExecuteAsync(context, CancellationToken.None);
|
||||
|
||||
@@ -44,9 +45,10 @@ public sealed class ReplaySealedBundleStageExecutorTests
|
||||
|
||||
internal static class TestContexts
|
||||
{
|
||||
public static ScanJobContext Create()
|
||||
public static ScanJobContext Create(out Dictionary<string, string> metadata)
|
||||
{
|
||||
var lease = new TestScanJobLease();
|
||||
metadata = lease.MutableMetadata;
|
||||
return new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,26 +1,26 @@
using System;
|
||||
using System.Linq;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class ScannerWorkerOptionsValidatorTests
|
||||
{
|
||||
[Fact]
|
||||
public void Validate_Fails_WhenHeartbeatSafetyFactorBelowThree()
|
||||
{
|
||||
var options = new ScannerWorkerOptions();
|
||||
options.Queue.HeartbeatSafetyFactor = 2.5;
|
||||
|
||||
var validator = new ScannerWorkerOptionsValidator();
|
||||
var result = validator.Validate(string.Empty, options);
|
||||
|
||||
Assert.True(result.Failed, "Validation should fail when HeartbeatSafetyFactor < 3.");
|
||||
Assert.Contains(result.Failures, failure => failure.Contains("HeartbeatSafetyFactor", StringComparison.OrdinalIgnoreCase));
|
||||
}
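// The 3.0 floor matches the heartbeat budget exercised in LeaseHeartbeatServiceTests: heartbeats are
// scheduled at roughly LeaseDuration / HeartbeatSafetyFactor, so a factor below 3 would presumably leave
// too little renewal headroom inside a single lease window.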
|
||||
|
||||
[Fact]
|
||||
public void Validate_Succeeds_WhenHeartbeatSafetyFactorAtLeastThree()
|
||||
{
|
||||
var options = new ScannerWorkerOptions();
|
||||
|
||||
@@ -43,14 +43,17 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
listener.Start();
|
||||
|
||||
var hash = CreateCryptoHash();
|
||||
var manifestWriter = new TestSurfaceManifestWriter();
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
manifestWriter,
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(true, DateTimeOffset.Parse("2024-01-01T00:00:00Z"), 1337, true, 1),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
@@ -82,14 +85,17 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
listener.Start();
|
||||
|
||||
var hash = CreateCryptoHash();
|
||||
var manifestWriter = new TestSurfaceManifestWriter();
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
manifestWriter,
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
@@ -125,10 +131,14 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
var payloadMetrics = listener.Measurements
|
||||
.Where(m => m.InstrumentName == "scanner_worker_surface_payload_persisted_total")
|
||||
.ToArray();
|
||||
Assert.Equal(3, payloadMetrics.Length);
|
||||
Assert.Equal(7, payloadMetrics.Length);
|
||||
Assert.Contains(payloadMetrics, m => Equals("entrytrace.graph", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("entrytrace.ndjson", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("layer.fragments", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("composition.recipe", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("composition.recipe.dsse", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("layer.fragments.dsse", m["surface.kind"]));
|
||||
Assert.Contains(payloadMetrics, m => Equals("determinism.json", m["surface.kind"]));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -148,22 +158,28 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
new TestSurfaceManifestWriter(),
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
determinism);
|
||||
new NullBunPackageInventoryStore(),
|
||||
determinism,
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var context = CreateContext();
|
||||
context.Lease.Metadata["determinism.feed"] = "feed-001";
|
||||
context.Lease.Metadata["determinism.policy"] = "rev-77";
|
||||
var context = CreateContext(new Dictionary<string, string>
|
||||
{
|
||||
["determinism.feed"] = "feed-001",
|
||||
["determinism.policy"] = "rev-77"
|
||||
});
|
||||
PopulateAnalysis(context);
|
||||
|
||||
await executor.ExecuteAsync(context, CancellationToken.None);
|
||||
|
||||
var determinismPayload = publisher.LastRequest!.Payloads.Single(p => p.Kind == "determinism.json");
|
||||
var json = JsonDocument.Parse(determinismPayload.Content.Span);
|
||||
var json = JsonDocument.Parse(determinismPayload.Content);
|
||||
|
||||
Assert.True(json.RootElement.GetProperty("fixedClock").GetBoolean());
|
||||
Assert.Equal(42, json.RootElement.GetProperty("rngSeed").GetInt32());
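// determinism.json appears to capture the inputs pinned by DeterminismContext (the fixed clock and RNG seed
// asserted here), presumably so a replayed scan can reproduce the same surface output.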
|
||||
@@ -188,13 +204,16 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
new TestSurfaceManifestWriter(),
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null));
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var context = CreateContext();
|
||||
|
||||
@@ -230,8 +249,8 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
Assert.Contains(publisher.LastRequest!.Payloads, p => p.Kind == "entropy.report");
|
||||
Assert.Contains(publisher.LastRequest!.Payloads, p => p.Kind == "entropy.layer-summary");
|
||||
|
||||
// Two payloads + manifest persisted to cache.
|
||||
Assert.Equal(3, cache.Entries.Count);
|
||||
// Two payloads + determinism + manifest persisted to cache.
|
||||
Assert.Equal(6, cache.Entries.Count);
|
||||
}
|
||||
|
||||
private static ScanJobContext CreateContext(Dictionary<string, string>? metadata = null)
|
||||
@@ -310,12 +329,16 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
new TestSurfaceManifestWriter(),
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
packageStore);
|
||||
packageStore,
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var context = CreateContext();
|
||||
PopulateAnalysis(context);
|
||||
@@ -340,12 +363,16 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
new TestSurfaceManifestWriter(),
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
packageStore);
|
||||
packageStore,
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var context = CreateContext();
|
||||
PopulateAnalysis(context);
|
||||
@@ -407,15 +434,19 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
var cache = new RecordingSurfaceCache();
|
||||
var environment = new TestSurfaceEnvironment("tenant-a");
|
||||
var hash = CreateCryptoHash();
|
||||
var manifestWriter = new TestSurfaceManifestWriter();
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
manifestWriter,
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null));
|
||||
new NullBunPackageInventoryStore(),
|
||||
new DeterminismContext(false, DateTimeOffset.UnixEpoch, null, false, null),
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var context = CreateContext();
|
||||
var observationBytes = Encoding.UTF8.GetBytes("{\"entrypoints\":[\"mod.ts\"]}");
|
||||
@@ -461,13 +492,16 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
|
||||
var executor = new SurfaceManifestStageExecutor(
|
||||
publisher,
|
||||
new TestSurfaceManifestWriter(),
|
||||
cache,
|
||||
environment,
|
||||
metrics,
|
||||
NullLogger<SurfaceManifestStageExecutor>.Instance,
|
||||
hash,
|
||||
new NullRubyPackageInventoryStore(),
|
||||
determinism);
|
||||
new NullBunPackageInventoryStore(),
|
||||
determinism,
|
||||
new DeterministicDsseEnvelopeSigner());
|
||||
|
||||
var leaseMetadata = new Dictionary<string, string>
|
||||
{
|
||||
@@ -553,6 +587,46 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TestSurfaceManifestWriter : ISurfaceManifestWriter
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = false,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public int PublishCalls { get; private set; }
|
||||
|
||||
public SurfaceManifestDocument? LastDocument { get; private set; }
|
||||
|
||||
public Task<SurfaceManifestPublishResult> PublishAsync(
|
||||
SurfaceManifestDocument document,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
PublishCalls++;
|
||||
LastDocument = document;
|
||||
|
||||
var json = JsonSerializer.SerializeToUtf8Bytes(document, SerializerOptions);
|
||||
var digest = ComputeDigest(json);
|
||||
|
||||
return Task.FromResult(new SurfaceManifestPublishResult(
|
||||
ManifestDigest: digest,
|
||||
ManifestUri: $"cas://test/surface.manifests/{digest}",
|
||||
ArtifactId: $"surface-manifest::{digest}",
|
||||
Document: document,
|
||||
DeterminismMerkleRoot: document.DeterminismMerkleRoot));
|
||||
}
|
||||
|
||||
private static string ComputeDigest(ReadOnlySpan<byte> content)
|
||||
{
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
SHA256.HashData(content, hash);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TestSurfaceManifestPublisher : ISurfaceManifestPublisher
|
||||
{
|
||||
private readonly string _tenant;
|
||||
@@ -664,30 +738,7 @@ public sealed class SurfaceManifestStageExecutorTests
|
||||
}
|
||||
|
||||
private static ICryptoHash CreateCryptoHash()
|
||||
=> new DefaultCryptoHash(new StaticOptionsMonitor<CryptoHashOptions>(new CryptoHashOptions()), NullLogger<DefaultCryptoHash>.Instance);
|
||||
|
||||
private sealed class StaticOptionsMonitor<T> : IOptionsMonitor<T>
|
||||
{
|
||||
public StaticOptionsMonitor(T value)
|
||||
{
|
||||
CurrentValue = value;
|
||||
}
|
||||
|
||||
public T CurrentValue { get; }
|
||||
|
||||
public T Get(string? name) => CurrentValue;
|
||||
|
||||
public IDisposable OnChange(Action<T, string?> listener) => Disposable.Instance;
|
||||
|
||||
private sealed class Disposable : IDisposable
|
||||
{
|
||||
public static readonly Disposable Instance = new();
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
=> DefaultCryptoHash.CreateForTests();
|
||||
|
||||
private sealed class RecordingRubyPackageStore : IRubyPackageInventoryStore
|
||||
{
|
||||
|
||||
@@ -1,29 +1,29 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests.TestInfrastructure;
|
||||
|
||||
public sealed class StaticOptionsMonitor<TOptions> : IOptionsMonitor<TOptions>
|
||||
where TOptions : class
|
||||
{
|
||||
private readonly TOptions _value;
|
||||
|
||||
public StaticOptionsMonitor(TOptions value)
|
||||
{
|
||||
_value = value ?? throw new ArgumentNullException(nameof(value));
|
||||
}
|
||||
|
||||
public TOptions CurrentValue => _value;
|
||||
|
||||
public TOptions Get(string? name) => _value;
|
||||
|
||||
public IDisposable OnChange(Action<TOptions, string?> listener) => NullDisposable.Instance;
|
||||
|
||||
private sealed class NullDisposable : IDisposable
|
||||
{
|
||||
public static readonly NullDisposable Instance = new();
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,6 +35,30 @@ internal sealed class TestCryptoHash : ICryptoHash
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHash(data, GetAlgorithmForPurpose(purpose));

    public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashHex(data, GetAlgorithmForPurpose(purpose));

    public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
        => ComputeHashBase64(data, GetAlgorithmForPurpose(purpose));

    public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashAsync(stream, GetAlgorithmForPurpose(purpose), cancellationToken);

    public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
        => ComputeHashHexAsync(stream, GetAlgorithmForPurpose(purpose), cancellationToken);

    public string GetAlgorithmForPurpose(string purpose)
        => HashAlgorithms.Sha256;

    public string GetHashPrefix(string purpose)
        => "sha256:";

    public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
        => $"{GetHashPrefix(purpose)}{ComputeHashHexForPurpose(data, purpose)}";

    private static HashAlgorithm CreateAlgorithm(string? algorithmId)
    {
        return algorithmId?.ToUpperInvariant() switch
||||
|
||||
@@ -1,69 +1,85 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Reachability;
|
||||
using StellaOps.Scanner.Storage;
|
||||
using StellaOps.Scanner.Storage.ObjectStore;
|
||||
using StellaOps.Scanner.Worker.Diagnostics;
|
||||
using StellaOps.Scanner.Worker.Determinism;
|
||||
using StellaOps.Scanner.Worker.Hosting;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Processing;
|
||||
using StellaOps.Scanner.Worker.Processing.Replay;
|
||||
using StellaOps.Scanner.Worker.Tests.TestInfrastructure;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class WorkerBasicScanScenarioTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task DelayAsync_CompletesAfterTimeAdvance()
|
||||
{
|
||||
var scheduler = new ControlledDelayScheduler();
|
||||
var delayTask = scheduler.DelayAsync(TimeSpan.FromSeconds(5), CancellationToken.None);
|
||||
scheduler.AdvanceBy(TimeSpan.FromSeconds(5));
|
||||
await delayTask.WaitAsync(TimeSpan.FromSeconds(1));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Worker_CompletesJob_RecordsTelemetry_And_Heartbeats()
|
||||
{
|
||||
var fakeTime = new FakeTimeProvider();
|
||||
fakeTime.SetUtcNow(DateTimeOffset.UtcNow);
|
||||
|
||||
var options = new ScannerWorkerOptions
|
||||
{
|
||||
MaxConcurrentJobs = 1,
|
||||
};
|
||||
options.Telemetry.EnableTelemetry = false;
|
||||
options.Telemetry.EnableMetrics = true;
|
||||
|
||||
var optionsMonitor = new StaticOptionsMonitor<ScannerWorkerOptions>(options);
|
||||
var testLoggerProvider = new TestLoggerProvider();
|
||||
var lease = new TestJobLease(fakeTime);
|
||||
var jobSource = new TestJobSource(lease);
|
||||
var scheduler = new ControlledDelayScheduler();
|
||||
var analyzer = new TestAnalyzerDispatcher(scheduler);
|
||||
|
||||
using var listener = new WorkerMeterListener();
|
||||
listener.Start();
|
||||
|
||||
        using var services = new ServiceCollection()
            .AddLogging(builder =>
            {
                builder.ClearProviders();
                builder.AddProvider(testLoggerProvider);
                builder.SetMinimumLevel(LogLevel.Debug);
            })
            .AddSingleton(fakeTime)
            .AddSingleton<TimeProvider>(fakeTime)
            .AddSingleton<IOptionsMonitor<ScannerWorkerOptions>>(optionsMonitor)
            .AddSingleton<ScannerWorkerMetrics>()
            .AddSingleton<ScanProgressReporter>()
            .AddSingleton<ScanJobProcessor>()
            .AddSingleton<IDeterministicRandomProvider>(new DeterministicRandomProvider(seed: 1337))
            .AddSingleton<DeterministicRandomService>()
            .AddSingleton<IReachabilityUnionPublisherService, NullReachabilityUnionPublisherService>()
            .AddSingleton<ReplayBundleFetcher>(_ => new ReplayBundleFetcher(
                new NullArtifactObjectStore(),
                DefaultCryptoHash.CreateForTests(),
                new ScannerStorageOptions(),
                NullLogger<ReplayBundleFetcher>.Instance))
            .AddSingleton<LeaseHeartbeatService>()
            .AddSingleton<IDelayScheduler>(scheduler)
            .AddSingleton<IScanJobSource>(_ => jobSource)
@@ -72,147 +88,155 @@ public sealed class WorkerBasicScanScenarioTests
            .AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>()
            .AddSingleton<ScannerWorkerHostedService>()
            .BuildServiceProvider();

        var worker = services.GetRequiredService<ScannerWorkerHostedService>();

        await worker.StartAsync(CancellationToken.None);

        await jobSource.LeaseIssued.Task.WaitAsync(TimeSpan.FromSeconds(5));
        await Task.Yield();

        var spin = 0;
        while (!lease.Completed.Task.IsCompleted && spin++ < 24)
        {
            fakeTime.Advance(TimeSpan.FromSeconds(15));
            scheduler.AdvanceBy(TimeSpan.FromSeconds(15));
            await Task.Delay(1);
        }

        try
        {
            await lease.Completed.Task.WaitAsync(TimeSpan.FromSeconds(30));
        }
        catch (TimeoutException ex)
        {
            var stageLogs = string.Join(Environment.NewLine, testLoggerProvider
                .GetEntriesForCategory(typeof(ScanProgressReporter).FullName!)
                .Select(entry => entry.ToFormattedString()));

            throw new TimeoutException($"Worker did not complete within timeout. Logs:{Environment.NewLine}{stageLogs}", ex);
        }

        await worker.StopAsync(CancellationToken.None);

        Assert.True(lease.Completed.Task.IsCompletedSuccessfully, "Job should complete successfully.");
        Assert.Single(analyzer.Executions);

        var stageOrder = testLoggerProvider
            .GetEntriesForCategory(typeof(ScanProgressReporter).FullName!)
            .Where(entry => entry.EventId.Id == 1000)
            .Select(entry => entry.GetScopeProperty<string>("Stage"))
            .Where(stage => stage is not null)
            .Cast<string>()
            .ToArray();

        Assert.Equal(ScanStageNames.Ordered, stageOrder);

        var queueLatency = listener.Measurements.Where(m => m.InstrumentName == "scanner_worker_queue_latency_ms").ToArray();
        Assert.Single(queueLatency);
        Assert.True(queueLatency[0].Value > 0, "Queue latency should be positive.");

        var jobDuration = listener.Measurements.Where(m => m.InstrumentName == "scanner_worker_job_duration_ms").ToArray();
        Assert.Single(jobDuration);
        var jobDurationMs = jobDuration[0].Value;
        Assert.True(jobDurationMs > 0, "Job duration should be positive.");

        var stageDurations = listener.Measurements.Where(m => m.InstrumentName == "scanner_worker_stage_duration_ms").ToArray();
        Assert.Contains(stageDurations, m => m.Tags.TryGetValue("stage", out var stage) && Equals(stage, ScanStageNames.ExecuteAnalyzers));
    }

    private sealed class NullReachabilityUnionPublisherService : IReachabilityUnionPublisherService
    {
        public Task<ReachabilityUnionPublishResult> PublishAsync(ReachabilityUnionGraph graph, string analysisId, CancellationToken cancellationToken = default)
            => Task.FromResult(new ReachabilityUnionPublishResult("none", "none", 0));
    }

    private sealed class NullArtifactObjectStore : IArtifactObjectStore
    {
        public Task PutAsync(ArtifactObjectDescriptor descriptor, Stream content, CancellationToken cancellationToken)
            => Task.CompletedTask;

        public Task<Stream?> GetAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
            => Task.FromResult<Stream?>(null);

        public Task DeleteAsync(ArtifactObjectDescriptor descriptor, CancellationToken cancellationToken)
            => Task.CompletedTask;
    }

    private sealed class TestJobSource : IScanJobSource
    {
        private readonly TestJobLease _lease;
        private int _delivered;

        public TestJobSource(TestJobLease lease)
        {
            _lease = lease;
        }

        public TaskCompletionSource LeaseIssued { get; } = new(TaskCreationOptions.RunContinuationsAsynchronously);

        public Task<IScanJobLease?> TryAcquireAsync(CancellationToken cancellationToken)
        {
            if (Interlocked.Exchange(ref _delivered, 1) == 0)
            {
                LeaseIssued.TrySetResult();
                return Task.FromResult<IScanJobLease?>(_lease);
            }

            return Task.FromResult<IScanJobLease?>(null);
        }
    }

    private sealed class TestJobLease : IScanJobLease
    {
        private readonly FakeTimeProvider _timeProvider;
        private readonly Dictionary<string, string> _metadata = new()
        {
            { "queue", "tests" },
            { "job.kind", "basic" },
        };

        public TestJobLease(FakeTimeProvider timeProvider)
        {
            _timeProvider = timeProvider;
            EnqueuedAtUtc = _timeProvider.GetUtcNow() - TimeSpan.FromSeconds(5);
            LeasedAtUtc = _timeProvider.GetUtcNow();
        }

        public string JobId { get; } = Guid.NewGuid().ToString("n");

        public string ScanId { get; } = $"scan-{Guid.NewGuid():n}";

        public int Attempt { get; } = 1;

        public DateTimeOffset EnqueuedAtUtc { get; }

        public DateTimeOffset LeasedAtUtc { get; }

        public TimeSpan LeaseDuration { get; } = TimeSpan.FromSeconds(90);

        public IReadOnlyDictionary<string, string> Metadata => _metadata;

        public TaskCompletionSource Completed { get; } = new(TaskCreationOptions.RunContinuationsAsynchronously);

        public int RenewalCount => _renewalCount;

        public ValueTask RenewAsync(CancellationToken cancellationToken)
        {
            Interlocked.Increment(ref _renewalCount);
            return ValueTask.CompletedTask;
        }

        public ValueTask CompleteAsync(CancellationToken cancellationToken)
        {
            Completed.TrySetResult();
            return ValueTask.CompletedTask;
        }

        public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken)
        {
            Completed.TrySetException(new InvalidOperationException($"Abandoned: {reason}"));
            return ValueTask.CompletedTask;
        }

        public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken)
        {
            Completed.TrySetException(new InvalidOperationException($"Poisoned: {reason}"));
            return ValueTask.CompletedTask;
        }

        public ValueTask DisposeAsync() => ValueTask.CompletedTask;

        private int _renewalCount;
    }

@@ -227,191 +251,191 @@ public sealed class WorkerBasicScanScenarioTests
        private readonly IDelayScheduler _scheduler;

        public TestAnalyzerDispatcher(IDelayScheduler scheduler)
        {
            _scheduler = scheduler;
        }

        public List<string> Executions { get; } = new();

        public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
        {
            Executions.Add(context.JobId);
            await _scheduler.DelayAsync(TimeSpan.FromSeconds(45), cancellationToken);
        }
    }

    private sealed class ControlledDelayScheduler : IDelayScheduler
    {
        private readonly object _lock = new();
        private readonly SortedDictionary<double, List<ScheduledDelay>> _scheduled = new();
        private double _currentMilliseconds;

        public Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken)
        {
            if (delay <= TimeSpan.Zero)
            {
                return Task.CompletedTask;
            }

            var tcs = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);
            var scheduled = new ScheduledDelay(tcs, cancellationToken);
            lock (_lock)
            {
                var due = _currentMilliseconds + delay.TotalMilliseconds;
                if (!_scheduled.TryGetValue(due, out var list))
                {
                    list = new List<ScheduledDelay>();
                    _scheduled.Add(due, list);
                }

                list.Add(scheduled);
            }

            return scheduled.Task;
        }

        public void AdvanceBy(TimeSpan delta)
        {
            lock (_lock)
            {
                _currentMilliseconds += delta.TotalMilliseconds;
                var dueKeys = _scheduled.Keys.Where(key => key <= _currentMilliseconds).ToList();
                foreach (var due in dueKeys)
                {
                    foreach (var scheduled in _scheduled[due])
                    {
                        scheduled.Complete();
                    }

                    _scheduled.Remove(due);
                }
            }
        }

        private sealed class ScheduledDelay
        {
            private readonly TaskCompletionSource<object?> _tcs;
            private readonly CancellationTokenRegistration _registration;

            public ScheduledDelay(TaskCompletionSource<object?> tcs, CancellationToken cancellationToken)
            {
                _tcs = tcs;
                if (cancellationToken.CanBeCanceled)
                {
                    _registration = cancellationToken.Register(state =>
                    {
                        var source = (TaskCompletionSource<object?>)state!;
                        source.TrySetCanceled(cancellationToken);
                    }, tcs);
                }
            }

            public Task Task => _tcs.Task;

            public void Complete()
            {
                _registration.Dispose();
                _tcs.TrySetResult(null);
            }
        }
    }

    private sealed class StaticOptionsMonitor<TOptions> : IOptionsMonitor<TOptions>
        where TOptions : class
    {
        private readonly TOptions _value;

        public StaticOptionsMonitor(TOptions value)
        {
            _value = value;
        }

        public TOptions CurrentValue => _value;

        public TOptions Get(string? name) => _value;

        public IDisposable OnChange(Action<TOptions, string?> listener) => NullDisposable.Instance;

        private sealed class NullDisposable : IDisposable
        {
            public static readonly NullDisposable Instance = new();

            public void Dispose()
            {
            }
        }
    }

    private sealed class TestLoggerProvider : ILoggerProvider
    {
        private readonly ConcurrentQueue<TestLogEntry> _entries = new();

        public ILogger CreateLogger(string categoryName) => new TestLogger(categoryName, _entries);

        public void Dispose()
        {
        }

        public IEnumerable<TestLogEntry> GetEntriesForCategory(string categoryName)
            => _entries.Where(entry => entry.Category == categoryName);

        private sealed class TestLogger : ILogger
        {
            private readonly string _category;
            private readonly ConcurrentQueue<TestLogEntry> _entries;

            public TestLogger(string category, ConcurrentQueue<TestLogEntry> entries)
            {
                _category = category;
                _entries = entries;
            }

            public IDisposable? BeginScope<TState>(TState state) where TState : notnull => NullDisposable.Instance;

            public bool IsEnabled(LogLevel logLevel) => true;

            public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter)
            {
                _entries.Enqueue(new TestLogEntry(_category, logLevel, eventId, state, exception));
            }
        }

        private sealed class NullDisposable : IDisposable
        {
            public static readonly NullDisposable Instance = new();

            public void Dispose()
            {
            }
        }
    }

    public sealed record TestLogEntry(string Category, LogLevel Level, EventId EventId, object? State, Exception? Exception)
    {
        public T? GetScopeProperty<T>(string name)
        {
            if (State is not IEnumerable<KeyValuePair<string, object?>> state)
            {
                return default;
            }

            foreach (var kvp in state)
            {
                if (string.Equals(kvp.Key, name, StringComparison.OrdinalIgnoreCase) && kvp.Value is T value)
                {
                    return value;
                }
            }

            return default;
        }

        public string ToFormattedString()
        {
            var properties = State is IEnumerable<KeyValuePair<string, object?>> kvps
                ? string.Join(", ", kvps.Select(kvp => $"{kvp.Key}={kvp.Value}"))
                : State?.ToString() ?? string.Empty;

            var exceptionPart = Exception is null ? string.Empty : $" Exception={Exception.GetType().Name}: {Exception.Message}";
            return $"[{Level}] {Category} ({EventId.Id}) {properties}{exceptionPart}";
        }
    }
}
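WorkerMeterListener, which the test above uses to capture the scanner_worker_* measurements, is not included in this diff. A minimal sketch of what such a helper could look like, built on System.Diagnostics.Metrics.MeterListener; the type names, the CapturedMeasurement shape, and the meter-name prefix here are assumptions, not the actual implementation:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.Metrics;

// Hypothetical sketch only; the real WorkerMeterListener may differ.
internal sealed record CapturedMeasurement(string InstrumentName, double Value, IReadOnlyDictionary<string, object?> Tags);

internal sealed class SketchMeterListener : IDisposable
{
    private readonly MeterListener _listener = new();
    private readonly ConcurrentQueue<CapturedMeasurement> _measurements = new();

    public IReadOnlyCollection<CapturedMeasurement> Measurements => _measurements;

    public void Start()
    {
        // Subscribe to every instrument published by meters that look like scanner-worker meters (assumed prefix).
        _listener.InstrumentPublished = (instrument, listener) =>
        {
            if (instrument.Meter.Name.StartsWith("StellaOps.Scanner.Worker", StringComparison.Ordinal))
            {
                listener.EnableMeasurementEvents(instrument);
            }
        };

        // Histograms may record double or long; register a callback per numeric type that matters.
        _listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => Capture(instrument, value, tags));
        _listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => Capture(instrument, value, tags));
        _listener.Start();
    }

    public void Dispose() => _listener.Dispose();

    private void Capture(Instrument instrument, double value, ReadOnlySpan<KeyValuePair<string, object?>> tags)
    {
        var tagMap = new Dictionary<string, object?>();
        foreach (var tag in tags)
        {
            tagMap[tag.Key] = tag.Value;
        }

        _measurements.Enqueue(new CapturedMeasurement(instrument.Name, value, tagMap));
    }
}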