@@ -0,0 +1,7 @@
namespace StellaOps.Scanner.WebService.Contracts;

public sealed record ReplayAttachRequest(
    string ManifestHash,
    IReadOnlyList<ReplayBundleStatusDto> Bundles);

public sealed record ReplayAttachResponse(string Status);
@@ -7,8 +7,19 @@ public sealed record ScanStatusResponse(
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? FailureReason,
    SurfacePointersDto? Surface);
    SurfacePointersDto? Surface,
    ReplayStatusDto? Replay);

public sealed record ScanStatusTarget(
    string? Reference,
    string? Digest);

public sealed record ReplayStatusDto(
    string ManifestHash,
    IReadOnlyList<ReplayBundleStatusDto> Bundles);

public sealed record ReplayBundleStatusDto(
    string Type,
    string Digest,
    string CasUri,
    long SizeBytes);
@@ -1,9 +1,20 @@
namespace StellaOps.Scanner.WebService.Domain;

public sealed record ScanSnapshot(
    ScanId ScanId,
    ScanTarget Target,
    ScanStatus Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? FailureReason);

public sealed record ScanSnapshot(
    ScanId ScanId,
    ScanTarget Target,
    ScanStatus Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    string? FailureReason,
    ReplayArtifacts? Replay);

public sealed record ReplayArtifacts(
    string ManifestHash,
    IReadOnlyList<ReplayBundleSummary> Bundles);

public sealed record ReplayBundleSummary(
    string Type,
    string Digest,
    string CasUri,
    long SizeBytes);
@@ -0,0 +1,53 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Endpoints;

internal static class ReplayEndpoints
{
    public static void MapReplayEndpoints(this RouteGroupBuilder apiGroup)
    {
        var replay = apiGroup.MapGroup("/replay");

        replay.MapPost("/{scanId}/attach", HandleAttachAsync)
            .WithName("scanner.replay.attach")
            .Produces<ReplayAttachResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces(StatusCodes.Status400BadRequest);
    }

    private static async Task<IResult> HandleAttachAsync(
        string scanId,
        ReplayAttachRequest request,
        IScanCoordinator coordinator,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        if (!ScanId.TryParse(scanId, out var parsed))
        {
            return Results.BadRequest("invalid scan id");
        }

        if (string.IsNullOrWhiteSpace(request.ManifestHash) || request.Bundles is null || request.Bundles.Count == 0)
        {
            return Results.BadRequest("manifest hash and bundles are required");
        }

        var replay = new ReplayArtifacts(
            request.ManifestHash,
            request.Bundles
                .Select(b => new ReplayBundleSummary(b.Type, b.Digest, b.CasUri, b.SizeBytes))
                .ToList());

        var attached = await coordinator.AttachReplayAsync(parsed, replay, cancellationToken).ConfigureAwait(false);
        if (!attached)
        {
            return Results.NotFound();
        }

        return Results.Ok(new ReplayAttachResponse("attached"));
    }
}
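For orientation, a minimal client-side sketch of the new attach endpoint (not part of this commit). The /api/v1 prefix mirrors the integration tests later in this change; the host and digest values are placeholders:

using System;
using System.Net.Http;
using System.Net.Http.Json;
using StellaOps.Scanner.WebService.Contracts;

// Hypothetical caller; assumes the scanner API group is mounted under /api/v1.
var scanId = "scan-1";
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.internal.example") };

var payload = new ReplayAttachRequest(
    ManifestHash: "sha256:deadbeef",
    Bundles: new[]
    {
        new ReplayBundleStatusDto("input", "sha256:cafef00d", "cas://replay/aa/input.tar.zst", 128),
        new ReplayBundleStatusDto("output", "sha256:feedface", "cas://replay/bb/output.tar.zst", 256)
    });

var response = await client.PostAsJsonAsync($"/api/v1/replay/{scanId}/attach", payload);
// Expect 200 with {"status":"attached"}, 404 for an unknown scan id, 400 when fields are missing.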
@@ -203,7 +203,8 @@ internal static class ScanEndpoints
            CreatedAt: snapshot.CreatedAt,
            UpdatedAt: snapshot.UpdatedAt,
            FailureReason: snapshot.FailureReason,
            Surface: surfacePointers);
            Surface: surfacePointers,
            Replay: snapshot.Replay is null ? null : MapReplay(snapshot.Replay));

        return Json(response, StatusCodes.Status200OK);
    }
@@ -283,6 +284,15 @@ internal static class ScanEndpoints
        return Results.Empty;
    }

    private static ReplayStatusDto MapReplay(ReplayArtifacts replay)
    {
        return new ReplayStatusDto(
            ManifestHash: replay.ManifestHash,
            Bundles: replay.Bundles
                .Select(b => new ReplayBundleStatusDto(b.Type, b.Digest, b.CasUri, b.SizeBytes))
                .ToList());
    }

    private static async Task<IResult> HandleEntryTraceAsync(
        string scanId,
@@ -31,9 +31,11 @@ using StellaOps.Scanner.WebService.Hosting;
using StellaOps.Scanner.WebService.Options;
using StellaOps.Scanner.WebService.Services;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Replay;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Extensions;
using StellaOps.Scanner.Storage.Mongo;
using StellaOps.Scanner.WebService.Endpoints;
using StellaOps.Scanner.WebService.Options;

var builder = WebApplication.CreateBuilder(args);
@@ -83,13 +85,14 @@ builder.Services.AddScannerCache(builder.Configuration);
builder.Services.AddSingleton<ServiceStatus>();
builder.Services.AddHttpContextAccessor();
builder.Services.AddSingleton<ScanProgressStream>();
builder.Services.AddSingleton<IScanProgressPublisher>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
builder.Services.AddSingleton<IScanProgressPublisher>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
builder.Services.AddSingleton<PolicySnapshotStore>();
builder.Services.AddSingleton<PolicyPreviewService>();
builder.Services.AddSingleton<IRecordModeService, RecordModeService>();
builder.Services.AddStellaOpsCrypto();
builder.Services.AddBouncyCastleEd25519Provider();
builder.Services.AddSingleton<IReportSigner, ReportSigner>();
@@ -386,6 +389,7 @@ if (app.Environment.IsEnvironment("Testing"))
}

apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
apiGroup.MapReplayEndpoints();

if (resolvedOptions.Features.EnablePolicyPreview)
{
@@ -0,0 +1,35 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Replay;

internal interface IRecordModeService
{
    Task<(ReplayRunRecord Run, IReadOnlyList<ReplayBundleRecord> Bundles)> BuildAsync(
        string scanId,
        ReplayManifest manifest,
        ReplayBundleWriteResult inputBundle,
        ReplayBundleWriteResult outputBundle,
        string sbomDigest,
        string findingsDigest,
        string? vexDigest = null,
        string? logDigest = null,
        IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null);

    Task<ReplayArtifacts?> AttachAsync(
        ScanId scanId,
        ReplayManifest manifest,
        ReplayBundleWriteResult inputBundle,
        ReplayBundleWriteResult outputBundle,
        string sbomDigest,
        string findingsDigest,
        IScanCoordinator coordinator,
        string? vexDigest = null,
        string? logDigest = null,
        IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null,
        CancellationToken cancellationToken = default);
}
@@ -0,0 +1,104 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Core.Replay;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Replay;

/// <summary>
/// Prepares replay run metadata from WebService scan results. This is a thin façade that will be invoked
/// once record-mode wiring lands in the scan pipeline.
/// </summary>
internal sealed class RecordModeService : IRecordModeService
{
    private readonly RecordModeAssembler _assembler;

    public RecordModeService(TimeProvider? timeProvider = null)
    {
        _assembler = new RecordModeAssembler(timeProvider);
    }

    public Task<(ReplayRunRecord Run, IReadOnlyList<ReplayBundleRecord> Bundles)> BuildAsync(
        string scanId,
        ReplayManifest manifest,
        ReplayBundleWriteResult inputBundle,
        ReplayBundleWriteResult outputBundle,
        string sbomDigest,
        string findingsDigest,
        string? vexDigest = null,
        string? logDigest = null,
        IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        var run = _assembler.BuildRun(scanId, manifest, sbomDigest, findingsDigest, vexDigest, logDigest);
        var bundles = _assembler.BuildBundles(inputBundle, outputBundle, additionalBundles);

        return Task.FromResult((run, bundles));
    }

    public async Task<ReplayArtifacts?> AttachAsync(
        ScanId scanId,
        ReplayManifest manifest,
        ReplayBundleWriteResult inputBundle,
        ReplayBundleWriteResult outputBundle,
        string sbomDigest,
        string findingsDigest,
        IScanCoordinator coordinator,
        string? vexDigest = null,
        string? logDigest = null,
        IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(coordinator);

        var (run, bundles) = await BuildAsync(
            scanId.Value,
            manifest,
            inputBundle,
            outputBundle,
            sbomDigest,
            findingsDigest,
            vexDigest,
            logDigest,
            additionalBundles).ConfigureAwait(false);

        var replay = BuildArtifacts(run.ManifestHash, bundles);
        var attached = await coordinator.AttachReplayAsync(scanId, replay, cancellationToken).ConfigureAwait(false);
        return attached ? replay : null;
    }

    private static ReplayArtifacts BuildArtifacts(string manifestHash, IReadOnlyList<ReplayBundleRecord> bundles)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestHash);
        ArgumentNullException.ThrowIfNull(bundles);

        var summaries = bundles
            .Select(bundle => new ReplayBundleSummary(
                bundle.Type,
                NormalizeDigest(bundle.Id),
                bundle.Location,
                bundle.Size))
            .ToList();

        return new ReplayArtifacts(manifestHash, summaries);
    }

    private static string NormalizeDigest(string digest)
    {
        if (string.IsNullOrWhiteSpace(digest))
        {
            return string.Empty;
        }

        var trimmed = digest.Trim().ToLowerInvariant();
        return trimmed.StartsWith("sha256:", StringComparison.Ordinal)
            ? trimmed
            : $"sha256:{trimmed}";
    }
}
@@ -9,4 +9,6 @@ public interface IScanCoordinator
    ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken);

    ValueTask<ScanSnapshot?> TryFindByTargetAsync(string? reference, string? digest, CancellationToken cancellationToken);

    ValueTask<bool> AttachReplayAsync(ScanId scanId, ReplayArtifacts replay, CancellationToken cancellationToken);
}
@@ -46,8 +46,9 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
                normalizedTarget,
                ScanStatus.Pending,
                now,
                now,
                null)),
                now,
                null,
                null)),
            (_, existing) =>
            {
                if (submission.Force)
@@ -72,8 +73,8 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
        return ValueTask.FromResult(new ScanSubmissionResult(entry.Snapshot, created));
    }

    public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
    {
    public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
    {
        if (scans.TryGetValue(scanId.Value, out var entry))
        {
            return ValueTask.FromResult<ScanSnapshot?>(entry.Snapshot);
@@ -109,6 +110,30 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
        return ValueTask.FromResult<ScanSnapshot?>(null);
    }

    public ValueTask<bool> AttachReplayAsync(ScanId scanId, ReplayArtifacts replay, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(replay);

        if (!scans.TryGetValue(scanId.Value, out var existing))
        {
            return ValueTask.FromResult(false);
        }

        var updated = existing.Snapshot with
        {
            Replay = replay,
            UpdatedAt = timeProvider.GetUtcNow()
        };

        scans[scanId.Value] = new ScanEntry(updated);
        progressPublisher.Publish(scanId, updated.Status.ToString(), "replay-attached", new Dictionary<string, object?>
        {
            ["replay.manifest"] = replay.ManifestHash,
            ["replay.bundleCount"] = replay.Bundles.Count
        });
        return ValueTask.FromResult(true);
    }

    private void IndexTarget(string scanId, ScanTarget target)
    {
        if (!string.IsNullOrWhiteSpace(target.Digest))
@@ -33,6 +33,8 @@
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
    <ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,141 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Entropy;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Worker.Utilities;

namespace StellaOps.Scanner.Worker.Processing.Entropy;

/// <summary>
/// Computes entropy reports for executables and blobs and stores them in the analysis store
/// for downstream evidence emission.
/// </summary>
public sealed class EntropyStageExecutor : IScanStageExecutor
{
    private readonly ILogger<EntropyStageExecutor> _logger;
    private readonly EntropyReportBuilder _reportBuilder;

    public EntropyStageExecutor(ILogger<EntropyStageExecutor> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _reportBuilder = new EntropyReportBuilder();
    }

    public string StageName => ScanStageNames.EmitReports;

    public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        // Expect analyzer stage to have populated filesystem snapshots.
        if (!context.Analysis.TryGet<IReadOnlyList<ScanFileEntry>>(ScanAnalysisKeys.FileEntries, out var files) || files is null)
        {
            _logger.LogDebug("No file entries available; skipping entropy analysis.");
            return;
        }

        var reports = new List<EntropyFileReport>();
        foreach (var file in files)
        {
            if (!ShouldAnalyze(file))
            {
                continue;
            }

            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                var data = await ReadFileAsync(file.Path, cancellationToken).ConfigureAwait(false);
                var flags = DeriveFlags(file);
                var report = _reportBuilder.BuildFile(file.Path, data, flags);
                reports.Add(report);
            }
            catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
            {
                _logger.LogDebug(ex, "Skipping entropy for {Path}: {Reason}", file.Path, ex.Message);
            }
        }

        if (reports.Count == 0)
        {
            _logger.LogDebug("Entropy analysis produced no reports.");
            return;
        }

        var layerDigest = context.Lease.LayerDigest ?? string.Empty;
        var layerSize = files.Sum(f => f.SizeBytes);
        var imageOpaqueBytes = reports.Sum(r => r.OpaqueBytes);
        var imageTotalBytes = files.Sum(f => f.SizeBytes);

        var (summary, imageRatio) = _reportBuilder.BuildLayerSummary(
            layerDigest,
            reports,
            layerSize,
            imageOpaqueBytes,
            imageTotalBytes);

        var entropyReport = new EntropyReport(
            ImageDigest: context.Lease.ImageDigest ?? string.Empty,
            LayerDigest: layerDigest,
            Files: reports,
            ImageOpaqueRatio: imageRatio);

        context.Analysis.Set(ScanAnalysisKeys.EntropyReport, entropyReport);
        context.Analysis.Set(ScanAnalysisKeys.EntropyLayerSummary, summary);

        _logger.LogInformation(
            "Entropy report captured for layer {Layer}: opaqueBytes={OpaqueBytes} ratio={Ratio:F2}",
            layerDigest,
            summary.OpaqueBytes,
            summary.OpaqueRatio);
    }

    private static bool ShouldAnalyze(ScanFileEntry file)
    {
        if (file is null || file.SizeBytes < 16 * 1024)
        {
            return false;
        }

        return file.Kind switch
        {
            "elf" => true,
            "pe" => true,
            "mach-o" => true,
            "blob" => true,
            _ => false
        };
    }

    private static IEnumerable<string> DeriveFlags(ScanFileEntry file)
    {
        if (file?.Metadata is null)
        {
            yield break;
        }

        if (file.Metadata.TryGetValue("stripped", out var stripped) && stripped == "true")
        {
            yield return "stripped";
        }

        if (file.Metadata.TryGetValue("packer", out var packer) && !string.IsNullOrWhiteSpace(packer))
        {
            yield return $"packer:{packer}";
        }
    }

    private static async Task<byte[]> ReadFileAsync(string path, CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(path);
        using var buffer = new MemoryStream();
        await stream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        return buffer.ToArray();
    }
}
@@ -57,9 +57,9 @@ public sealed class ScanJobProcessor

        foreach (var stage in ScanStageNames.Ordered)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (!_executors.TryGetValue(stage, out var executor))
            cancellationToken.ThrowIfCancellationRequested();

            if (!_executors.TryGetValue(stage, out var executor))
            {
                continue;
            }
@@ -5,19 +5,21 @@ namespace StellaOps.Scanner.Worker.Processing;

public static class ScanStageNames
{
    public const string ResolveImage = "resolve-image";
    public const string PullLayers = "pull-layers";
    public const string BuildFilesystem = "build-filesystem";
    public const string ExecuteAnalyzers = "execute-analyzers";
    public const string ComposeArtifacts = "compose-artifacts";
    public const string EmitReports = "emit-reports";

    public static readonly IReadOnlyList<string> Ordered = new[]
    {
        ResolveImage,
        PullLayers,
        BuildFilesystem,
        ExecuteAnalyzers,
        ComposeArtifacts,
        EmitReports,
    };
}
    public const string PullLayers = "pull-layers";
    public const string BuildFilesystem = "build-filesystem";
    public const string ExecuteAnalyzers = "execute-analyzers";
    public const string ComposeArtifacts = "compose-artifacts";
    public const string EmitReports = "emit-reports";
    public const string Entropy = "entropy";

    public static readonly IReadOnlyList<string> Ordered = new[]
    {
        ResolveImage,
        PullLayers,
        BuildFilesystem,
        ExecuteAnalyzers,
        ComposeArtifacts,
        Entropy,
        EmitReports,
    };
}
@@ -85,6 +85,7 @@ builder.Services.AddSingleton<IScanStageExecutor, RegistrySecretStageExecutor>()
builder.Services.AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>();
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityBuildStageExecutor>();
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityPublishStageExecutor>();
builder.Services.AddSingleton<IScanStageExecutor, Entropy.EntropyStageExecutor>();

builder.Services.AddSingleton<ScannerWorkerHostedService>();
builder.Services.AddHostedService(sp => sp.GetRequiredService<ScannerWorkerHostedService>());
@@ -0,0 +1,92 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;

namespace StellaOps.Scanner.Core.Entropy;

/// <summary>
/// Computes sliding-window Shannon entropy for byte buffers.
/// Offline-friendly and deterministic: no allocations beyond histogram buffer and result list.
/// </summary>
public static class EntropyCalculator
{
    /// <summary>
    /// Computes entropy windows over the supplied buffer.
    /// </summary>
    /// <param name="data">Input bytes.</param>
    /// <param name="windowSize">Window length in bytes (default 4096).</param>
    /// <param name="stride">Step between windows in bytes (default 1024).</param>
    /// <returns>List of entropy windows (offset, length, entropy bits/byte).</returns>
    public static IReadOnlyList<EntropyWindow> Compute(ReadOnlySpan<byte> data, int windowSize = 4096, int stride = 1024)
    {
        if (windowSize <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(windowSize), "Window size must be positive.");
        }

        if (stride <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(stride), "Stride must be positive.");
        }

        var results = new List<EntropyWindow>();
        if (data.IsEmpty || data.Length < windowSize)
        {
            return results;
        }

        // Reuse histogram buffer; fixed length for byte values.
        Span<int> histogram = stackalloc int[256];
        var end = data.Length - windowSize;

        // Seed histogram for first window.
        for (var i = 0; i < windowSize; i++)
        {
            histogram[data[i]]++;
        }

        AppendEntropy(results, 0, windowSize, histogram, windowSize);

        // Slide window with rolling histogram updates to avoid re-scanning the buffer.
        for (var offset = stride; offset <= end; offset += stride)
        {
            var removeStart = offset - stride;
            var removeEnd = removeStart + stride;
            for (var i = removeStart; i < removeEnd; i++)
            {
                histogram[data[i]]--;
            }

            var addStart = offset + windowSize - stride;
            var addEnd = offset + windowSize;
            for (var i = addStart; i < addEnd; i++)
            {
                histogram[data[i]]++;
            }

            AppendEntropy(results, offset, windowSize, histogram, windowSize);
        }

        return results;
    }

    private static void AppendEntropy(ICollection<EntropyWindow> results, int offset, int length, ReadOnlySpan<int> histogram, int totalCount)
    {
        double entropy = 0;
        for (var i = 0; i < 256; i++)
        {
            var count = histogram[i];
            if (count == 0)
            {
                continue;
            }

            var p = (double)count / totalCount;
            entropy -= p * Math.Log(p, 2);
        }

        results.Add(new EntropyWindow(offset, length, entropy));
    }
}

public readonly record struct EntropyWindow(int Offset, int Length, double Entropy);
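A brief aside on the math and usage (not part of the commit): each window's score is the Shannon entropy H = -Σ p_b · log2(p_b) over the window's byte frequencies, so constant data scores 0 and uniformly random data approaches 8 bits/byte. A minimal call might look like:

using System;
using StellaOps.Scanner.Core.Entropy;

// Illustrative only: random data should land near 8 bits/byte in every window.
var data = new byte[16 * 1024];
Random.Shared.NextBytes(data);

foreach (var window in EntropyCalculator.Compute(data, windowSize: 4096, stride: 1024))
{
    Console.WriteLine($"offset={window.Offset} length={window.Length} entropy={window.Entropy:F2} bits/byte");
}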
@@ -0,0 +1,107 @@
using System;
using System.Collections.Generic;
using System.Linq;

namespace StellaOps.Scanner.Core.Entropy;

/// <summary>
/// Builds per-file entropy reports and aggregates layer-level opaque ratios.
/// Keeps logic deterministic and offline-friendly.
/// </summary>
public sealed class EntropyReportBuilder
{
    private readonly int _windowSize;
    private readonly int _stride;
    private readonly double _opaqueThreshold;
    private readonly double _opaqueFileRatioFlag;

    public EntropyReportBuilder(
        int windowSize = 4096,
        int stride = 1024,
        double opaqueThreshold = 7.2,
        double opaqueFileRatioFlag = 0.30)
    {
        if (windowSize <= 0) throw new ArgumentOutOfRangeException(nameof(windowSize));
        if (stride <= 0) throw new ArgumentOutOfRangeException(nameof(stride));
        if (opaqueThreshold <= 0) throw new ArgumentOutOfRangeException(nameof(opaqueThreshold));
        if (opaqueFileRatioFlag < 0 || opaqueFileRatioFlag > 1) throw new ArgumentOutOfRangeException(nameof(opaqueFileRatioFlag));

        _windowSize = windowSize;
        _stride = stride;
        _opaqueThreshold = opaqueThreshold;
        _opaqueFileRatioFlag = opaqueFileRatioFlag;
    }

    /// <summary>
    /// Builds a file-level entropy report.
    /// </summary>
    public EntropyFileReport BuildFile(string path, ReadOnlySpan<byte> data, IEnumerable<string>? flags = null)
    {
        ArgumentNullException.ThrowIfNull(path);

        var windows = EntropyCalculator
            .Compute(data, _windowSize, _stride)
            .Select(w => new EntropyFileWindow(w.Offset, w.Length, w.Entropy))
            .ToList();

        var opaqueBytes = windows
            .Where(w => w.EntropyBits >= _opaqueThreshold)
            .Sum(w => (long)w.Length);

        var size = data.Length;
        var ratio = size == 0 ? 0d : (double)opaqueBytes / size;

        var fileFlags = new List<string>();
        if (flags is not null)
        {
            fileFlags.AddRange(flags.Where(f => !string.IsNullOrWhiteSpace(f)).Select(f => f.Trim()));
        }

        if (ratio >= _opaqueFileRatioFlag)
        {
            fileFlags.Add("opaque-high");
        }

        return new EntropyFileReport(
            Path: path,
            Size: size,
            OpaqueBytes: opaqueBytes,
            OpaqueRatio: ratio,
            Flags: fileFlags,
            Windows: windows);
    }

    /// <summary>
    /// Aggregates layer-level opaque ratios and returns an image-level ratio.
    /// </summary>
    public (EntropyLayerSummary Layer, double ImageOpaqueRatio) BuildLayerSummary(
        string layerDigest,
        IEnumerable<EntropyFileReport> fileReports,
        long layerTotalBytes,
        double imageOpaqueBytes,
        double imageTotalBytes)
    {
        ArgumentNullException.ThrowIfNull(fileReports);
        ArgumentException.ThrowIfNullOrWhiteSpace(layerDigest);

        var files = fileReports.ToList();
        var opaqueBytes = files.Sum(f => f.OpaqueBytes);
        var indicators = new List<string>();
        if (files.Any(f => f.Flags.Contains("opaque-high", StringComparer.OrdinalIgnoreCase)))
        {
            indicators.Add("packed-like");
        }

        var layerRatio = layerTotalBytes <= 0 ? 0d : (double)opaqueBytes / layerTotalBytes;
        var imageRatio = imageTotalBytes <= 0 ? 0d : imageOpaqueBytes / imageTotalBytes;

        var summary = new EntropyLayerSummary(
            LayerDigest: layerDigest,
            OpaqueBytes: opaqueBytes,
            TotalBytes: layerTotalBytes,
            OpaqueRatio: layerRatio,
            Indicators: indicators);

        return (summary, imageRatio);
    }
}
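As a usage sketch (not part of the commit), the builder chains into the layer summary roughly as below; the path and digest are placeholders, and the defaults mirror the constructor above (windows at or above 7.2 bits/byte count as opaque, and a file is flagged "opaque-high" once 30% of its bytes fall in such windows):

using System.IO;
using StellaOps.Scanner.Core.Entropy;

var builder = new EntropyReportBuilder();

// Hypothetical input file; any byte buffer works.
var bytes = File.ReadAllBytes("/usr/local/bin/sample");
var fileReport = builder.BuildFile("/usr/local/bin/sample", bytes);

var (layerSummary, imageRatio) = builder.BuildLayerSummary(
    layerDigest: "sha256:layerdigest",
    fileReports: new[] { fileReport },
    layerTotalBytes: fileReport.Size,
    imageOpaqueBytes: fileReport.OpaqueBytes,
    imageTotalBytes: fileReport.Size);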
@@ -0,0 +1,26 @@
using System.Collections.Generic;

namespace StellaOps.Scanner.Core.Entropy;

public sealed record EntropyFileWindow(int Offset, int Length, double EntropyBits);

public sealed record EntropyFileReport(
    string Path,
    long Size,
    long OpaqueBytes,
    double OpaqueRatio,
    IReadOnlyList<string> Flags,
    IReadOnlyList<EntropyFileWindow> Windows);

public sealed record EntropyLayerSummary(
    string LayerDigest,
    long OpaqueBytes,
    long TotalBytes,
    double OpaqueRatio,
    IReadOnlyList<string> Indicators);

public sealed record EntropyReport(
    string ImageDigest,
    string LayerDigest,
    IReadOnlyList<EntropyFileReport> Files,
    double ImageOpaqueRatio);
@@ -0,0 +1,98 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Replay.Core;

namespace StellaOps.Scanner.Core.Replay;

/// <summary>
/// Assembles replay run metadata and bundle records from scanner artifacts.
/// </summary>
public sealed class RecordModeAssembler
{
    private readonly TimeProvider _timeProvider;

    public RecordModeAssembler(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    public ReplayRunRecord BuildRun(
        string scanId,
        ReplayManifest manifest,
        string sbomDigest,
        string findingsDigest,
        string? vexDigest = null,
        string? logDigest = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentException.ThrowIfNullOrWhiteSpace(sbomDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(findingsDigest);

        var now = _timeProvider.GetUtcNow().UtcDateTime;
        var manifestHash = "sha256:" + manifest.ComputeCanonicalSha256();

        return new ReplayRunRecord
        {
            Id = scanId,
            ManifestHash = manifestHash,
            Status = "pending",
            CreatedAt = now,
            UpdatedAt = now,
            Outputs = new ReplayRunOutputs
            {
                Sbom = NormalizeDigest(sbomDigest),
                Findings = NormalizeDigest(findingsDigest),
                Vex = NormalizeOptionalDigest(vexDigest),
                Log = NormalizeOptionalDigest(logDigest)
            },
            Signatures = new List<ReplaySignatureRecord>()
        };
    }

    public IReadOnlyList<ReplayBundleRecord> BuildBundles(
        ReplayBundleWriteResult inputBundle,
        ReplayBundleWriteResult outputBundle,
        IEnumerable<(ReplayBundleWriteResult Result, string Type)>? additionalBundles = null)
    {
        var now = _timeProvider.GetUtcNow().UtcDateTime;

        var records = new List<ReplayBundleRecord>
        {
            ToBundleRecord(inputBundle, "input", now),
            ToBundleRecord(outputBundle, "output", now)
        };

        if (additionalBundles != null)
        {
            records.AddRange(additionalBundles.Select(b => ToBundleRecord(b.Result, b.Type, now)));
        }

        return records;
    }

    private static ReplayBundleRecord ToBundleRecord(ReplayBundleWriteResult result, string type, DateTime createdAt)
    {
        ArgumentNullException.ThrowIfNull(result);
        ArgumentException.ThrowIfNullOrWhiteSpace(type);

        return new ReplayBundleRecord
        {
            Id = result.ZstSha256,
            Type = type.Trim().ToLowerInvariant(),
            Size = result.ZstBytes,
            Location = result.CasUri,
            CreatedAt = createdAt
        };
    }

    private static string NormalizeDigest(string digest)
    {
        var trimmed = digest.Trim().ToLowerInvariant();
        return trimmed.StartsWith("sha256:", StringComparison.Ordinal) ? trimmed : $"sha256:{trimmed}";
    }

    private static string? NormalizeOptionalDigest(string? digest)
        => string.IsNullOrWhiteSpace(digest) ? null : NormalizeDigest(digest);
}
@@ -14,5 +14,6 @@
  <ItemGroup>
    <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,40 @@
using System;
using System.Linq;
using StellaOps.Scanner.Core.Entropy;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Entropy;

public class EntropyCalculatorTests
{
    [Fact]
    public void Compute_ReturnsEmpty_WhenBufferTooSmall()
    {
        var result = EntropyCalculator.Compute(new byte[10], windowSize: 32, stride: 8);
        Assert.Empty(result);
    }

    [Fact]
    public void Compute_ProducesZeroEntropy_ForConstantData()
    {
        var data = Enumerable.Repeat((byte)0xAA, 4096 * 2).ToArray();

        var windows = EntropyCalculator.Compute(data, windowSize: 4096, stride: 1024);

        Assert.NotEmpty(windows);
        Assert.All(windows, w => Assert.InRange(w.Entropy, 0, 0.0001));
    }

    [Fact]
    public void Compute_DetectsHighEntropy_ForRandomBytes()
    {
        var rng = new Random(1234);
        var data = new byte[8192];
        rng.NextBytes(data);

        var windows = EntropyCalculator.Compute(data, windowSize: 4096, stride: 1024);

        Assert.NotEmpty(windows);
        Assert.All(windows, w => Assert.InRange(w.Entropy, 7.0, 8.1));
    }
}
@@ -0,0 +1,53 @@
using System;
using System.Linq;
using StellaOps.Scanner.Core.Entropy;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Entropy;

public class EntropyReportBuilderTests
{
    [Fact]
    public void BuildFile_FlagsOpaqueHigh_WhenRatioExceedsThreshold()
    {
        var builder = new EntropyReportBuilder(windowSize: 4, stride: 4, opaqueThreshold: 1.0, opaqueFileRatioFlag: 0.25);
        // Alternating bytes produce high entropy in every window.
        var data = Enumerable.Range(0, 64).Select(i => (byte)(i % 2)).ToArray();

        var report = builder.BuildFile("/bin/demo", data);

        Assert.Contains("opaque-high", report.Flags);
        Assert.True(report.OpaqueRatio > 0.25);
    }

    [Fact]
    public void BuildFile_RespectsProvidedFlags()
    {
        var builder = new EntropyReportBuilder(windowSize: 8, stride: 8, opaqueThreshold: 7.0, opaqueFileRatioFlag: 0.90);
        var data = new byte[64];

        var report = builder.BuildFile("/bin/zero", data, new[] { "stripped", "", "debug-missing" });

        Assert.Contains("stripped", report.Flags);
        Assert.Contains("debug-missing", report.Flags);
    }

    [Fact]
    public void BuildLayerSummary_ComputesRatios()
    {
        var builder = new EntropyReportBuilder(windowSize: 4, stride: 4, opaqueThreshold: 1.0, opaqueFileRatioFlag: 0.25);
        var data = Enumerable.Range(0, 64).Select(i => (byte)(i % 2)).ToArray();
        var file = builder.BuildFile("/bin/demo", data);

        var (summary, imageRatio) = builder.BuildLayerSummary(
            "sha256:layer",
            new[] { file },
            layerTotalBytes: 64,
            imageOpaqueBytes: file.OpaqueBytes,
            imageTotalBytes: 128);

        Assert.Equal("sha256:layer", summary.LayerDigest);
        Assert.InRange(summary.OpaqueRatio, 0.25, 1.0);
        Assert.InRange(imageRatio, 0.0, 1.0);
    }
}
@@ -0,0 +1,56 @@
using System;
using FluentAssertions;
using StellaOps.Replay.Core;
using StellaOps.Scanner.Core.Replay;
using Xunit;

namespace StellaOps.Scanner.Core.Tests.Replay;

public sealed class RecordModeAssemblerTests
{
    [Fact]
    public void BuildRun_ComputesManifestHashAndOutputs()
    {
        var manifest = new ReplayManifest
        {
            Scan = new ReplayScanMetadata { Id = "scan-1", Time = DateTimeOffset.UnixEpoch }
        };

        var assembler = new RecordModeAssembler(new FixedTimeProvider(new DateTimeOffset(2025, 11, 25, 12, 0, 0, TimeSpan.Zero)));

        var run = assembler.BuildRun("scan-1", manifest, "sha256:sbom", "findings-digest", vexDigest: "sha256:vex");

        run.Id.Should().Be("scan-1");
        run.ManifestHash.Should().StartWith("sha256:");
        run.CreatedAt.Should().Be(new DateTime(2025, 11, 25, 12, 0, 0, DateTimeKind.Utc));
        run.Outputs.Sbom.Should().Be("sha256:sbom");
        run.Outputs.Findings.Should().Be("sha256:findings-digest");
        run.Outputs.Vex.Should().Be("sha256:vex");
        run.Status.Should().Be("pending");
    }

    [Fact]
    public void BuildBundles_ProducesDeterministicRecords()
    {
        var assembler = new RecordModeAssembler(new FixedTimeProvider(DateTimeOffset.UnixEpoch));

        var input = new ReplayBundleWriteResult("tar1", "z1", 10, 20, "cas://replay/zz/z1.tar.zst");
        var output = new ReplayBundleWriteResult("tar2", "z2", 30, 40, "cas://replay/aa/z2.tar.zst");

        var bundles = assembler.BuildBundles(input, output);

        bundles.Should().HaveCount(2);
        bundles[0].Id.Should().Be("z1");
        bundles[0].Type.Should().Be("input");
        bundles[1].Id.Should().Be("z2");
        bundles[1].Location.Should().Be("cas://replay/aa/z2.tar.zst");
        bundles[0].CreatedAt.Should().Be(DateTime.UnixEpoch);
    }

    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _utc;
        public FixedTimeProvider(DateTimeOffset utc) => _utc = utc;
        public override DateTimeOffset GetUtcNow() => _utc;
    }
}
@@ -0,0 +1,74 @@
using System;
using System.Collections.Generic;
using System.Net.Http.Json;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Replay.Core;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Replay;
using StellaOps.Scanner.WebService.Services;
using Xunit;

namespace StellaOps.Scanner.WebService.Tests;

public sealed partial class ScansEndpointsTests
{
    [Fact]
    public async Task RecordModeService_AttachesReplayAndSurfacedInStatus()
    {
        using var secrets = new TestSurfaceSecretsScope();
        using var factory = new ScannerApplicationFactory(cfg =>
        {
            cfg["scanner:authority:enabled"] = "false";
        });
        using var client = factory.CreateClient();

        var submitResponse = await client.PostAsJsonAsync("/api/v1/scans", new
        {
            image = new { digest = "sha256:demo" }
        });
        submitResponse.EnsureSuccessStatusCode();

        var submitPayload = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>();
        Assert.NotNull(submitPayload);
        var scanId = submitPayload!.ScanId;

        using var scope = factory.Services.CreateScope();
        var coordinator = scope.ServiceProvider.GetRequiredService<IScanCoordinator>();
        var recordMode = scope.ServiceProvider.GetRequiredService<IRecordModeService>();
        var timeProvider = scope.ServiceProvider.GetRequiredService<TimeProvider>();

        var manifest = new ReplayManifest
        {
            Scan = new ReplayScanMetadata
            {
                Id = scanId,
                Time = timeProvider.GetUtcNow()
            }
        };

        var replay = await recordMode.AttachAsync(
            new ScanId(scanId),
            manifest,
            new ReplayBundleWriteResult("tar1", "z1", 128, 64, "cas://replay/z1.tar.zst"),
            new ReplayBundleWriteResult("tar2", "z2", 256, 96, "cas://replay/z2.tar.zst"),
            sbomDigest: "sha256:sbom",
            findingsDigest: "findings-digest",
            coordinator: coordinator,
            additionalBundles: new[]
            {
                (new ReplayBundleWriteResult("tar3", "z3", 1, 2, "cas://replay/z3.tar.zst"), "reachability")
            });

        Assert.NotNull(replay);

        var status = await client.GetFromJsonAsync<ScanStatusResponse>($"/api/v1/scans/{scanId}");
        Assert.NotNull(status);
        Assert.NotNull(status!.Replay);
        Assert.Equal(replay!.ManifestHash, status.Replay!.ManifestHash);
        Assert.Equal(3, status.Replay!.Bundles.Count);
        Assert.Contains(status.Replay!.Bundles, b => b.Type == "reachability");
        Assert.All(status.Replay!.Bundles, b => Assert.StartsWith("sha256:", b.Digest, StringComparison.Ordinal));
    }
}
@@ -0,0 +1,67 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Entropy;
using StellaOps.Scanner.Worker.Processing;
using StellaOps.Scanner.Worker.Processing.Entropy;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests;

public class EntropyStageExecutorTests
{
    [Fact]
    public async Task ExecuteAsync_WritesEntropyReportAndSummary()
    {
        // Arrange: create a temp file with random bytes to yield high entropy.
        var tmp = Path.GetTempFileName();
        var rng = new Random(1234);
        var bytes = new byte[64 * 1024];
        rng.NextBytes(bytes);
        File.WriteAllBytes(tmp, bytes);

        var fileEntries = new List<ScanFileEntry>
        {
            new ScanFileEntry(tmp, sizeBytes: bytes.LongLength, kind: "blob", metadata: new Dictionary<string, string>())
        };

        var lease = new StubLease("job-1", "scan-1", imageDigest: "sha256:test", layerDigest: "sha256:layer");
        var context = new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None);
        context.Analysis.Set(ScanAnalysisKeys.FileEntries, (IReadOnlyList<ScanFileEntry>)fileEntries);

        var executor = new EntropyStageExecutor(NullLogger<EntropyStageExecutor>.Instance);

        // Act
        await executor.ExecuteAsync(context, CancellationToken.None);

        // Assert
        Assert.True(context.Analysis.TryGet<EntropyReport>(ScanAnalysisKeys.EntropyReport, out var report));
        Assert.NotNull(report);
        Assert.Equal("sha256:layer", report!.LayerDigest);
        Assert.NotEmpty(report.Files);

        Assert.True(context.Analysis.TryGet<EntropyLayerSummary>(ScanAnalysisKeys.EntropyLayerSummary, out var summary));
        Assert.NotNull(summary);
        Assert.Equal("sha256:layer", summary!.LayerDigest);
    }

    private sealed class StubLease : IScanJobLease
    {
        public StubLease(string jobId, string scanId, string imageDigest, string layerDigest)
        {
            JobId = jobId;
            ScanId = scanId;
            ImageDigest = imageDigest;
            LayerDigest = layerDigest;
        }

        public string JobId { get; }
        public string ScanId { get; }
        public string? ImageDigest { get; }
        public string? LayerDigest { get; }
    }
}