save progress

This commit is contained in:
StellaOps Bot
2025-12-18 09:53:46 +02:00
parent 28823a8960
commit 7d5250238c
87 changed files with 9750 additions and 2026 deletions

View File

@@ -46,16 +46,31 @@ public sealed class VirtualFileSystem : IVirtualFileSystem
public VirtualFileSystem(IEnumerable<string> files)
{
_files = new HashSet<string>(files, StringComparer.OrdinalIgnoreCase);
ArgumentNullException.ThrowIfNull(files);
_files = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
_directories = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var file in _files)
foreach (var file in files)
{
var dir = Path.GetDirectoryName(file);
var normalizedFile = NormalizePath(file);
if (string.IsNullOrWhiteSpace(normalizedFile))
{
continue;
}
_files.Add(normalizedFile);
var dir = GetDirectoryName(normalizedFile);
while (!string.IsNullOrEmpty(dir))
{
_directories.Add(dir);
dir = Path.GetDirectoryName(dir);
var normalizedDir = NormalizePath(dir);
if (!string.IsNullOrEmpty(normalizedDir))
{
_directories.Add(normalizedDir);
}
dir = GetParentDirectory(dir);
}
}
}
@@ -68,13 +83,53 @@ public sealed class VirtualFileSystem : IVirtualFileSystem
var normalizedDir = NormalizePath(directory);
return _files.Where(f =>
{
var fileDir = Path.GetDirectoryName(f);
var fileDir = GetDirectoryName(f);
return string.Equals(fileDir, normalizedDir, StringComparison.OrdinalIgnoreCase);
});
}
private static string NormalizePath(string path) =>
path.Replace('\\', '/').TrimEnd('/');
TrimEndDirectorySeparators(path.Replace('\\', '/'));
// Strips trailing '/' characters while preserving a lone root "/".
// Null, empty, or whitespace-only input collapses to the empty string.
private static string TrimEndDirectorySeparators(string path)
{
    if (string.IsNullOrWhiteSpace(path))
    {
        return string.Empty;
    }

    var end = path.Length;
    while (end > 1 && path[end - 1] == '/')
    {
        end--;
    }

    return path[..end];
}
// Returns the '/'-normalized parent portion of a file path,
// or the empty string when the path has no parent (root-level entry).
private static string GetDirectoryName(string path)
{
    var normalized = NormalizePath(path);
    var separatorIndex = normalized.LastIndexOf('/');
    return separatorIndex > 0 ? normalized[..separatorIndex] : string.Empty;
}
// Walks one level up from a normalized directory path, returning the empty
// string once the root is reached (used by the constructor's ancestor loop).
private static string GetParentDirectory(string directory)
{
    var normalized = NormalizePath(directory);
    var separatorIndex = normalized.LastIndexOf('/');
    return separatorIndex > 0 ? normalized[..separatorIndex] : string.Empty;
}
}
/// <summary>

View File

@@ -3,6 +3,7 @@ using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
@@ -64,12 +65,13 @@ internal static class ReachabilityEndpoints
string scanId,
ComputeReachabilityRequestDto? request,
IScanCoordinator coordinator,
[FromServices] IReachabilityComputeService computeService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(coordinator);
ArgumentNullException.ThrowIfNull(computeService);
ArgumentNullException.ThrowIfNull(context);
var computeService = context.RequestServices.GetRequiredService<IReachabilityComputeService>();
if (!ScanId.TryParse(scanId, out var parsed))
{

View File

@@ -4,7 +4,6 @@ using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace StellaOps.Scanner.WebService.Infrastructure;
@@ -29,25 +28,56 @@ internal static class ProblemResultFactory
var traceId = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier;
var problem = new ProblemDetails
var mergedExtensions = new Dictionary<string, object?>(StringComparer.Ordinal)
{
["traceId"] = traceId
};
if (extensions is not null)
{
foreach (var entry in extensions)
{
if (string.IsNullOrWhiteSpace(entry.Key))
{
continue;
}
mergedExtensions[entry.Key] = entry.Value;
}
}
var problem = new ProblemDocument
{
Type = type,
Title = title,
Detail = detail,
Status = statusCode,
Instance = context.Request.Path
Instance = context.Request.Path,
Extensions = mergedExtensions
};
problem.Extensions["traceId"] = traceId;
if (extensions is not null)
{
foreach (var entry in extensions)
{
problem.Extensions[entry.Key] = entry.Value;
}
}
var payload = JsonSerializer.Serialize(problem, JsonOptions);
return Results.Content(payload, "application/problem+json", Encoding.UTF8, statusCode);
}
/// <summary>
/// Serialization shape for RFC 7807-style problem responses emitted by this factory.
/// NOTE(review): the extensions dictionary is serialized as a nested "extensions"
/// object here, whereas ASP.NET Core's ProblemDetails flattens extension members
/// to the top level — confirm downstream consumers expect the nested form.
/// </summary>
private sealed class ProblemDocument
{
    /// <summary>Problem type URI/identifier.</summary>
    [JsonPropertyName("type")]
    public string? Type { get; init; }

    /// <summary>Short, human-readable summary of the problem.</summary>
    [JsonPropertyName("title")]
    public string? Title { get; init; }

    /// <summary>Detailed explanation specific to this occurrence.</summary>
    [JsonPropertyName("detail")]
    public string? Detail { get; init; }

    /// <summary>HTTP status code of the response.</summary>
    [JsonPropertyName("status")]
    public int Status { get; init; }

    /// <summary>Request path that produced the problem.</summary>
    [JsonPropertyName("instance")]
    public string? Instance { get; init; }

    /// <summary>Merged extension data (always includes "traceId"); emitted as a nested object.</summary>
    [JsonPropertyName("extensions")]
    public Dictionary<string, object?>? Extensions { get; init; }
}
}

View File

@@ -544,21 +544,24 @@ internal sealed class OfflineKitImportService
long size = 0;
using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
await using var output = File.Create(temp);
await using var input = file.OpenReadStream();
var buffer = new byte[128 * 1024];
while (true)
await using (var output = File.Create(temp))
await using (var input = file.OpenReadStream())
{
var read = await input.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read == 0)
var buffer = new byte[128 * 1024];
while (true)
{
break;
var read = await input.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read == 0)
{
break;
}
hasher.AppendData(buffer, 0, read);
await output.WriteAsync(buffer.AsMemory(0, read), cancellationToken).ConfigureAwait(false);
size += read;
}
hasher.AppendData(buffer, 0, read);
await output.WriteAsync(buffer.AsMemory(0, read), cancellationToken).ConfigureAwait(false);
size += read;
await output.FlushAsync(cancellationToken).ConfigureAwait(false);
}
var hash = hasher.GetHashAndReset();
@@ -579,9 +582,13 @@ internal sealed class OfflineKitImportService
Directory.CreateDirectory(directory);
}
await using var output = File.Create(path);
await using var input = file.OpenReadStream();
await input.CopyToAsync(output, cancellationToken).ConfigureAwait(false);
await using (var output = File.Create(path))
await using (var input = file.OpenReadStream())
{
await input.CopyToAsync(output, cancellationToken).ConfigureAwait(false);
await output.FlushAsync(cancellationToken).ConfigureAwait(false);
}
return await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
}
@@ -695,4 +702,3 @@ internal sealed class OfflineKitImportService
return true;
}
}

View File

@@ -4,5 +4,6 @@
| --- | --- | --- | --- |
| `SCAN-API-3101-001` | `docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md` | DOING | Align Scanner OpenAPI spec with current endpoints and include ProofSpine routes; compose into `src/Api/StellaOps.Api.OpenApi/stella.yaml`. |
| `PROOFSPINE-3100-API` | `docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md` | DOING | Implement and test `/api/v1/spines/*` endpoints and wire verification output. |
| `SCAN-AIRGAP-0340-001` | `docs/implplan/SPRINT_0340_0001_0001_scanner_offline_config.md` | BLOCKED | Offline kit verification wiring is blocked on an import pipeline + offline Rekor verifier. |
| `SCAN-API-3103-001` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DOING | Implement missing ingestion services + DI for callgraph/SBOM endpoints and add deterministic integration tests. |
| `SCAN-AIRGAP-0340-001` | `docs/implplan/SPRINT_0340_0001_0001_scanner_offline_config.md` | DONE | Offline kit import + DSSE/offline Rekor verification wired; integration tests cover success/failure/audit. |
| `DRIFT-3600-API` | `docs/implplan/SPRINT_3600_0003_0001_drift_detection_engine.md` | DONE | Add reachability drift endpoints (`/api/v1/scans/{id}/drift`, `/api/v1/drift/{id}/sinks`) + integration tests. |
| `SCAN-API-3103-001` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DONE | Implement missing ingestion services + DI for callgraph/SBOM endpoints and add deterministic integration tests. |

View File

@@ -3,6 +3,7 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.CallGraph.Serialization;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph;
@@ -12,10 +13,18 @@ public sealed record CallGraphSnapshot(
[property: JsonPropertyName("graphDigest")] string GraphDigest,
[property: JsonPropertyName("language")] string Language,
[property: JsonPropertyName("extractedAt")] DateTimeOffset ExtractedAt,
[property: JsonPropertyName("nodes")] ImmutableArray<CallGraphNode> Nodes,
[property: JsonPropertyName("edges")] ImmutableArray<CallGraphEdge> Edges,
[property: JsonPropertyName("entrypointIds")] ImmutableArray<string> EntrypointIds,
[property: JsonPropertyName("sinkIds")] ImmutableArray<string> SinkIds)
[property: JsonPropertyName("nodes")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<CallGraphNode>))]
ImmutableArray<CallGraphNode> Nodes,
[property: JsonPropertyName("edges")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<CallGraphEdge>))]
ImmutableArray<CallGraphEdge> Edges,
[property: JsonPropertyName("entrypointIds")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
ImmutableArray<string> EntrypointIds,
[property: JsonPropertyName("sinkIds")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
ImmutableArray<string> SinkIds)
{
public CallGraphSnapshot Trimmed()
{
@@ -286,7 +295,9 @@ public static class CallGraphDigests
public sealed record ReachabilityPath(
[property: JsonPropertyName("entrypointId")] string EntrypointId,
[property: JsonPropertyName("sinkId")] string SinkId,
[property: JsonPropertyName("nodeIds")] ImmutableArray<string> NodeIds)
[property: JsonPropertyName("nodeIds")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
ImmutableArray<string> NodeIds)
{
public ReachabilityPath Trimmed()
{
@@ -309,9 +320,15 @@ public sealed record ReachabilityAnalysisResult(
[property: JsonPropertyName("graphDigest")] string GraphDigest,
[property: JsonPropertyName("language")] string Language,
[property: JsonPropertyName("computedAt")] DateTimeOffset ComputedAt,
[property: JsonPropertyName("reachableNodeIds")] ImmutableArray<string> ReachableNodeIds,
[property: JsonPropertyName("reachableSinkIds")] ImmutableArray<string> ReachableSinkIds,
[property: JsonPropertyName("paths")] ImmutableArray<ReachabilityPath> Paths,
[property: JsonPropertyName("reachableNodeIds")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
ImmutableArray<string> ReachableNodeIds,
[property: JsonPropertyName("reachableSinkIds")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<string>))]
ImmutableArray<string> ReachableSinkIds,
[property: JsonPropertyName("paths")]
[property: JsonConverter(typeof(ImmutableArrayJsonConverter<ReachabilityPath>))]
ImmutableArray<ReachabilityPath> Paths,
[property: JsonPropertyName("resultDigest")] string ResultDigest)
{
public ReachabilityAnalysisResult Trimmed()

View File

@@ -0,0 +1,42 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.CallGraph.Serialization;
/// <summary>
/// System.Text.Json converter for <see cref="ImmutableArray{T}"/>.
/// Reads JSON arrays (and null) into a non-default array, and writes
/// default (uninitialized) arrays as an empty JSON array so call graph
/// models round-trip without per-call JsonSerializerOptions registration.
/// </summary>
public sealed class ImmutableArrayJsonConverter<T> : JsonConverter<ImmutableArray<T>>
{
    public override ImmutableArray<T> Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType is JsonTokenType.Null)
        {
            return ImmutableArray<T>.Empty;
        }

        var items = JsonSerializer.Deserialize<List<T>>(ref reader, options);
        return items is { Count: > 0 }
            ? ImmutableArray.CreateRange(items)
            : ImmutableArray<T>.Empty;
    }

    public override void Write(Utf8JsonWriter writer, ImmutableArray<T> value, JsonSerializerOptions options)
    {
        writer.WriteStartArray();
        // A default ImmutableArray would throw on enumeration; treat it as empty.
        if (!value.IsDefault)
        {
            foreach (var element in value)
            {
                JsonSerializer.Serialize(writer, element, options);
            }
        }
        writer.WriteEndArray();
    }
}

View File

@@ -0,0 +1,41 @@
// -----------------------------------------------------------------------------
// EpssBundleSource.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-005
// Description: File-based EPSS source for air-gapped imports.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// File-based EPSS source for air-gapped imports. The configured path may be
/// either a directory containing dated snapshot files or a direct file path.
/// </summary>
public sealed class EpssBundleSource : IEpssSource
{
    private readonly string _path;

    /// <param name="path">Bundle file or directory holding <c>epss_scores-YYYY-MM-DD.csv.gz</c> files.</param>
    public EpssBundleSource(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        _path = path;
    }

    /// <summary>
    /// Resolves the local snapshot file for <paramref name="modelDate"/>.
    /// </summary>
    /// <exception cref="FileNotFoundException">The resolved bundle file does not exist.</exception>
    public ValueTask<EpssSourceFile> GetAsync(DateOnly modelDate, CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();

        // Invariant culture keeps the file name deterministic even when the
        // process default culture uses a non-Gregorian calendar.
        var fileName = $"epss_scores-{modelDate.ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture)}.csv.gz";

        var resolvedPath = _path;
        if (Directory.Exists(_path))
        {
            resolvedPath = Path.Combine(_path, fileName);
        }

        if (!File.Exists(resolvedPath))
        {
            throw new FileNotFoundException($"EPSS bundle file not found: {resolvedPath}", resolvedPath);
        }

        // Bundle files are caller-owned; never delete them on dispose.
        var sourceUri = $"bundle://{Path.GetFileName(resolvedPath)}";
        return ValueTask.FromResult(new EpssSourceFile(sourceUri, resolvedPath, deleteOnDispose: false));
    }
}

View File

@@ -0,0 +1,75 @@
// -----------------------------------------------------------------------------
// EpssChangeDetector.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-008
// Description: Deterministic EPSS delta flag computation (mirrors SQL function).
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Deterministic EPSS delta flag computation. Mirrors the SQL change-detection
/// function, so threshold boundary semantics (inclusive <c>&gt;=</c> on the high
/// side, strict <c>&gt;</c> on jump deltas) must not be altered independently.
/// </summary>
public static class EpssChangeDetector
{
    /// <summary>Default thresholds matching the SQL function's defaults.</summary>
    public static EpssChangeThresholds DefaultThresholds => new(
        HighScore: 0.50,
        HighPercentile: 0.95,
        BigJumpDelta: 0.10);

    /// <summary>
    /// Computes the change flags between the previous snapshot values
    /// (<paramref name="oldScore"/>/<paramref name="oldPercentile"/>, null when the
    /// CVE was absent) and the current snapshot values.
    /// </summary>
    public static EpssChangeFlags ComputeFlags(
        double? oldScore,
        double newScore,
        double? oldPercentile,
        double newPercentile,
        EpssChangeThresholds thresholds)
    {
        var flags = EpssChangeFlags.None;

        if (oldScore is null)
        {
            // Not present in the previous snapshot: only the "new" flag applies;
            // crossings and jumps require a prior value to compare against.
            flags |= EpssChangeFlags.NewScored;
        }
        else
        {
            if (oldScore < thresholds.HighScore && newScore >= thresholds.HighScore)
            {
                flags |= EpssChangeFlags.CrossedHigh;
            }
            if (oldScore >= thresholds.HighScore && newScore < thresholds.HighScore)
            {
                flags |= EpssChangeFlags.CrossedLow;
            }

            var delta = newScore - oldScore.Value;
            if (delta > thresholds.BigJumpDelta)
            {
                flags |= EpssChangeFlags.BigJumpUp;
            }
            if (delta < -thresholds.BigJumpDelta)
            {
                flags |= EpssChangeFlags.BigJumpDown;
            }
        }

        // Percentile band transitions; a null old percentile counts as "below band".
        if ((oldPercentile is null || oldPercentile < thresholds.HighPercentile)
            && newPercentile >= thresholds.HighPercentile)
        {
            flags |= EpssChangeFlags.TopPercentile;
        }
        if (oldPercentile is not null && oldPercentile >= thresholds.HighPercentile
            && newPercentile < thresholds.HighPercentile)
        {
            flags |= EpssChangeFlags.LeftTopPercentile;
        }

        return flags;
    }
}

/// <summary>Threshold configuration for EPSS change detection.</summary>
/// <param name="HighScore">Score boundary for CrossedHigh/CrossedLow (inclusive on the high side).</param>
/// <param name="HighPercentile">Percentile boundary for the top-percentile band.</param>
/// <param name="BigJumpDelta">Strict absolute score delta that triggers a big-jump flag.</param>
public readonly record struct EpssChangeThresholds(
    double HighScore,
    double HighPercentile,
    double BigJumpDelta);

View File

@@ -0,0 +1,36 @@
// -----------------------------------------------------------------------------
// EpssChangeFlags.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-008
// Description: Flag bitmask for EPSS change detection.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Bitmask describing how a CVE's EPSS score and percentile changed between
/// two daily snapshots. Values are stable and persisted; do not renumber.
/// </summary>
[Flags]
public enum EpssChangeFlags
{
    /// <summary>No change detected.</summary>
    None = 0x00,
    /// <summary>0x01 - CVE newly scored (not in previous snapshot).</summary>
    NewScored = 0x01,
    /// <summary>0x02 - Crossed above the high score threshold.</summary>
    CrossedHigh = 0x02,
    /// <summary>0x04 - Crossed below the high score threshold.</summary>
    CrossedLow = 0x04,
    /// <summary>0x08 - Score increased by more than the big jump delta.</summary>
    BigJumpUp = 0x08,
    /// <summary>0x10 - Score decreased by more than the big jump delta.</summary>
    BigJumpDown = 0x10,
    /// <summary>0x20 - Entered the top percentile band.</summary>
    TopPercentile = 0x20,
    /// <summary>0x40 - Left the top percentile band.</summary>
    LeftTopPercentile = 0x40
}

View File

@@ -0,0 +1,297 @@
// -----------------------------------------------------------------------------
// EpssCsvStreamParser.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-006
// Description: Streaming gzip CSV parser for EPSS snapshots with deterministic validation.
// -----------------------------------------------------------------------------
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Streaming parser for gzip-compressed EPSS CSV snapshots. Produces a
/// single-use session that yields rows as they are decoded and records
/// snapshot metadata (model tag, publish date, row count, decompressed SHA-256).
/// </summary>
public sealed class EpssCsvStreamParser
{
    // Matches model tags such as "v2025.03.14" inside '#' comment lines.
    private static readonly Regex ModelVersionTagRegex = new(@"\bv\d{4}\.\d{2}\.\d{2}\b", RegexOptions.Compiled);
    // Matches ISO dates such as "2025-03-14" inside '#' comment lines.
    private static readonly Regex PublishedDateRegex = new(@"\b\d{4}-\d{2}-\d{2}\b", RegexOptions.Compiled);

    /// <summary>
    /// Wraps <paramref name="gzipStream"/> in a single-use parse session.
    /// The session takes ownership of the stream and disposes it.
    /// </summary>
    public EpssCsvParseSession ParseGzip(Stream gzipStream)
        => new(gzipStream);

    /// <summary>
    /// Async-enumerable session over one EPSS snapshot. Enumerable exactly once;
    /// metadata properties fill in as enumeration progresses, and
    /// <see cref="DecompressedSha256"/> is set only after enumeration completes.
    /// </summary>
    public sealed class EpssCsvParseSession : IAsyncEnumerable<EpssScoreRow>, IAsyncDisposable
    {
        private readonly Stream _gzipStream;
        private bool _enumerated;
        private bool _disposed;

        public EpssCsvParseSession(Stream gzipStream)
        {
            _gzipStream = gzipStream ?? throw new ArgumentNullException(nameof(gzipStream));
        }

        /// <summary>Model version tag (e.g. "v2025.03.14") harvested from comment lines, if any.</summary>
        public string? ModelVersionTag { get; private set; }

        /// <summary>Publish date harvested from comment lines, if any.</summary>
        public DateOnly? PublishedDate { get; private set; }

        /// <summary>Number of data rows yielded so far.</summary>
        public int RowCount { get; private set; }

        /// <summary>"sha256:&lt;hex&gt;" digest of the decompressed CSV; null until enumeration finishes.</summary>
        public string? DecompressedSha256 { get; private set; }

        /// <summary>
        /// Starts the (single) enumeration.
        /// </summary>
        /// <exception cref="ObjectDisposedException">The session was disposed.</exception>
        /// <exception cref="InvalidOperationException">The session was already enumerated.</exception>
        public IAsyncEnumerator<EpssScoreRow> GetAsyncEnumerator(CancellationToken cancellationToken = default)
        {
            if (_disposed)
            {
                throw new ObjectDisposedException(nameof(EpssCsvParseSession));
            }
            if (_enumerated)
            {
                throw new InvalidOperationException("EPSS parse session can only be enumerated once.");
            }
            _enumerated = true;
            return ParseAsync(cancellationToken).GetAsyncEnumerator(cancellationToken);
        }

        public ValueTask DisposeAsync()
        {
            if (_disposed)
            {
                return ValueTask.CompletedTask;
            }
            _disposed = true;
            // The session owns the underlying compressed stream.
            return _gzipStream.DisposeAsync();
        }

        // Decompresses, validates the header, and yields parsed rows one at a time.
        // The HashingReadStream sits between the gzip stream and the reader so the
        // SHA-256 covers the decompressed bytes exactly as they are consumed.
        private async IAsyncEnumerable<EpssScoreRow> ParseAsync([EnumeratorCancellation] CancellationToken cancellationToken)
        {
            await using var gzip = new GZipStream(_gzipStream, CompressionMode.Decompress, leaveOpen: false);
            await using var hashing = new HashingReadStream(gzip);
            using var reader = new StreamReader(
                hashing,
                Encoding.UTF8,
                detectEncodingFromByteOrderMarks: true,
                bufferSize: 64 * 1024,
                leaveOpen: true);
            string? line;
            // Skip leading '#' metadata comments (harvesting model tag / publish
            // date), then require the exact CSV header on the first data line.
            while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                if (line.StartsWith('#'))
                {
                    ParseCommentLine(line);
                    continue;
                }
                // First non-comment line is the CSV header.
                var header = line.Trim();
                if (!header.Equals("cve,epss,percentile", StringComparison.OrdinalIgnoreCase))
                {
                    throw new FormatException($"Unexpected EPSS CSV header: '{header}'. Expected 'cve,epss,percentile'.");
                }
                break;
            }
            if (line is null)
            {
                throw new FormatException("EPSS CSV appears to be empty.");
            }
            while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                if (string.IsNullOrWhiteSpace(line))
                {
                    continue;
                }
                var row = ParseRow(line);
                RowCount++;
                yield return row;
            }
            // Only reached on full enumeration; partial reads leave this null.
            DecompressedSha256 = "sha256:" + hashing.GetHashHex();
        }

        // Harvests the first model tag / publish date seen across comment lines;
        // later comment lines never overwrite earlier matches.
        private void ParseCommentLine(string line)
        {
            if (ModelVersionTag is null)
            {
                var match = ModelVersionTagRegex.Match(line);
                if (match.Success)
                {
                    ModelVersionTag = match.Value;
                }
            }
            if (PublishedDate is null)
            {
                var match = PublishedDateRegex.Match(line);
                if (match.Success && DateOnly.TryParseExact(match.Value, "yyyy-MM-dd", out var date))
                {
                    PublishedDate = date;
                }
            }
        }

        // Splits a "cve,epss,percentile" row on its first two commas (allocation-free
        // via spans) and validates both numeric values are finite and in [0, 1].
        private static EpssScoreRow ParseRow(string line)
        {
            var comma1 = line.IndexOf(',');
            if (comma1 <= 0)
            {
                throw new FormatException($"Invalid EPSS CSV row: '{line}'.");
            }
            var comma2 = line.IndexOf(',', comma1 + 1);
            if (comma2 <= comma1 + 1 || comma2 == line.Length - 1)
            {
                throw new FormatException($"Invalid EPSS CSV row: '{line}'.");
            }
            var cveSpan = line.AsSpan(0, comma1).Trim();
            var scoreSpan = line.AsSpan(comma1 + 1, comma2 - comma1 - 1).Trim();
            var percentileSpan = line.AsSpan(comma2 + 1).Trim();
            var cveId = NormalizeCveId(cveSpan);
            if (!double.TryParse(scoreSpan, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out var score))
            {
                throw new FormatException($"Invalid EPSS score value in row: '{line}'.");
            }
            if (!double.TryParse(percentileSpan, System.Globalization.NumberStyles.Float, System.Globalization.CultureInfo.InvariantCulture, out var percentile))
            {
                throw new FormatException($"Invalid EPSS percentile value in row: '{line}'.");
            }
            if (score < 0.0 || score > 1.0)
            {
                throw new FormatException($"EPSS score out of range [0,1] in row: '{line}'.");
            }
            if (percentile < 0.0 || percentile > 1.0)
            {
                throw new FormatException($"EPSS percentile out of range [0,1] in row: '{line}'.");
            }
            return new EpssScoreRow(cveId, score, percentile);
        }

        // Validates the "CVE-YYYY-NNNN..." shape (case-insensitive prefix, minimum
        // length) and returns the uppercased canonical form.
        private static string NormalizeCveId(ReadOnlySpan<char> value)
        {
            if (value.Length == 0)
            {
                throw new FormatException("EPSS row has empty CVE ID.");
            }
            // Expected: CVE-YYYY-NNNN...
            if (value.Length < "CVE-1999-0000".Length)
            {
                throw new FormatException($"Invalid CVE ID '{value.ToString()}'.");
            }
            if (!value.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
            {
                throw new FormatException($"Invalid CVE ID '{value.ToString()}'.");
            }
            var normalized = value.ToString().ToUpperInvariant();
            return normalized;
        }
    }

    // Read-only pass-through stream that folds every byte read into an
    // incremental SHA-256, so the digest matches exactly what the reader consumed.
    // Write/seek/flush are unsupported by design.
    private sealed class HashingReadStream : Stream
    {
        private readonly Stream _inner;
        private readonly IncrementalHash _hash = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        private bool _disposed;
        private string? _sha256Hex;

        public HashingReadStream(Stream inner)
        {
            _inner = inner ?? throw new ArgumentNullException(nameof(inner));
        }

        // Finalizes and caches the digest; safe to call more than once,
        // but bytes read after the first call are not included.
        public string GetHashHex()
        {
            if (_sha256Hex is not null)
            {
                return _sha256Hex;
            }
            var digest = _hash.GetHashAndReset();
            _sha256Hex = Convert.ToHexString(digest).ToLowerInvariant();
            return _sha256Hex;
        }

        public override bool CanRead => !_disposed && _inner.CanRead;
        public override bool CanSeek => false;
        public override bool CanWrite => false;
        public override long Length => throw new NotSupportedException();
        public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
        public override void Flush() => throw new NotSupportedException();
        public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
        public override void SetLength(long value) => throw new NotSupportedException();
        public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();

        public override int Read(byte[] buffer, int offset, int count)
        {
            var read = _inner.Read(buffer, offset, count);
            if (read > 0)
            {
                _hash.AppendData(buffer, offset, read);
            }
            return read;
        }

        public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
        {
            var read = await _inner.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
            if (read > 0)
            {
                var slice = buffer.Slice(0, read);
                _hash.AppendData(slice.Span);
            }
            return read;
        }

        protected override void Dispose(bool disposing)
        {
            if (_disposed)
            {
                return;
            }
            if (disposing)
            {
                _hash.Dispose();
                _inner.Dispose();
            }
            _disposed = true;
            base.Dispose(disposing);
        }

        public override async ValueTask DisposeAsync()
        {
            if (_disposed)
            {
                return;
            }
            _hash.Dispose();
            await _inner.DisposeAsync().ConfigureAwait(false);
            _disposed = true;
            await base.DisposeAsync().ConfigureAwait(false);
        }
    }
}

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// EpssOnlineSource.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-004
// Description: Online EPSS source that downloads FIRST.org CSV.gz snapshots.
// -----------------------------------------------------------------------------
using System.Net.Http;
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Online EPSS source that downloads FIRST.org CSV.gz snapshots to a
/// temporary file, which is deleted when the returned wrapper is disposed.
/// </summary>
public sealed class EpssOnlineSource : IEpssSource
{
    public const string DefaultBaseUri = "https://epss.empiricalsecurity.com/";

    private readonly HttpClient _httpClient;
    private readonly Uri _baseUri;

    /// <param name="httpClient">Shared client (caller-owned; not disposed here).</param>
    /// <param name="baseUri">Absolute base URI; defaults to <see cref="DefaultBaseUri"/>.</param>
    public EpssOnlineSource(HttpClient httpClient, string? baseUri = null)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _baseUri = new Uri(string.IsNullOrWhiteSpace(baseUri) ? DefaultBaseUri : baseUri, UriKind.Absolute);
    }

    /// <summary>
    /// Downloads the snapshot for <paramref name="modelDate"/> into a unique temp
    /// file. On any failure the partial temp file is deleted before rethrowing.
    /// </summary>
    /// <exception cref="HttpRequestException">The server returned a non-success status.</exception>
    public async ValueTask<EpssSourceFile> GetAsync(DateOnly modelDate, CancellationToken cancellationToken = default)
    {
        // Invariant culture keeps the file name deterministic regardless of the
        // process default culture/calendar.
        var fileName = $"epss_scores-{modelDate.ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture)}.csv.gz";
        var uri = new Uri(_baseUri, fileName);
        var tempPath = Path.Combine(
            Path.GetTempPath(),
            $"stellaops-epss-{Guid.NewGuid():n}-{fileName}");

        try
        {
            using var response = await _httpClient.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
            response.EnsureSuccessStatusCode();
            await using var sourceStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
            await using (var destinationStream = new FileStream(tempPath, FileMode.CreateNew, FileAccess.Write, FileShare.None))
            {
                await sourceStream.CopyToAsync(destinationStream, cancellationToken).ConfigureAwait(false);
            }
        }
        catch
        {
            // Don't leak partially-downloaded temp files on failure or cancellation.
            try
            {
                if (File.Exists(tempPath))
                {
                    File.Delete(tempPath);
                }
            }
            catch
            {
                // Best-effort cleanup only.
            }
            throw;
        }

        return new EpssSourceFile(uri.ToString(), tempPath, deleteOnDispose: true);
    }
}

View File

@@ -0,0 +1,17 @@
// -----------------------------------------------------------------------------
// EpssScoreRow.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-002
// Description: DTO representing a parsed EPSS CSV row.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Represents a single row from an EPSS CSV snapshot.
/// </summary>
/// <param name="CveId">Canonical CVE identifier (uppercased by the parser, e.g. "CVE-2024-12345").</param>
/// <param name="Score">EPSS probability score; the parser enforces the range [0, 1].</param>
/// <param name="Percentile">Percentile rank of the score; the parser enforces the range [0, 1].</param>
public readonly record struct EpssScoreRow(
    string CveId,
    double Score,
    double Percentile);

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// EpssSourceFile.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-003
// Description: Local file materialization wrapper for EPSS sources.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Wraps an EPSS snapshot that has been materialized to a local file,
/// optionally deleting the file (best-effort) when the wrapper is disposed.
/// </summary>
public sealed class EpssSourceFile : IAsyncDisposable
{
    /// <param name="sourceUri">URI the snapshot was obtained from (for provenance).</param>
    /// <param name="localPath">Path of the materialized local file.</param>
    /// <param name="deleteOnDispose">True when the file is temporary and owned by this wrapper.</param>
    public EpssSourceFile(string sourceUri, string localPath, bool deleteOnDispose)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceUri);
        ArgumentException.ThrowIfNullOrWhiteSpace(localPath);
        SourceUri = sourceUri;
        LocalPath = localPath;
        DeleteOnDispose = deleteOnDispose;
    }

    /// <summary>URI the snapshot was obtained from.</summary>
    public string SourceUri { get; }

    /// <summary>Path of the materialized local file.</summary>
    public string LocalPath { get; }

    /// <summary>Whether the local file is deleted on dispose.</summary>
    public bool DeleteOnDispose { get; }

    /// <summary>Deletes the local file when owned; deletion failures are swallowed.</summary>
    public ValueTask DisposeAsync()
    {
        if (!DeleteOnDispose)
        {
            return ValueTask.CompletedTask;
        }

        try
        {
            if (File.Exists(LocalPath))
            {
                File.Delete(LocalPath);
            }
        }
        catch
        {
            // Best-effort cleanup only.
        }

        return ValueTask.CompletedTask;
    }
}

View File

@@ -0,0 +1,14 @@
// -----------------------------------------------------------------------------
// IEpssSource.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Task: EPSS-3410-003
// Description: Abstraction for online vs air-gapped EPSS sources.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Storage.Epss;
/// <summary>
/// Abstraction over EPSS snapshot acquisition, allowing online download
/// (FIRST.org) and air-gapped bundle imports to share one ingestion path.
/// </summary>
public interface IEpssSource
{
    /// <summary>
    /// Materializes the CSV.gz snapshot for <paramref name="modelDate"/> as a
    /// local file wrapped in an <see cref="EpssSourceFile"/> (disposing the
    /// wrapper cleans up temporary files where applicable).
    /// </summary>
    ValueTask<EpssSourceFile> GetAsync(DateOnly modelDate, CancellationToken cancellationToken = default);
}

View File

@@ -16,6 +16,7 @@ using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.Storage.Epss;
namespace StellaOps.Scanner.Storage.Extensions;
@@ -81,6 +82,8 @@ public static class ServiceCollectionExtensions
services.AddScoped<IReachabilityResultRepository, PostgresReachabilityResultRepository>();
services.AddScoped<ICodeChangeRepository, PostgresCodeChangeRepository>();
services.AddScoped<IReachabilityDriftResultRepository, PostgresReachabilityDriftResultRepository>();
services.AddSingleton<EpssCsvStreamParser>();
services.AddScoped<IEpssRepository, PostgresEpssRepository>();
services.AddSingleton<IEntryTraceResultStore, EntryTraceResultStore>();
services.AddSingleton<IRubyPackageInventoryStore, RubyPackageInventoryStore>();
services.AddSingleton<IBunPackageInventoryStore, BunPackageInventoryStore>();

View File

@@ -0,0 +1,78 @@
-- SPDX-License-Identifier: AGPL-3.0-or-later
-- Sprint: 3413
-- Task: EPSS Raw Feed Layer
-- Description: Creates epss_raw table for immutable full payload storage
-- Enables deterministic replay without re-downloading from FIRST.org
-- Advisory: 18-Dec-2025 - Designing a Layered EPSS v4 Database.md
-- ============================================================================
-- EPSS Raw Feed Storage (Immutable)
-- ============================================================================
-- Layer 1 of 3-layer EPSS architecture
-- Stores full CSV payload as JSONB for deterministic replay capability
-- Expected storage: ~15MB/day compressed → ~5GB/year in JSONB
-- Layer 1 payload table: one row per ingested snapshot, keyed by
-- (source, as-of date, content hash) for idempotent re-ingestion.
CREATE TABLE IF NOT EXISTS epss_raw (
    raw_id BIGSERIAL PRIMARY KEY,
    source_uri TEXT NOT NULL,
    asof_date DATE NOT NULL,
    ingestion_ts TIMESTAMPTZ NOT NULL DEFAULT now(),
    -- Full payload storage
    payload JSONB NOT NULL, -- Full CSV content as JSON array of {cve, epss, percentile}
    payload_sha256 BYTEA NOT NULL, -- SHA-256 of decompressed content for integrity
    -- Metadata extracted from CSV comment line
    header_comment TEXT, -- Leading # comment if present (e.g., "# model: v2025.03.14...")
    model_version TEXT, -- Extracted model version (e.g., "v2025.03.14")
    published_date DATE, -- Extracted publish date from comment
    -- Stats
    row_count INT NOT NULL,
    compressed_size BIGINT, -- Original .csv.gz file size
    decompressed_size BIGINT, -- Decompressed CSV size
    -- Link to import run (optional, for correlation)
    -- NOTE(review): assumes the epss_import_runs migration runs before this one — confirm migration ordering.
    import_run_id UUID REFERENCES epss_import_runs(import_run_id),
    -- Idempotency: same source + date + content hash = same record
    CONSTRAINT epss_raw_unique UNIQUE (source_uri, asof_date, payload_sha256)
);
-- Performance indexes
CREATE INDEX IF NOT EXISTS idx_epss_raw_asof
ON epss_raw (asof_date DESC);
CREATE INDEX IF NOT EXISTS idx_epss_raw_model
ON epss_raw (model_version);
CREATE INDEX IF NOT EXISTS idx_epss_raw_import_run
ON epss_raw (import_run_id);
-- Comments
COMMENT ON TABLE epss_raw IS 'Layer 1: Immutable raw EPSS payload storage for deterministic replay';
COMMENT ON COLUMN epss_raw.payload IS 'Full CSV content as JSON array: [{cve:"CVE-...", epss:0.123, percentile:0.456}, ...]';
COMMENT ON COLUMN epss_raw.payload_sha256 IS 'SHA-256 hash of decompressed CSV for integrity verification';
COMMENT ON COLUMN epss_raw.header_comment IS 'Raw comment line from CSV (e.g., "# model: v2025.03.14, published: 2025-03-14")';
COMMENT ON COLUMN epss_raw.model_version IS 'Extracted model version for detecting model changes';
-- ============================================================================
-- Retention Policy Helper
-- ============================================================================
-- Function to prune old raw data (default: keep 365 days).
-- Fix: `retention_days::INTERVAL` is an invalid cast in PostgreSQL (integer
-- cannot be cast to interval); subtracting an integer from a DATE already
-- shifts it by that many days.
CREATE OR REPLACE FUNCTION prune_epss_raw(retention_days INT DEFAULT 365)
RETURNS INT AS $$
DECLARE
    deleted_count INT;
BEGIN
    DELETE FROM epss_raw
    WHERE asof_date < CURRENT_DATE - retention_days;
    GET DIAGNOSTICS deleted_count = ROW_COUNT;
    RAISE NOTICE 'Pruned % epss_raw records older than % days', deleted_count, retention_days;
    RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION prune_epss_raw IS 'Prunes epss_raw records older than retention_days (default: 365)';

View File

@@ -0,0 +1,179 @@
-- SPDX-License-Identifier: AGPL-3.0-or-later
-- Sprint: 3413
-- Task: EPSS Signal-Ready Layer
-- Description: Creates epss_signal table for tenant-scoped actionable events
-- Reduces noise by only signaling for observed CVEs per tenant
-- Advisory: 18-Dec-2025 - Designing a Layered EPSS v4 Database.md
-- ============================================================================
-- EPSS Signal-Ready Events (Tenant-Scoped)
-- ============================================================================
-- Layer 3 of 3-layer EPSS architecture
-- Pre-computed actionable events scoped to observed CVEs per tenant
-- Supports deduplication via dedupe_key and audit trail via explain_hash
-- One row per actionable EPSS event for a tenant; deduplicated via (tenant_id, dedupe_key).
CREATE TABLE IF NOT EXISTS epss_signal (
    signal_id BIGSERIAL PRIMARY KEY,
    tenant_id UUID NOT NULL,
    model_date DATE NOT NULL,
    cve_id TEXT NOT NULL,
    -- Event classification
    event_type TEXT NOT NULL, -- 'RISK_SPIKE', 'BAND_CHANGE', 'NEW_HIGH', 'MODEL_UPDATED'
    risk_band TEXT, -- 'CRITICAL', 'HIGH', 'MEDIUM', 'LOW'
    -- EPSS metrics at signal time
    epss_score DOUBLE PRECISION,
    epss_delta DOUBLE PRECISION, -- Delta from previous day
    percentile DOUBLE PRECISION,
    percentile_delta DOUBLE PRECISION, -- Delta from previous day
    -- Model version tracking
    is_model_change BOOLEAN NOT NULL DEFAULT false, -- True when FIRST.org updated model version
    model_version TEXT,
    -- Idempotency and audit
    dedupe_key TEXT NOT NULL, -- Deterministic key for deduplication
    explain_hash BYTEA NOT NULL, -- SHA-256 of signal inputs for audit trail
    payload JSONB NOT NULL, -- Full evidence payload for downstream consumers
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    -- Deduplication constraint: same tenant + dedupe_key = same signal
    CONSTRAINT epss_signal_dedupe UNIQUE (tenant_id, dedupe_key)
);
-- Performance indexes (all lead with tenant_id so tenant-scoped queries stay index-only friendly)
CREATE INDEX IF NOT EXISTS idx_epss_signal_tenant_date
    ON epss_signal (tenant_id, model_date DESC);
CREATE INDEX IF NOT EXISTS idx_epss_signal_tenant_cve
    ON epss_signal (tenant_id, cve_id, model_date DESC);
CREATE INDEX IF NOT EXISTS idx_epss_signal_event_type
    ON epss_signal (tenant_id, event_type, model_date DESC);
-- Partial index: only the high-severity bands are hot-path for notification queries.
CREATE INDEX IF NOT EXISTS idx_epss_signal_risk_band
    ON epss_signal (tenant_id, risk_band, model_date DESC)
    WHERE risk_band IN ('CRITICAL', 'HIGH');
-- Partial index for the rare model-change days, scanned across all tenants.
CREATE INDEX IF NOT EXISTS idx_epss_signal_model_change
    ON epss_signal (model_date)
    WHERE is_model_change = true;
-- Comments
COMMENT ON TABLE epss_signal IS 'Layer 3: Tenant-scoped EPSS signal events for actionable notifications';
COMMENT ON COLUMN epss_signal.event_type IS 'Event classification: RISK_SPIKE (delta > threshold), BAND_CHANGE (band transition), NEW_HIGH (new CVE in high percentile), MODEL_UPDATED (FIRST.org model version change)';
COMMENT ON COLUMN epss_signal.risk_band IS 'Derived risk band: CRITICAL (>=99.5%), HIGH (>=99%), MEDIUM (>=90%), LOW (<90%)';
COMMENT ON COLUMN epss_signal.is_model_change IS 'True when FIRST.org updated model version (v3->v4 etc), used to suppress noisy delta signals';
COMMENT ON COLUMN epss_signal.dedupe_key IS 'Deterministic key: {model_date}:{cve_id}:{event_type}:{band_before}->{band_after}';
COMMENT ON COLUMN epss_signal.explain_hash IS 'SHA-256 of signal inputs for deterministic audit trail';
COMMENT ON COLUMN epss_signal.payload IS 'Full evidence: {source, metrics, decision, thresholds, evidence_refs}';
-- ============================================================================
-- Signal Event Types Enum (for reference)
-- ============================================================================
-- Not enforced as constraint to allow future extensibility
-- Event Types:
-- - RISK_SPIKE: EPSS delta exceeds big_jump_delta threshold (default: 0.10)
-- - BAND_CHANGE: Risk band transition (e.g., MEDIUM -> HIGH)
-- - NEW_HIGH: CVE newly scored in high percentile (>=95th)
-- - DROPPED_LOW: CVE dropped below low percentile threshold
-- - MODEL_UPDATED: Summary event when FIRST.org updates model version
-- ============================================================================
-- Risk Band Configuration (per tenant)
-- ============================================================================
-- One configuration row per tenant (enforced by the unique constraint below).
CREATE TABLE IF NOT EXISTS epss_signal_config (
    config_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    -- Thresholds for risk banding
    critical_percentile DOUBLE PRECISION NOT NULL DEFAULT 0.995, -- Top 0.5%
    high_percentile DOUBLE PRECISION NOT NULL DEFAULT 0.99, -- Top 1%
    medium_percentile DOUBLE PRECISION NOT NULL DEFAULT 0.90, -- Top 10%
    -- Thresholds for signal generation
    big_jump_delta DOUBLE PRECISION NOT NULL DEFAULT 0.10, -- 10 percentage points
    suppress_on_model_change BOOLEAN NOT NULL DEFAULT true, -- Suppress RISK_SPIKE on model change
    -- Notification preferences
    enabled_event_types TEXT[] NOT NULL DEFAULT ARRAY['RISK_SPIKE', 'BAND_CHANGE', 'NEW_HIGH'],
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    CONSTRAINT epss_signal_config_tenant_unique UNIQUE (tenant_id)
);
-- Comments
COMMENT ON TABLE epss_signal_config IS 'Per-tenant configuration for EPSS signal generation';
COMMENT ON COLUMN epss_signal_config.suppress_on_model_change IS 'When true, suppress RISK_SPIKE and BAND_CHANGE signals on model version change days';
-- ============================================================================
-- Helper Functions
-- ============================================================================
-- Compute risk band from percentile
CREATE OR REPLACE FUNCTION compute_epss_risk_band(
    p_percentile DOUBLE PRECISION,
    p_critical_threshold DOUBLE PRECISION DEFAULT 0.995,
    p_high_threshold DOUBLE PRECISION DEFAULT 0.99,
    p_medium_threshold DOUBLE PRECISION DEFAULT 0.90
) RETURNS TEXT AS $$
BEGIN
    -- FIX: previously a NULL percentile fell through every comparison
    -- (NULL >= x is NULL, treated as false) and was silently classified
    -- as 'LOW'. Return NULL instead so callers can distinguish "unscored"
    -- from "low risk"; epss_signal.risk_band is nullable for this reason.
    IF p_percentile IS NULL THEN
        RETURN NULL;
    END IF;

    IF p_percentile >= p_critical_threshold THEN
        RETURN 'CRITICAL';
    ELSIF p_percentile >= p_high_threshold THEN
        RETURN 'HIGH';
    ELSIF p_percentile >= p_medium_threshold THEN
        RETURN 'MEDIUM';
    ELSE
        RETURN 'LOW';
    END IF;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
COMMENT ON FUNCTION compute_epss_risk_band IS 'Computes risk band from percentile using configurable thresholds';
-- Build the deterministic deduplication key for an EPSS signal.
-- Shape: {model_date}:{cve_id}:{event_type}:{old_band}->{new_band},
-- with an absent band rendered as 'NONE'.
CREATE OR REPLACE FUNCTION compute_epss_signal_dedupe_key(
    p_model_date DATE,
    p_cve_id TEXT,
    p_event_type TEXT,
    p_old_band TEXT,
    p_new_band TEXT
) RETURNS TEXT AS $$
DECLARE
    v_old_band TEXT := COALESCE(p_old_band, 'NONE');
    v_new_band TEXT := COALESCE(p_new_band, 'NONE');
BEGIN
    RETURN format('%s:%s:%s:%s->%s', p_model_date::TEXT, p_cve_id, p_event_type, v_old_band, v_new_band);
END;
$$ LANGUAGE plpgsql IMMUTABLE;
COMMENT ON FUNCTION compute_epss_signal_dedupe_key IS 'Computes deterministic deduplication key for EPSS signals';
-- ============================================================================
-- Retention Policy Helper
-- ============================================================================
CREATE OR REPLACE FUNCTION prune_epss_signals(retention_days INT DEFAULT 90)
RETURNS INT AS $$
DECLARE
    deleted_count INT;
BEGIN
    -- FIX: integer cannot be cast directly to INTERVAL in PostgreSQL
    -- ("retention_days::INTERVAL" raises SQLSTATE 42846 at first execution).
    -- DATE - INTEGER arithmetic yields a DATE and needs no cast.
    DELETE FROM epss_signal
    WHERE model_date < CURRENT_DATE - retention_days;

    GET DIAGNOSTICS deleted_count = ROW_COUNT;
    RAISE NOTICE 'Pruned % epss_signal records older than % days', deleted_count, retention_days;
    RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION prune_epss_signals IS 'Prunes epss_signal records older than retention_days (default: 90)';

View File

@@ -0,0 +1,601 @@
// -----------------------------------------------------------------------------
// PostgresEpssRepository.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Tasks: EPSS-3410-007, EPSS-3410-008
// Description: PostgreSQL persistence for EPSS import runs, time-series scores, current projection, and change log.
// -----------------------------------------------------------------------------
using System.Data;
using Dapper;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Scanner.Storage.Epss;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation of <see cref="IEpssRepository"/> built on Dapper + Npgsql.
/// Persists EPSS import runs, the per-day time-series scores, the "current" projection,
/// and the day-over-day change log, all under the scanner schema.
/// </summary>
public sealed class PostgresEpssRepository : IEpssRepository
{
    // 0/1 flag toggled via Interlocked so Dapper type handlers are registered at most once per process.
    private static int _typeHandlersRegistered;
    private readonly ScannerDataSource _dataSource;

    // Schema-qualified table names; falls back to the scanner default schema when none is configured.
    private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
    private string ImportRunsTable => $"{SchemaName}.epss_import_runs";
    private string ScoresTable => $"{SchemaName}.epss_scores";
    private string CurrentTable => $"{SchemaName}.epss_current";
    private string ChangesTable => $"{SchemaName}.epss_changes";
    private string ConfigTable => $"{SchemaName}.epss_config";

    public PostgresEpssRepository(ScannerDataSource dataSource)
    {
        // Register DateOnly handlers before any Dapper call can run.
        EnsureTypeHandlers();
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Loads the import run recorded for <paramref name="modelDate"/>, or null when none exists.
    /// </summary>
    public async Task<EpssImportRun?> GetImportRunAsync(DateOnly modelDate, CancellationToken cancellationToken = default)
    {
        var sql = $"""
            SELECT
                import_run_id,
                model_date,
                source_uri,
                retrieved_at,
                file_sha256,
                decompressed_sha256,
                row_count,
                model_version_tag,
                published_date,
                status,
                error,
                created_at
            FROM {ImportRunsTable}
            WHERE model_date = @ModelDate
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        var row = await connection.QuerySingleOrDefaultAsync<ImportRunRow>(
            new CommandDefinition(sql, new { ModelDate = modelDate }, cancellationToken: cancellationToken)).ConfigureAwait(false);
        return row?.ToModel();
    }

    /// <summary>
    /// Creates a PENDING import run for <paramref name="modelDate"/>, or resets an existing
    /// non-SUCCEEDED run to PENDING with fresh provenance. If a SUCCEEDED run already exists
    /// it is left untouched and returned so the caller can decide idempotent behavior.
    /// </summary>
    public async Task<EpssImportRun> BeginImportAsync(
        DateOnly modelDate,
        string sourceUri,
        DateTimeOffset retrievedAtUtc,
        string fileSha256,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceUri);
        ArgumentException.ThrowIfNullOrWhiteSpace(fileSha256);

        // Upsert keyed on model_date. The DO UPDATE ... WHERE guard means a SUCCEEDED run is
        // never reset; in that case RETURNING yields no row and we fall through below.
        var insertSql = $"""
            INSERT INTO {ImportRunsTable} (
                model_date,
                source_uri,
                retrieved_at,
                file_sha256,
                row_count,
                status,
                created_at
            ) VALUES (
                @ModelDate,
                @SourceUri,
                @RetrievedAtUtc,
                @FileSha256,
                0,
                'PENDING',
                @RetrievedAtUtc
            )
            ON CONFLICT (model_date) DO UPDATE SET
                source_uri = EXCLUDED.source_uri,
                retrieved_at = EXCLUDED.retrieved_at,
                file_sha256 = EXCLUDED.file_sha256,
                decompressed_sha256 = NULL,
                row_count = 0,
                model_version_tag = NULL,
                published_date = NULL,
                status = 'PENDING',
                error = NULL
            WHERE {ImportRunsTable}.status <> 'SUCCEEDED'
            RETURNING
                import_run_id,
                model_date,
                source_uri,
                retrieved_at,
                file_sha256,
                decompressed_sha256,
                row_count,
                model_version_tag,
                published_date,
                status,
                error,
                created_at
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        var row = await connection.QuerySingleOrDefaultAsync<ImportRunRow>(new CommandDefinition(
            insertSql,
            new
            {
                ModelDate = modelDate,
                SourceUri = sourceUri,
                RetrievedAtUtc = retrievedAtUtc,
                FileSha256 = fileSha256
            },
            cancellationToken: cancellationToken)).ConfigureAwait(false);

        if (row is not null)
        {
            return row.ToModel();
        }

        // Existing SUCCEEDED run: return it to allow the caller to decide idempotent behavior.
        var existing = await GetImportRunAsync(modelDate, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            throw new InvalidOperationException("EPSS import run conflict detected but existing row was not found.");
        }

        return existing;
    }

    /// <summary>
    /// Marks the run SUCCEEDED, clearing any prior error and recording row count, the
    /// decompressed-payload hash, and the model metadata extracted from the CSV header.
    /// </summary>
    public async Task MarkImportSucceededAsync(
        Guid importRunId,
        int rowCount,
        string? decompressedSha256,
        string? modelVersionTag,
        DateOnly? publishedDate,
        CancellationToken cancellationToken = default)
    {
        var sql = $"""
            UPDATE {ImportRunsTable}
            SET status = 'SUCCEEDED',
                error = NULL,
                row_count = @RowCount,
                decompressed_sha256 = @DecompressedSha256,
                model_version_tag = @ModelVersionTag,
                published_date = @PublishedDate
            WHERE import_run_id = @ImportRunId
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await connection.ExecuteAsync(new CommandDefinition(
            sql,
            new
            {
                ImportRunId = importRunId,
                RowCount = rowCount,
                DecompressedSha256 = decompressedSha256,
                ModelVersionTag = modelVersionTag,
                PublishedDate = publishedDate
            },
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    /// <summary>Marks the run FAILED and records the failure reason.</summary>
    public async Task MarkImportFailedAsync(Guid importRunId, string error, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(error);

        var sql = $"""
            UPDATE {ImportRunsTable}
            SET status = 'FAILED',
                error = @Error
            WHERE import_run_id = @ImportRunId
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await connection.ExecuteAsync(new CommandDefinition(
            sql,
            new { ImportRunId = importRunId, Error = error },
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    /// <summary>
    /// Writes one day's snapshot in a single transaction: stages rows via binary COPY into a
    /// temp table, rejects duplicate CVE IDs, appends to the time-series table, computes the
    /// change log, and finally upserts the current projection.
    /// </summary>
    public async Task<EpssWriteResult> WriteSnapshotAsync(
        Guid importRunId,
        DateOnly modelDate,
        DateTimeOffset updatedAtUtc,
        IAsyncEnumerable<EpssScoreRow> rows,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(rows);

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(cancellationToken).ConfigureAwait(false);

        try
        {
            // Make sure the month partition for this model date exists before inserting.
            await EnsurePartitionsAsync(connection, transaction, modelDate, cancellationToken).ConfigureAwait(false);

            // Session-local temp table; ON COMMIT DROP keeps it scoped to this transaction.
            const string stageTable = "epss_stage";
            var createStageSql = $"""
                CREATE TEMP TABLE {stageTable} (
                    cve_id TEXT NOT NULL,
                    epss_score DOUBLE PRECISION NOT NULL,
                    percentile DOUBLE PRECISION NOT NULL
                ) ON COMMIT DROP
                """;
            await connection.ExecuteAsync(new CommandDefinition(
                createStageSql,
                transaction: transaction,
                cancellationToken: cancellationToken)).ConfigureAwait(false);

            var (rowCount, distinctCount) = await CopyStageAsync(connection, transaction, stageTable, rows, cancellationToken).ConfigureAwait(false);
            if (rowCount != distinctCount)
            {
                // Duplicate CVE IDs would corrupt the current projection; fail the whole snapshot.
                throw new InvalidOperationException($"EPSS staging table contains duplicate CVE IDs (rows={rowCount}, distinct={distinctCount}).");
            }

            var insertScoresSql = $"""
                INSERT INTO {ScoresTable} (model_date, cve_id, epss_score, percentile, import_run_id)
                SELECT @ModelDate, cve_id, epss_score, percentile, @ImportRunId
                FROM {stageTable}
                """;
            await connection.ExecuteAsync(new CommandDefinition(
                insertScoresSql,
                new { ModelDate = modelDate, ImportRunId = importRunId },
                transaction: transaction,
                cancellationToken: cancellationToken)).ConfigureAwait(false);

            // Order matters: the change log joins against the current projection for the
            // "old" values, so it must run BEFORE the projection is overwritten.
            await InsertChangesAsync(connection, transaction, stageTable, modelDate, importRunId, cancellationToken).ConfigureAwait(false);
            await UpsertCurrentAsync(connection, transaction, stageTable, modelDate, importRunId, updatedAtUtc, cancellationToken).ConfigureAwait(false);

            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            return new EpssWriteResult(RowCount: rowCount, DistinctCveCount: distinctCount);
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }

    /// <summary>
    /// Returns the current EPSS entry for each requested CVE ID (normalized to upper-case,
    /// de-duplicated). CVEs without a current entry are simply absent from the result.
    /// </summary>
    public async Task<IReadOnlyDictionary<string, EpssCurrentEntry>> GetCurrentAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default)
    {
        if (cveIds is null)
        {
            return new Dictionary<string, EpssCurrentEntry>(StringComparer.Ordinal);
        }

        // Normalize: trim, upper-case, drop blanks, de-duplicate, and order for deterministic queries.
        var normalized = cveIds
            .Where(static id => !string.IsNullOrWhiteSpace(id))
            .Select(static id => id.Trim().ToUpperInvariant())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(static id => id, StringComparer.Ordinal)
            .ToArray();
        if (normalized.Length == 0)
        {
            return new Dictionary<string, EpssCurrentEntry>(StringComparer.Ordinal);
        }

        var sql = $"""
            SELECT cve_id, epss_score, percentile, model_date, import_run_id
            FROM {CurrentTable}
            WHERE cve_id = ANY(@CveIds)
            ORDER BY cve_id
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        var rows = await connection.QueryAsync<CurrentRow>(new CommandDefinition(
            sql,
            new { CveIds = normalized },
            cancellationToken: cancellationToken)).ConfigureAwait(false);

        var result = new Dictionary<string, EpssCurrentEntry>(StringComparer.Ordinal);
        foreach (var row in rows)
        {
            result[row.cve_id] = new EpssCurrentEntry(
                row.cve_id,
                (double)row.epss_score,
                (double)row.percentile,
                row.model_date,
                row.import_run_id);
        }

        return result;
    }

    /// <summary>
    /// Returns up to <paramref name="days"/> most recent score entries for a CVE,
    /// newest first. <paramref name="days"/> is clamped to [1, 3650].
    /// </summary>
    public async Task<IReadOnlyList<EpssHistoryEntry>> GetHistoryAsync(
        string cveId,
        int days,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
        var normalized = cveId.Trim().ToUpperInvariant();
        var limit = Math.Clamp(days, 1, 3650);

        var sql = $"""
            SELECT model_date, epss_score, percentile, import_run_id
            FROM {ScoresTable}
            WHERE cve_id = @CveId
            ORDER BY model_date DESC
            LIMIT @Limit
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
        var rows = await connection.QueryAsync<HistoryRow>(new CommandDefinition(
            sql,
            new { CveId = normalized, Limit = limit },
            cancellationToken: cancellationToken)).ConfigureAwait(false);

        return rows.Select(static row => new EpssHistoryEntry(
                row.model_date,
                (double)row.epss_score,
                (double)row.percentile,
                row.import_run_id))
            .ToList();
    }

    // Invokes the SQL helper that creates the month partition for the snapshot's model date.
    private static async Task EnsurePartitionsAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        DateOnly modelDate,
        CancellationToken cancellationToken)
    {
        var sql = "SELECT create_epss_partition(@Year, @Month)";
        await connection.ExecuteAsync(new CommandDefinition(
            sql,
            new { Year = modelDate.Year, Month = modelDate.Month },
            transaction: transaction,
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    // Streams rows into the staging temp table via binary COPY and returns
    // (rows written client-side, distinct cve_id count reported by the server).
    private static async Task<(int RowCount, int DistinctCount)> CopyStageAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        string stageTable,
        IAsyncEnumerable<EpssScoreRow> rows,
        CancellationToken cancellationToken)
    {
        var rowCount = 0;
        await using (var importer = connection.BeginBinaryImport($"COPY {stageTable} (cve_id, epss_score, percentile) FROM STDIN (FORMAT BINARY)"))
        {
            await foreach (var row in rows.WithCancellation(cancellationToken).ConfigureAwait(false))
            {
                await importer.StartRowAsync(cancellationToken).ConfigureAwait(false);
                await importer.WriteAsync(row.CveId, NpgsqlDbType.Text, cancellationToken).ConfigureAwait(false);
                await importer.WriteAsync(row.Score, NpgsqlDbType.Double, cancellationToken).ConfigureAwait(false);
                await importer.WriteAsync(row.Percentile, NpgsqlDbType.Double, cancellationToken).ConfigureAwait(false);
                rowCount++;
            }

            await importer.CompleteAsync(cancellationToken).ConfigureAwait(false);
        }

        // Only distinct_count is consumed from this query; the 'total' column is ignored
        // by Dapper since StageCounts has no matching property.
        var countsSql = $"""
            SELECT COUNT(*) AS total, COUNT(DISTINCT cve_id) AS distinct_count
            FROM {stageTable}
            """;
        var counts = await connection.QuerySingleAsync<StageCounts>(new CommandDefinition(
            countsSql,
            transaction: transaction,
            cancellationToken: cancellationToken)).ConfigureAwait(false);

        return (rowCount, counts.distinct_count);
    }

    // Writes the day-over-day change log by joining staged rows against the (pre-update)
    // current projection; rows with no prior entry get NULL old values and deltas.
    // NOTE(review): the CROSS JOIN requires a global config row (org_id IS NULL); if none
    // exists the join is empty and NO change rows are written — confirm a default is seeded.
    private async Task InsertChangesAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        string stageTable,
        DateOnly modelDate,
        Guid importRunId,
        CancellationToken cancellationToken)
    {
        var sql = $"""
            INSERT INTO {ChangesTable} (
                model_date,
                cve_id,
                old_score,
                new_score,
                delta_score,
                old_percentile,
                new_percentile,
                delta_percentile,
                flags,
                import_run_id
            )
            SELECT
                @ModelDate,
                s.cve_id,
                c.epss_score AS old_score,
                s.epss_score AS new_score,
                CASE WHEN c.epss_score IS NULL THEN NULL ELSE s.epss_score - c.epss_score END AS delta_score,
                c.percentile AS old_percentile,
                s.percentile AS new_percentile,
                CASE WHEN c.percentile IS NULL THEN NULL ELSE s.percentile - c.percentile END AS delta_percentile,
                compute_epss_change_flags(
                    c.epss_score,
                    s.epss_score,
                    c.percentile,
                    s.percentile,
                    cfg.high_score,
                    cfg.high_percentile,
                    cfg.big_jump_delta
                ) AS flags,
                @ImportRunId
            FROM {stageTable} s
            LEFT JOIN {CurrentTable} c ON c.cve_id = s.cve_id
            CROSS JOIN (
                SELECT high_score, high_percentile, big_jump_delta
                FROM {ConfigTable}
                WHERE org_id IS NULL
                LIMIT 1
            ) cfg
            """;

        await connection.ExecuteAsync(new CommandDefinition(
            sql,
            new { ModelDate = modelDate, ImportRunId = importRunId },
            transaction: transaction,
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    // Upserts the per-CVE "current" projection from the staged snapshot.
    private async Task UpsertCurrentAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        string stageTable,
        DateOnly modelDate,
        Guid importRunId,
        DateTimeOffset updatedAtUtc,
        CancellationToken cancellationToken)
    {
        var sql = $"""
            INSERT INTO {CurrentTable} (
                cve_id,
                epss_score,
                percentile,
                model_date,
                import_run_id,
                updated_at
            )
            SELECT
                cve_id,
                epss_score,
                percentile,
                @ModelDate,
                @ImportRunId,
                @UpdatedAtUtc
            FROM {stageTable}
            ON CONFLICT (cve_id) DO UPDATE SET
                epss_score = EXCLUDED.epss_score,
                percentile = EXCLUDED.percentile,
                model_date = EXCLUDED.model_date,
                import_run_id = EXCLUDED.import_run_id,
                updated_at = EXCLUDED.updated_at
            """;

        await connection.ExecuteAsync(new CommandDefinition(
            sql,
            new { ModelDate = modelDate, ImportRunId = importRunId, UpdatedAtUtc = updatedAtUtc },
            transaction: transaction,
            cancellationToken: cancellationToken)).ConfigureAwait(false);
    }

    // Dapper row shape for the staging count query (column names match SQL aliases).
    private sealed class StageCounts
    {
        public int distinct_count { get; set; }
    }

    // Dapper row shape for epss_import_runs; snake_case members map 1:1 to columns.
    private sealed class ImportRunRow
    {
        public Guid import_run_id { get; set; }
        public DateOnly model_date { get; set; }
        public string source_uri { get; set; } = "";
        public DateTimeOffset retrieved_at { get; set; }
        public string file_sha256 { get; set; } = "";
        public string? decompressed_sha256 { get; set; }
        public int row_count { get; set; }
        public string? model_version_tag { get; set; }
        public DateOnly? published_date { get; set; }
        public string status { get; set; } = "";
        public string? error { get; set; }
        public DateTimeOffset created_at { get; set; }

        public EpssImportRun ToModel() => new(
            ImportRunId: import_run_id,
            ModelDate: model_date,
            SourceUri: source_uri,
            RetrievedAtUtc: retrieved_at,
            FileSha256: file_sha256,
            DecompressedSha256: decompressed_sha256,
            RowCount: row_count,
            ModelVersionTag: model_version_tag,
            PublishedDate: published_date,
            Status: status,
            Error: error,
            CreatedAtUtc: created_at);
    }

    // Dapper row shape for the current-projection query.
    private sealed class CurrentRow
    {
        public string cve_id { get; set; } = "";
        public decimal epss_score { get; set; }
        public decimal percentile { get; set; }
        public DateOnly model_date { get; set; }
        public Guid import_run_id { get; set; }
    }

    // Dapper row shape for the history query.
    private sealed class HistoryRow
    {
        public DateOnly model_date { get; set; }
        public decimal epss_score { get; set; }
        public decimal percentile { get; set; }
        public Guid import_run_id { get; set; }
    }

    // Registers DateOnly handlers with Dapper exactly once (process-wide).
    private static void EnsureTypeHandlers()
    {
        if (Interlocked.Exchange(ref _typeHandlersRegistered, 1) == 1)
        {
            return;
        }

        SqlMapper.AddTypeHandler(new DateOnlyTypeHandler());
        SqlMapper.AddTypeHandler(new NullableDateOnlyTypeHandler());
    }

    // Maps DateOnly <-> Postgres DATE for Dapper parameters and result columns.
    private sealed class DateOnlyTypeHandler : SqlMapper.TypeHandler<DateOnly>
    {
        public override void SetValue(IDbDataParameter parameter, DateOnly value)
        {
            parameter.Value = value;
            if (parameter is NpgsqlParameter npgsqlParameter)
            {
                // Pin the wire type so Npgsql does not have to infer it.
                npgsqlParameter.NpgsqlDbType = NpgsqlDbType.Date;
            }
        }

        public override DateOnly Parse(object value)
        {
            return value switch
            {
                DateOnly dateOnly => dateOnly,
                DateTime dateTime => DateOnly.FromDateTime(dateTime),
                // Fallback cast throws InvalidCastException for any other provider type.
                _ => DateOnly.FromDateTime((DateTime)value)
            };
        }
    }

    // Nullable counterpart of DateOnlyTypeHandler; maps null <-> DBNull.
    private sealed class NullableDateOnlyTypeHandler : SqlMapper.TypeHandler<DateOnly?>
    {
        public override void SetValue(IDbDataParameter parameter, DateOnly? value)
        {
            if (value is null)
            {
                parameter.Value = DBNull.Value;
                return;
            }

            parameter.Value = value.Value;
            if (parameter is NpgsqlParameter npgsqlParameter)
            {
                npgsqlParameter.NpgsqlDbType = NpgsqlDbType.Date;
            }
        }

        public override DateOnly? Parse(object value)
        {
            if (value is null || value is DBNull)
            {
                return null;
            }

            return value switch
            {
                DateOnly dateOnly => dateOnly,
                DateTime dateTime => DateOnly.FromDateTime(dateTime),
                _ => DateOnly.FromDateTime((DateTime)value)
            };
        }
    }
}

View File

@@ -0,0 +1,89 @@
// -----------------------------------------------------------------------------
// IEpssRepository.cs
// Sprint: SPRINT_3410_0001_0001_epss_ingestion_storage
// Tasks: EPSS-3410-007, EPSS-3410-008
// Description: EPSS persistence contract (import runs, scores/current projection, change log).
// -----------------------------------------------------------------------------
using StellaOps.Scanner.Storage.Epss;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Persistence contract for EPSS data: import runs, the daily score time-series,
/// the per-CVE current projection, and the day-over-day change log.
/// </summary>
public interface IEpssRepository
{
    /// <summary>Returns the import run recorded for <paramref name="modelDate"/>, or null when none exists.</summary>
    Task<EpssImportRun?> GetImportRunAsync(DateOnly modelDate, CancellationToken cancellationToken = default);

    /// <summary>
    /// Creates (or resets) the import run record for a model date.
    /// </summary>
    Task<EpssImportRun> BeginImportAsync(
        DateOnly modelDate,
        string sourceUri,
        DateTimeOffset retrievedAtUtc,
        string fileSha256,
        CancellationToken cancellationToken = default);

    /// <summary>Marks the run SUCCEEDED, recording row count, decompressed hash, and model metadata.</summary>
    Task MarkImportSucceededAsync(
        Guid importRunId,
        int rowCount,
        string? decompressedSha256,
        string? modelVersionTag,
        DateOnly? publishedDate,
        CancellationToken cancellationToken = default);

    /// <summary>Marks the run FAILED with the given error message.</summary>
    Task MarkImportFailedAsync(
        Guid importRunId,
        string error,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Writes the EPSS snapshot into time-series storage, computes changes, and updates the current projection.
    /// </summary>
    Task<EpssWriteResult> WriteSnapshotAsync(
        Guid importRunId,
        DateOnly modelDate,
        DateTimeOffset updatedAtUtc,
        IAsyncEnumerable<EpssScoreRow> rows,
        CancellationToken cancellationToken = default);

    /// <summary>Returns the current entry per requested CVE ID; CVEs with no entry are absent from the map.</summary>
    Task<IReadOnlyDictionary<string, EpssCurrentEntry>> GetCurrentAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default);

    /// <summary>Returns up to <paramref name="days"/> most recent history entries for a CVE.</summary>
    Task<IReadOnlyList<EpssHistoryEntry>> GetHistoryAsync(
        string cveId,
        int days,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// One EPSS ingestion attempt for a model date, with source provenance (URI, file hash,
/// decompressed hash), outcome status/error, and model metadata extracted from the feed.
/// </summary>
public sealed record EpssImportRun(
    Guid ImportRunId,
    DateOnly ModelDate,
    string SourceUri,
    DateTimeOffset RetrievedAtUtc,
    string FileSha256,
    string? DecompressedSha256,
    int RowCount,
    string? ModelVersionTag,
    DateOnly? PublishedDate,
    string Status,
    string? Error,
    DateTimeOffset CreatedAtUtc);
/// <summary>Outcome of a snapshot write: rows staged and distinct CVE IDs observed.</summary>
public readonly record struct EpssWriteResult(
    int RowCount,
    int DistinctCveCount);
/// <summary>Latest EPSS score/percentile for a CVE plus the model date and run that produced it.</summary>
public sealed record EpssCurrentEntry(
    string CveId,
    double Score,
    double Percentile,
    DateOnly ModelDate,
    Guid ImportRunId);
/// <summary>One historical EPSS observation for a CVE on a given model date.</summary>
public sealed record EpssHistoryEntry(
    DateOnly ModelDate,
    double Score,
    double Percentile,
    Guid ImportRunId);

View File

@@ -3,4 +3,13 @@
| Task ID | Sprint | Status | Notes |
| --- | --- | --- | --- |
| `PROOFSPINE-3100-DB` | `docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md` | DOING | Add Postgres migrations and repository for ProofSpine persistence (`proof_spines`, `proof_segments`, `proof_spine_history`). |
| `SCAN-API-3103-004` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DOING | Fix scanner storage connection/schema issues surfaced by Scanner WebService ingestion tests. |
| `SCAN-API-3103-004` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DONE | Fix scanner storage connection/schema issues surfaced by Scanner WebService ingestion tests. |
| `DRIFT-3600-DB` | `docs/implplan/SPRINT_3600_0003_0001_drift_detection_engine.md` | DONE | Add drift tables migration + code change/drift result repositories + DI wiring. |
| `EPSS-3410-001` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DONE | Added EPSS schema migration `Postgres/Migrations/008_epss_integration.sql` and wired via `MigrationIds.cs`. |
| `EPSS-3410-002` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement `EpssScoreRow` + ingestion models. |
| `EPSS-3410-003` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement `IEpssSource` interface (online vs bundle). |
| `EPSS-3410-004` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement `EpssOnlineSource` (download to temp; hash provenance). |
| `EPSS-3410-005` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement `EpssBundleSource` (air-gap file input). |
| `EPSS-3410-006` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement streaming `EpssCsvStreamParser` (validation + header comment extraction). |
| `EPSS-3410-007` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement Postgres `IEpssRepository` (runs + scores/current/changes). |
| `EPSS-3410-008` | `docs/implplan/SPRINT_3410_0001_0001_epss_ingestion_storage.md` | DOING | Implement change detection + flags (`compute_epss_change_flags` + delta join). |

View File

@@ -90,7 +90,7 @@ public class NativeFormatDetectorTests
Assert.Equal(NativeFormat.Elf, id.Format);
Assert.Equal("x86_64", id.CpuArchitecture);
Assert.Equal("/lib64/ld-linux-x86-64.so.2", id.InterpreterPath);
Assert.Equal("0102030405060708090a0b0c0d0e0f10", id.BuildId);
Assert.Equal("gnu-build-id:0102030405060708090a0b0c0d0e0f10", id.BuildId);
}
[Fact]
@@ -150,7 +150,7 @@ public class NativeFormatDetectorTests
var cmdOffset = 32;
BitConverter.GetBytes((uint)0x1B).CopyTo(buffer, cmdOffset); // LC_UUID
BitConverter.GetBytes((uint)32).CopyTo(buffer, cmdOffset + 4); // cmdsize
var uuid = Guid.NewGuid();
var uuid = Guid.Parse("f81e1e08-4373-4df0-8a9e-19c23e2addc5");
uuid.ToByteArray().CopyTo(buffer, cmdOffset + 8);
using var stream = new MemoryStream(buffer);
@@ -158,7 +158,7 @@ public class NativeFormatDetectorTests
Assert.True(detected);
Assert.Equal(NativeFormat.MachO, id.Format);
Assert.Equal(uuid.ToString(), id.Uuid);
Assert.Equal($"macho-uuid:{Convert.ToHexString(uuid.ToByteArray()).ToLowerInvariant()}", id.Uuid);
}
[Fact]

View File

@@ -19,7 +19,7 @@ public class PeImportParserTests : NativeTestBase
var info = ParsePe(pe);
info.Is64Bit.Should().BeFalse();
info.Machine.Should().Be("x86_64");
info.Machine.Should().Be("x86");
info.Subsystem.Should().Be(PeSubsystem.WindowsConsole);
}

View File

@@ -0,0 +1,42 @@
using StellaOps.Scanner.Storage.Epss;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
/// <summary>
/// Verifies that <c>EpssChangeDetector.ComputeFlags</c> emits the expected flag bitmask
/// for an upward crossing, a downward crossing, and a first-time scoring.
/// </summary>
public sealed class EpssChangeDetectorTests
{
    [Fact]
    public void ComputeFlags_MatchesExpectedBitmask()
    {
        var thresholds = EpssChangeDetector.DefaultThresholds;

        // Score rises 0.40 -> 0.55 and percentile enters the top band.
        var upward = EpssChangeDetector.ComputeFlags(
            oldScore: 0.40,
            newScore: 0.55,
            oldPercentile: 0.90,
            newPercentile: 0.95,
            thresholds);
        Assert.Equal(
            EpssChangeFlags.CrossedHigh | EpssChangeFlags.BigJumpUp | EpssChangeFlags.TopPercentile,
            upward);

        // Mirror case: score falls 0.60 -> 0.45 and percentile leaves the top band.
        var downward = EpssChangeDetector.ComputeFlags(
            oldScore: 0.60,
            newScore: 0.45,
            oldPercentile: 0.96,
            newPercentile: 0.94,
            thresholds);
        Assert.Equal(
            EpssChangeFlags.CrossedLow | EpssChangeFlags.BigJumpDown | EpssChangeFlags.LeftTopPercentile,
            downward);

        // No prior observation: flagged as newly scored, already in the top percentile.
        var firstScored = EpssChangeDetector.ComputeFlags(
            oldScore: null,
            newScore: 0.70,
            oldPercentile: null,
            newPercentile: 0.97,
            thresholds);
        Assert.Equal(EpssChangeFlags.NewScored | EpssChangeFlags.TopPercentile, firstScored);
    }
}

View File

@@ -0,0 +1,53 @@
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Scanner.Storage.Epss;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
/// <summary>
/// Verifies that the gzip CSV parser yields normalized rows, extracts header metadata
/// (model version + published date), and reports the decompressed-payload SHA-256.
/// </summary>
public sealed class EpssCsvStreamParserTests
{
    [Fact]
    public async Task ParseGzip_ParsesRowsAndComputesDecompressedHash()
    {
        // Header comment, column header, two data rows (one lower-case CVE id), trailing newline.
        const string csv =
            "# EPSS v2025.12.17 published 2025-12-17\n"
            + "cve,epss,percentile\n"
            + "CVE-2024-0001,0.1,0.5\n"
            + "cve-2024-0002,1.0,1.0\n";
        var payload = Encoding.UTF8.GetBytes(csv);
        var expectedHash = "sha256:" + Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();

        // Compress the payload; leaveOpen keeps the backing stream usable for parsing.
        await using var compressed = new MemoryStream();
        await using (var gzip = new GZipStream(compressed, CompressionLevel.Optimal, leaveOpen: true))
        {
            await gzip.WriteAsync(payload);
        }

        compressed.Position = 0;

        var parser = new EpssCsvStreamParser();
        var session = parser.ParseGzip(compressed);
        var rows = new List<EpssScoreRow>();
        await foreach (var row in session)
        {
            rows.Add(row);
        }

        Assert.Equal(2, session.RowCount);
        Assert.Equal("v2025.12.17", session.ModelVersionTag);
        Assert.Equal(new DateOnly(2025, 12, 17), session.PublishedDate);
        Assert.Equal(expectedHash, session.DecompressedSha256);
        Assert.Equal("CVE-2024-0001", rows[0].CveId);
        Assert.Equal(0.1, rows[0].Score, precision: 6);
        Assert.Equal(0.5, rows[0].Percentile, precision: 6);
        Assert.Equal("CVE-2024-0002", rows[1].CveId);
    }
}

View File

@@ -0,0 +1,126 @@
using Dapper;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Storage.Epss;
using StellaOps.Scanner.Storage.Postgres;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
/// <summary>
/// Postgres integration tests for <c>PostgresEpssRepository</c>: imports two consecutive
/// daily EPSS snapshots and verifies that change rows are computed and the "current"
/// score view reflects the newest day. Runs against the shared scanner Postgres fixture.
/// </summary>
[Collection("scanner-postgres")]
public sealed class EpssRepositoryIntegrationTests : IAsyncLifetime
{
// Shared fixture providing the test database connection string and schema.
private readonly ScannerPostgresFixture _fixture;
// Initialized per-test in InitializeAsync; null! silences nullable warnings until then.
private ScannerDataSource _dataSource = null!;
private PostgresEpssRepository _repository = null!;
public EpssRepositoryIntegrationTests(ScannerPostgresFixture fixture)
{
_fixture = fixture;
}
// Per-test setup: reset all tables, then build a data source and repository
// pointed at the fixture's connection string and schema.
public async Task InitializeAsync()
{
await _fixture.TruncateAllTablesAsync();
var options = new ScannerStorageOptions
{
Postgres = new StellaOps.Infrastructure.Postgres.Options.PostgresOptions
{
ConnectionString = _fixture.ConnectionString,
SchemaName = _fixture.SchemaName
}
};
_dataSource = new ScannerDataSource(Options.Create(options), NullLoggerFactory.Instance.CreateLogger<ScannerDataSource>());
_repository = new PostgresEpssRepository(_dataSource);
}
// No per-test teardown needed; tables are truncated in the next InitializeAsync.
public Task DisposeAsync() => Task.CompletedTask;
[Fact]
public async Task WriteSnapshot_ComputesChangesAndUpdatesCurrent()
{
var thresholds = EpssChangeDetector.DefaultThresholds;
// Day 1: begin an import run (expected to start in PENDING state) and write
// a two-row snapshot, then mark the run succeeded.
var day1 = new DateOnly(2027, 1, 15);
var run1 = await _repository.BeginImportAsync(day1, "bundle://day1.csv.gz", DateTimeOffset.Parse("2027-01-15T00:05:00Z"), "sha256:day1");
Assert.Equal("PENDING", run1.Status);
var day1Rows = new[]
{
new EpssScoreRow("CVE-2024-0001", 0.40, 0.90),
new EpssScoreRow("CVE-2024-0002", 0.60, 0.96)
};
var write1 = await _repository.WriteSnapshotAsync(run1.ImportRunId, day1, DateTimeOffset.Parse("2027-01-15T00:06:00Z"), ToAsync(day1Rows));
Assert.Equal(day1Rows.Length, write1.RowCount);
await _repository.MarkImportSucceededAsync(run1.ImportRunId, write1.RowCount, decompressedSha256: "sha256:decompressed1", modelVersionTag: "v2027.01.15", publishedDate: day1);
// Day 2: both existing CVEs change score and a third CVE appears for the
// first time (no prior score), so all three should produce change rows.
var day2 = new DateOnly(2027, 1, 16);
var run2 = await _repository.BeginImportAsync(day2, "bundle://day2.csv.gz", DateTimeOffset.Parse("2027-01-16T00:05:00Z"), "sha256:day2");
var day2Rows = new[]
{
new EpssScoreRow("CVE-2024-0001", 0.55, 0.95),
new EpssScoreRow("CVE-2024-0002", 0.45, 0.94),
new EpssScoreRow("CVE-2024-0003", 0.70, 0.97)
};
var write2 = await _repository.WriteSnapshotAsync(run2.ImportRunId, day2, DateTimeOffset.Parse("2027-01-16T00:06:00Z"), ToAsync(day2Rows));
Assert.Equal(day2Rows.Length, write2.RowCount);
await _repository.MarkImportSucceededAsync(run2.ImportRunId, write2.RowCount, decompressedSha256: "sha256:decompressed2", modelVersionTag: "v2027.01.16", publishedDate: day2);
// The "current" view must reflect the day-2 snapshot for all three CVEs.
var current = await _repository.GetCurrentAsync(new[] { "CVE-2024-0001", "CVE-2024-0002", "CVE-2024-0003" });
Assert.Equal(3, current.Count);
Assert.Equal(day2, current["CVE-2024-0001"].ModelDate);
// Read the persisted change rows directly and verify each row's flags match
// a fresh EpssChangeDetector computation over the stored old/new values.
await using var connection = await _dataSource.OpenSystemConnectionAsync();
var changes = (await connection.QueryAsync<ChangeRow>(
"""
SELECT cve_id, old_score, new_score, old_percentile, new_percentile, flags
FROM epss_changes
WHERE model_date = @ModelDate
ORDER BY cve_id
""",
new { ModelDate = day2 })).ToList();
Assert.Equal(3, changes.Count);
var cve1 = changes.Single(c => c.cve_id == "CVE-2024-0001");
Assert.Equal(
(int)EpssChangeDetector.ComputeFlags(cve1.old_score, cve1.new_score, cve1.old_percentile, cve1.new_percentile, thresholds),
cve1.flags);
var cve2 = changes.Single(c => c.cve_id == "CVE-2024-0002");
Assert.Equal(
(int)EpssChangeDetector.ComputeFlags(cve2.old_score, cve2.new_score, cve2.old_percentile, cve2.new_percentile, thresholds),
cve2.flags);
// The newly-scored CVE must have no previous score recorded.
var cve3 = changes.Single(c => c.cve_id == "CVE-2024-0003");
Assert.Null(cve3.old_score);
Assert.Equal(
(int)EpssChangeDetector.ComputeFlags(cve3.old_score, cve3.new_score, cve3.old_percentile, cve3.new_percentile, thresholds),
cve3.flags);
}
// Adapts an in-memory row sequence to the IAsyncEnumerable the repository expects;
// Task.Yield forces genuine asynchrony between items.
private static async IAsyncEnumerable<EpssScoreRow> ToAsync(IEnumerable<EpssScoreRow> rows)
{
foreach (var row in rows)
{
yield return row;
await Task.Yield();
}
}
// Projection for the epss_changes query; property names are snake_case so they
// match the selected column names for Dapper's by-name mapping.
private sealed class ChangeRow
{
public string cve_id { get; set; } = "";
public double? old_score { get; set; }
public double new_score { get; set; }
public double? old_percentile { get; set; }
public double new_percentile { get; set; }
public int flags { get; set; }
}
}

View File

@@ -7,7 +7,7 @@ public sealed class AuthorizationTests
[Fact]
public async Task ApiRoutesRequireAuthenticationWhenAuthorityEnabled()
{
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "true";
configuration["scanner:authority:allowAnonymousFallback"] = "false";

View File

@@ -11,7 +11,7 @@ public sealed class CallGraphEndpointsTests
public async Task SubmitCallGraphRequiresContentDigestHeader()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
});
@@ -30,7 +30,7 @@ public sealed class CallGraphEndpointsTests
public async Task SubmitCallGraphReturnsAcceptedAndDetectsDuplicates()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
});
@@ -101,4 +101,3 @@ public sealed class CallGraphEndpointsTests
});
}
}

View File

@@ -15,7 +15,7 @@ namespace StellaOps.Scanner.WebService.Tests.Integration;
/// End-to-end integration tests for the Triage workflow.
/// Tests the complete flow from alert list to decision recording.
/// </summary>
public sealed class TriageWorkflowIntegrationTests : IClassFixture<ScannerApplicationFactory>
public sealed class TriageWorkflowIntegrationTests : IClassFixture<ScannerApplicationFixture>
{
private readonly HttpClient _client;
private static readonly JsonSerializerOptions JsonOptions = new()
@@ -23,9 +23,9 @@ public sealed class TriageWorkflowIntegrationTests : IClassFixture<ScannerApplic
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
public TriageWorkflowIntegrationTests(ScannerApplicationFactory factory)
public TriageWorkflowIntegrationTests(ScannerApplicationFixture fixture)
{
_client = factory.CreateClient();
_client = fixture.Factory.CreateClient();
}
#region Alert List Tests

View File

@@ -4,6 +4,10 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
@@ -22,7 +26,7 @@ public sealed class OfflineKitEndpointsTests
var (keyId, keyPem, dsseJson) = CreateSignedDsse(bundleBytes);
File.WriteAllText(Path.Combine(trustRoots.Path, $"{keyId}.pem"), keyPem, Encoding.UTF8);
using var factory = new ScannerApplicationFactory(config =>
using var factory = new ScannerApplicationFactory().WithOverrides(config =>
{
config["Scanner:OfflineKit:Enabled"] = "true";
config["Scanner:OfflineKit:RequireDsse"] = "true";
@@ -89,7 +93,7 @@ public sealed class OfflineKitEndpointsTests
signatures = new[] { new { keyid = keyId, sig = Convert.ToBase64String(new byte[] { 1, 2, 3 }) } }
}, new JsonSerializerOptions(JsonSerializerDefaults.Web));
using var factory = new ScannerApplicationFactory(config =>
using var factory = new ScannerApplicationFactory().WithOverrides(config =>
{
config["Scanner:OfflineKit:Enabled"] = "true";
config["Scanner:OfflineKit:RequireDsse"] = "true";
@@ -142,7 +146,7 @@ public sealed class OfflineKitEndpointsTests
signatures = new[] { new { keyid = "unknown", sig = Convert.ToBase64String(new byte[] { 1, 2, 3 }) } }
}, new JsonSerializerOptions(JsonSerializerDefaults.Web));
using var factory = new ScannerApplicationFactory(config =>
using var factory = new ScannerApplicationFactory().WithOverrides(config =>
{
config["Scanner:OfflineKit:Enabled"] = "true";
config["Scanner:OfflineKit:RequireDsse"] = "false";
@@ -172,6 +176,57 @@ public sealed class OfflineKitEndpointsTests
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
}
// Verifies that a successful offline-kit import emits an audit event carrying the
// tenant id from the X-Stella-Tenant request header, by swapping the registered
// IOfflineKitAuditEmitter for a capturing test double.
[Fact]
public async Task OfflineKitImport_EmitsAuditEvent_WithTenantHeader()
{
using var contentRoot = new TempDirectory();
var bundleBytes = Encoding.UTF8.GetBytes("deterministic-offline-kit-bundle");
var bundleSha = ComputeSha256Hex(bundleBytes);
var auditEmitter = new CapturingAuditEmitter();
// DSSE and Rekor checks are disabled so the import is accepted without signatures;
// the real audit emitter is replaced with the capturing stub.
using var factory = new ScannerApplicationFactory().WithOverrides(config =>
{
config["Scanner:OfflineKit:Enabled"] = "true";
config["Scanner:OfflineKit:RequireDsse"] = "false";
config["Scanner:OfflineKit:RekorOfflineMode"] = "false";
}, configureServices: services =>
{
services.RemoveAll<IOfflineKitAuditEmitter>();
services.AddSingleton<IOfflineKitAuditEmitter>(auditEmitter);
});
using var configured = factory.WithWebHostBuilder(builder => builder.UseContentRoot(contentRoot.Path));
using var client = configured.CreateClient();
// Multipart body: a JSON metadata part (with the bundle's digest and size) plus
// the bundle bytes themselves.
var metadataJson = JsonSerializer.Serialize(new
{
bundleId = "test-bundle",
bundleSha256 = $"sha256:{bundleSha}",
bundleSize = bundleBytes.Length
}, new JsonSerializerOptions(JsonSerializerDefaults.Web));
using var content = new MultipartFormDataContent();
content.Add(new StringContent(metadataJson, Encoding.UTF8, "application/json"), "metadata");
var bundleContent = new ByteArrayContent(bundleBytes);
bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
content.Add(bundleContent, "bundle", "bundle.tgz");
using var request = new HttpRequestMessage(HttpMethod.Post, "/api/offline-kit/import") { Content = content };
request.Headers.Add("X-Stella-Tenant", "tenant-a");
using var response = await client.SendAsync(request).ConfigureAwait(false);
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
// The captured audit entity must reflect the tenant header and an accepted import.
var entity = auditEmitter.LastRecorded;
Assert.NotNull(entity);
Assert.Equal("tenant-a", entity!.TenantId);
Assert.Equal("offlinekit.import", entity.EventType);
Assert.Equal("accepted", entity.Result);
}
// Returns the lowercase hexadecimal SHA-256 digest of the given bytes.
private static string ComputeSha256Hex(byte[] bytes)
{
    var digest = SHA256.HashData(bytes);
    return Convert.ToHexString(digest).ToLowerInvariant();
}
@@ -247,4 +302,21 @@ public sealed class OfflineKitEndpointsTests
}
}
}
/// <summary>
/// Test double for <c>IOfflineKitAuditEmitter</c> that captures the most recently
/// recorded audit entity so the test can assert on it.
/// </summary>
private sealed class CapturingAuditEmitter : IOfflineKitAuditEmitter
{
    private readonly object _gate = new();
    private OfflineKitAuditEntity? _lastRecorded;

    /// <summary>
    /// Last entity passed to <see cref="RecordAsync"/>, or null if nothing was recorded.
    /// The read is taken under the same lock as the write so the asserting test thread
    /// is guaranteed to observe the value published by the request-handling thread
    /// (the original locked only the write, leaving the read unsynchronized).
    /// </summary>
    public OfflineKitAuditEntity? LastRecorded
    {
        get
        {
            lock (_gate)
            {
                return _lastRecorded;
            }
        }
    }

    public Task RecordAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default)
    {
        lock (_gate)
        {
            _lastRecorded = entity;
        }

        return Task.CompletedTask;
    }
}
}

View File

@@ -10,7 +10,7 @@ public sealed class PlatformEventPublisherRegistrationTests
[Fact]
public void NullPublisherRegisteredWhenEventsDisabled()
{
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:events:enabled"] = "false";
configuration["scanner:events:dsn"] = string.Empty;
@@ -40,7 +40,7 @@ public sealed class PlatformEventPublisherRegistrationTests
try
{
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:events:enabled"] = "true";
configuration["scanner:events:driver"] = "redis";

View File

@@ -1,3 +1,4 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http.Json;
@@ -17,7 +18,7 @@ public sealed class ReachabilityDriftEndpointsTests
public async Task GetDriftReturnsNotFoundWhenNoResultAndNoBaseScanProvided()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
});
@@ -35,15 +36,15 @@ public sealed class ReachabilityDriftEndpointsTests
public async Task GetDriftComputesResultAndListsDriftedSinks()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
});
using var client = factory.CreateClient();
var baseScanId = await CreateScanAsync(client);
var headScanId = await CreateScanAsync(client);
var baseScanId = await CreateScanAsync(client, "base");
var headScanId = await CreateScanAsync(client, "head");
await SeedCallGraphSnapshotsAsync(factory.Services, baseScanId, headScanId);
@@ -134,7 +135,7 @@ public sealed class ReachabilityDriftEndpointsTests
return provisional with { GraphDigest = CallGraphDigests.ComputeGraphDigest(provisional) };
}
private static async Task<string> CreateScanAsync(HttpClient client)
private static async Task<string> CreateScanAsync(HttpClient client, string? clientRequestId = null)
{
var response = await client.PostAsJsonAsync("/api/v1/scans", new ScanSubmitRequest
{
@@ -142,6 +143,11 @@ public sealed class ReachabilityDriftEndpointsTests
{
Reference = "example.com/demo:1.0",
Digest = "sha256:0123456789abcdef"
},
ClientRequestId = clientRequestId,
Metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["test.request"] = clientRequestId ?? string.Empty
}
});
@@ -161,4 +167,3 @@ public sealed class ReachabilityDriftEndpointsTests
int Count,
DriftedSink[] Sinks);
}

View File

@@ -35,7 +35,7 @@ rules:
var hmacKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("scanner-report-hmac-key-2025!"));
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:signing:enabled"] = "true";
configuration["scanner:signing:keyId"] = "scanner-report-signing";
@@ -148,7 +148,7 @@ rules:
action: block
""";
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configuration =>
{
configuration["scanner:signing:enabled"] = "true";

View File

@@ -241,7 +241,7 @@ public sealed class RubyPackagesEndpointsTests
new EntryTraceNdjsonMetadata("scan-placeholder", digest, generatedAt));
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configureServices: services =>
using var factory = new ScannerApplicationFactory().WithOverrides(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore, RecordingEntryTraceResultStore>();
});

View File

@@ -74,7 +74,7 @@ public sealed class RuntimeEndpointsTests
[Fact]
public async Task RuntimeEventsEndpointEnforcesRateLimit()
{
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:runtime:perNodeBurst"] = "1";
configuration["scanner:runtime:perNodeEventsPerSecond"] = "1";
@@ -105,7 +105,7 @@ public sealed class RuntimeEndpointsTests
[Fact]
public async Task RuntimePolicyEndpointReturnsDecisions()
{
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:runtime:policyCacheTtlSeconds"] = "600";
});

View File

@@ -49,7 +49,7 @@ public sealed class RuntimeReconciliationTests
{
var mockObjectStore = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configureServices: services =>
{
services.RemoveAll<IArtifactObjectStore>();
@@ -98,7 +98,7 @@ public sealed class RuntimeReconciliationTests
{
var mockObjectStore = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configureServices: services =>
{
services.RemoveAll<IArtifactObjectStore>();
@@ -188,7 +188,7 @@ public sealed class RuntimeReconciliationTests
{
var mockObjectStore = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configureServices: services =>
{
services.RemoveAll<IArtifactObjectStore>();
@@ -273,7 +273,7 @@ public sealed class RuntimeReconciliationTests
{
var mockObjectStore = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configureServices: services =>
{
services.RemoveAll<IArtifactObjectStore>();
@@ -398,7 +398,7 @@ public sealed class RuntimeReconciliationTests
{
var mockObjectStore = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(
using var factory = new ScannerApplicationFactory().WithOverrides(
configureServices: services =>
{
services.RemoveAll<IArtifactObjectStore>();

View File

@@ -16,7 +16,7 @@ public sealed class SbomEndpointsTests
public async Task SubmitSbomAcceptsCycloneDxJson()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
}, configureServices: services =>

View File

@@ -35,22 +35,36 @@ public sealed class ScannerApplicationFactory : WebApplicationFactory<ServiceSta
["scanner:features:enableSignedReports"] = "false"
};
private readonly Action<IDictionary<string, string?>>? configureConfiguration;
private readonly Action<IServiceCollection>? configureServices;
private Action<IDictionary<string, string?>>? configureConfiguration;
private Action<IServiceCollection>? configureServices;
public ScannerApplicationFactory(
Action<IDictionary<string, string?>>? configureConfiguration = null,
Action<IServiceCollection>? configureServices = null)
public ScannerApplicationFactory()
{
postgresFixture = new ScannerWebServicePostgresFixture();
postgresFixture.InitializeAsync().GetAwaiter().GetResult();
configuration["scanner:storage:dsn"] = postgresFixture.ConnectionString;
configuration["scanner:storage:database"] = postgresFixture.SchemaName;
}
public ScannerApplicationFactory(
Action<IDictionary<string, string?>>? configureConfiguration = null,
Action<IServiceCollection>? configureServices = null)
: this()
{
this.configureConfiguration = configureConfiguration;
this.configureServices = configureServices;
}
public ScannerApplicationFactory WithOverrides(
Action<IDictionary<string, string?>>? configureConfiguration = null,
Action<IServiceCollection>? configureServices = null)
{
this.configureConfiguration = configureConfiguration;
this.configureServices = configureServices;
return this;
}
protected override void ConfigureWebHost(IWebHostBuilder builder)
{
configureConfiguration?.Invoke(configuration);

View File

@@ -0,0 +1,11 @@
using System;
namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
/// xUnit class fixture that owns a single <c>ScannerApplicationFactory</c> shared by
/// all tests in a class, disposing it when the test class completes.
/// </summary>
public sealed class ScannerApplicationFixture : IDisposable
{
    public ScannerApplicationFixture()
    {
        Factory = new ScannerApplicationFactory();
    }

    /// <summary>The shared application factory for this test class.</summary>
    public ScannerApplicationFactory Factory { get; }

    public void Dispose()
    {
        Factory.Dispose();
    }
}

View File

@@ -13,7 +13,7 @@ public sealed partial class ScansEndpointsTests
public async Task EntropyEndpoint_AttachesSnapshot_AndSurfacesInStatus()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(cfg =>
using var factory = new ScannerApplicationFactory().WithOverrides(cfg =>
{
cfg["scanner:authority:enabled"] = "false";
cfg["scanner:authority:allowAnonymousFallback"] = "true";

View File

@@ -24,7 +24,7 @@ public sealed partial class ScansEndpointsTests
using var secrets = new TestSurfaceSecretsScope();
var store = new InMemoryArtifactObjectStore();
using var factory = new ScannerApplicationFactory(configureConfiguration: cfg =>
using var factory = new ScannerApplicationFactory().WithOverrides(configureConfiguration: cfg =>
{
cfg["scanner:artifactStore:bucket"] = "replay-bucket";
},

View File

@@ -18,7 +18,7 @@ public sealed partial class ScansEndpointsTests
public async Task RecordModeService_AttachesReplayAndSurfacedInStatus()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(cfg =>
using var factory = new ScannerApplicationFactory().WithOverrides(cfg =>
{
cfg["scanner:authority:enabled"] = "false";
});

View File

@@ -39,7 +39,7 @@ public sealed partial class ScansEndpointsTests
using var secrets = new TestSurfaceSecretsScope();
RecordingCoordinator coordinator = null!;
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
}, configureServices: services =>
@@ -78,7 +78,7 @@ public sealed partial class ScansEndpointsTests
using var secrets = new TestSurfaceSecretsScope();
RecordingCoordinator coordinator = null!;
using var factory = new ScannerApplicationFactory(configuration =>
using var factory = new ScannerApplicationFactory().WithOverrides(configuration =>
{
configuration["scanner:determinism:feedSnapshotId"] = "feed-2025-11-26";
configuration["scanner:determinism:policySnapshotId"] = "rev-42";
@@ -149,7 +149,7 @@ public sealed partial class ScansEndpointsTests
var ndjson = EntryTraceNdjsonWriter.Serialize(graph, new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt));
var storedResult = new EntryTraceResult(scanId, "sha256:test", generatedAt, graph, ndjson);
using var factory = new ScannerApplicationFactory(configureServices: services =>
using var factory = new ScannerApplicationFactory().WithOverrides(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(storedResult));
});
@@ -169,7 +169,7 @@ public sealed partial class ScansEndpointsTests
public async Task GetEntryTraceReturnsNotFoundWhenMissing()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configureServices: services =>
using var factory = new ScannerApplicationFactory().WithOverrides(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(null));
});

View File

@@ -28,7 +28,7 @@ public sealed class ScoreReplayEndpointsTests : IDisposable
public ScoreReplayEndpointsTests()
{
_secrets = new TestSurfaceSecretsScope();
_factory = new ScannerApplicationFactory(cfg =>
_factory = new ScannerApplicationFactory().WithOverrides(cfg =>
{
cfg["scanner:authority:enabled"] = "false";
cfg["scanner:scoreReplay:enabled"] = "true";

View File

@@ -14,7 +14,7 @@ namespace StellaOps.Scanner.WebService.Tests;
/// <summary>
/// Integration tests for the Unknowns API endpoints.
/// </summary>
public sealed class UnknownsEndpointsTests : IClassFixture<ScannerApplicationFactory>
public sealed class UnknownsEndpointsTests : IClassFixture<ScannerApplicationFixture>
{
private readonly HttpClient _client;
private static readonly JsonSerializerOptions JsonOptions = new()
@@ -22,9 +22,9 @@ public sealed class UnknownsEndpointsTests : IClassFixture<ScannerApplicationFac
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
public UnknownsEndpointsTests(ScannerApplicationFactory factory)
public UnknownsEndpointsTests(ScannerApplicationFixture fixture)
{
_client = factory.CreateClient();
_client = fixture.Factory.CreateClient();
}
[Fact]