Add new features and tests for AirGap and Time modules
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Introduced `SbomService` tasks documentation.
- Updated `StellaOps.sln` to include new projects: `StellaOps.AirGap.Time` and `StellaOps.AirGap.Importer`.
- Added unit tests for `BundleImportPlanner`, `DsseVerifier`, `ImportValidator`, and other components in the `StellaOps.AirGap.Importer.Tests` namespace.
- Implemented `InMemoryBundleRepositories` for testing bundle catalog and item repositories.
- Created `MerkleRootCalculator`, `RootRotationPolicy`, and `TufMetadataValidator` tests.
- Developed `StalenessCalculator` and `TimeAnchorLoader` tests in the `StellaOps.AirGap.Time.Tests` namespace.
- Added `fetch-sbomservice-deps.sh` script for offline dependency fetching.
This commit is contained in:
@@ -0,0 +1,51 @@
|
||||
using StellaOps.Findings.Ledger.Infrastructure.Exports;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Findings.Ledger.Tests.Exports;
|
||||
|
||||
public class ExportPagingTests
{
    [Fact]
    public void ComputeFiltersHash_IsDeterministic()
    {
        // Identical entries inserted in different orders must produce the same
        // hash: the hash is defined over the sorted key set, not insertion order.
        var first = new Dictionary<string, string?>
        {
            ["shape"] = "canonical",
            ["since_sequence"] = "10",
            ["until_sequence"] = "20"
        };

        var second = new Dictionary<string, string?>
        {
            ["until_sequence"] = "20",
            ["shape"] = "canonical",
            ["since_sequence"] = "10"
        };

        Assert.Equal(
            ExportPaging.ComputeFiltersHash(first),
            ExportPaging.ComputeFiltersHash(second));
    }

    [Fact]
    public void PageToken_RoundTrips()
    {
        // A token minted from a page key must parse back to an equal key when
        // presented with the same filters hash.
        var filtersHash = ExportPaging.ComputeFiltersHash(new Dictionary<string, string?>
        {
            ["shape"] = "canonical"
        });
        var original = new ExportPaging.ExportPageKey(5, "v1", "abc123");

        var token = ExportPaging.CreatePageToken(original, filtersHash);
        var ok = ExportPaging.TryParsePageToken(token, filtersHash, out var roundTripped, out var error);

        Assert.True(ok);
        Assert.Null(error);
        Assert.NotNull(roundTripped);
        Assert.Equal(original.SequenceNumber, roundTripped!.SequenceNumber);
        Assert.Equal(original.PolicyVersion, roundTripped.PolicyVersion);
        Assert.Equal(original.CycleHash, roundTripped.CycleHash);
    }
}
|
||||
@@ -4,6 +4,8 @@
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<DefaultItemExcludes>$(DefaultItemExcludes);**/tools/**/*</DefaultItemExcludes>
|
||||
<DisableTransitiveProjectReferences>true</DisableTransitiveProjectReferences>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -275,29 +275,7 @@ app.MapGet("/ledger/export/findings", async Task<Results<FileStreamHttpResult, J
|
||||
return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "page_token_filters_mismatch");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(page.NextPageToken))
|
||||
{
|
||||
httpContext.Response.Headers["X-Stella-Next-Page-Token"] = page.NextPageToken;
|
||||
}
|
||||
httpContext.Response.Headers["X-Stella-Result-Count"] = page.Items.Count.ToString();
|
||||
|
||||
var acceptsNdjson = httpContext.Request.Headers.Accept.Any(h => h.Contains("application/x-ndjson", StringComparison.OrdinalIgnoreCase));
|
||||
if (acceptsNdjson)
|
||||
{
|
||||
httpContext.Response.ContentType = "application/x-ndjson";
|
||||
var stream = new MemoryStream();
|
||||
await using var writer = new Utf8JsonWriter(stream, new JsonWriterOptions { SkipValidation = false, Indented = false });
|
||||
foreach (var item in page.Items)
|
||||
{
|
||||
JsonSerializer.Serialize(writer, item);
|
||||
writer.Flush();
|
||||
await stream.WriteAsync(new byte[] { (byte)'\n' }, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
stream.Position = 0;
|
||||
return TypedResults.Stream(stream, contentType: "application/x-ndjson");
|
||||
}
|
||||
|
||||
return TypedResults.Json(page);
|
||||
return await WritePagedResponse(httpContext, page, cancellationToken).ConfigureAwait(false);
|
||||
})
|
||||
.WithName("LedgerExportFindings")
|
||||
.RequireAuthorization(LedgerExportPolicy)
|
||||
@@ -342,3 +320,33 @@ static LedgerEventResponse CreateResponse(LedgerEventRecord record, string statu
|
||||
MerkleLeafHash = record.MerkleLeafHash,
|
||||
RecordedAt = record.RecordedAt
|
||||
};
|
||||
|
||||
/// <summary>
/// Writes an export page to the response, negotiating between NDJSON and JSON.
/// Paging metadata is always surfaced via the <c>X-Stella-Next-Page-Token</c>
/// and <c>X-Stella-Result-Count</c> headers; the body is NDJSON (one JSON
/// document per item, newline-terminated) when the client's Accept header
/// mentions <c>application/x-ndjson</c>, otherwise a single JSON document.
/// </summary>
/// <param name="httpContext">Request context used for content negotiation and response headers.</param>
/// <param name="page">The page of items plus its optional continuation token.</param>
/// <param name="cancellationToken">Propagated to the per-item stream writes.</param>
static async Task<Results<FileStreamHttpResult, JsonHttpResult<ExportPage<T>>, ProblemHttpResult>> WritePagedResponse<T>(
    HttpContext httpContext,
    ExportPage<T> page,
    CancellationToken cancellationToken)
{
    if (!string.IsNullOrEmpty(page.NextPageToken))
    {
        httpContext.Response.Headers["X-Stella-Next-Page-Token"] = page.NextPageToken;
    }
    httpContext.Response.Headers["X-Stella-Result-Count"] = page.Items.Count.ToString();

    var acceptsNdjson = httpContext.Request.Headers.Accept.Any(h => h.Contains("application/x-ndjson", StringComparison.OrdinalIgnoreCase));
    if (!acceptsNdjson)
    {
        return TypedResults.Json(page);
    }

    httpContext.Response.ContentType = "application/x-ndjson";
    var stream = new MemoryStream();
    var writer = new Utf8JsonWriter(stream, new JsonWriterOptions { SkipValidation = false, Indented = false });
    await using (writer.ConfigureAwait(false))
    {
        // Hoisted outside the loop: the original allocated a fresh byte[] per item.
        var newline = new byte[] { (byte)'\n' };
        foreach (var item in page.Items)
        {
            JsonSerializer.Serialize(writer, item);
            writer.Flush();
            await stream.WriteAsync(newline, cancellationToken).ConfigureAwait(false);
        }
    }

    // The writer is fully disposed (and therefore flushed) before the rewind,
    // so no buffered bytes can land at position 0 after we reset it. The
    // original rewound while the writer was still alive in the enclosing scope.
    stream.Position = 0;
    return TypedResults.Stream(stream, contentType: "application/x-ndjson");
}
|
||||
|
||||
@@ -0,0 +1,97 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.WebUtilities;
|
||||
|
||||
namespace StellaOps.Findings.Ledger.Infrastructure.Exports;
|
||||
|
||||
/// <summary>
/// Helpers for keyset-paginated ledger exports: a canonical hash over the
/// request's filter set, and an opaque base64url page token that binds a
/// continuation key to that hash so a token cannot be replayed against a
/// different filter combination.
/// </summary>
public static class ExportPaging
{
    // Web defaults + camelCase + null-dropping; tokens are compact and stable.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Computes a deterministic, order-insensitive SHA-256 hash of the filter
    /// set. Keys are sorted ordinally and serialized as <c>key=value;</c>
    /// pairs (null values as empty), so equal filter sets always hash equally
    /// regardless of dictionary insertion order.
    /// </summary>
    /// <param name="filters">Filter names mapped to their (possibly null) values.</param>
    /// <returns>Lowercase hex SHA-256 digest of the canonicalized filters.</returns>
    public static string ComputeFiltersHash(IReadOnlyDictionary<string, string?> filters)
    {
        var builder = new StringBuilder();
        foreach (var pair in filters.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            builder.Append(pair.Key).Append('=').Append(pair.Value ?? string.Empty).Append(';');
        }

        // One-shot static helper: no SHA256 instance to allocate or dispose.
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Serializes the continuation key plus the filters hash into an opaque
    /// base64url token suitable for a response header or query parameter.
    /// </summary>
    /// <param name="key">The last-seen page key to resume from.</param>
    /// <param name="filtersHash">Hash of the filters the token is valid for (see <see cref="ComputeFiltersHash"/>).</param>
    public static string CreatePageToken(ExportPageKey key, string filtersHash)
    {
        var payload = new ExportPageToken
        {
            FiltersHash = filtersHash,
            Last = key
        };

        var json = JsonSerializer.Serialize(payload, SerializerOptions);
        return WebEncoders.Base64UrlEncode(Encoding.UTF8.GetBytes(json));
    }

    /// <summary>
    /// Attempts to decode and validate a page token. Fails (returning
    /// <c>false</c> with a machine-readable <paramref name="error"/> code)
    /// when the token is not valid base64url, is not a valid payload, or was
    /// issued for a different filter set than <paramref name="expectedFiltersHash"/>.
    /// </summary>
    /// <param name="token">The opaque token received from the client.</param>
    /// <param name="expectedFiltersHash">Hash of the current request's filters.</param>
    /// <param name="key">The recovered continuation key on success; null otherwise.</param>
    /// <param name="error">Error code on failure: <c>invalid_page_token_encoding</c>, <c>invalid_page_token_payload</c>, or <c>page_token_filters_mismatch</c>.</param>
    public static bool TryParsePageToken(string token, string expectedFiltersHash, out ExportPageKey? key, out string? error)
    {
        key = null;
        error = null;

        byte[] decoded;
        try
        {
            decoded = WebEncoders.Base64UrlDecode(token);
        }
        catch (FormatException)
        {
            error = "invalid_page_token_encoding";
            return false;
        }

        ExportPageToken? payload;
        try
        {
            payload = JsonSerializer.Deserialize<ExportPageToken>(decoded, SerializerOptions);
        }
        catch (JsonException)
        {
            error = "invalid_page_token_payload";
            return false;
        }

        if (payload is null || payload.Last is null)
        {
            error = "invalid_page_token_payload";
            return false;
        }

        // Ordinal comparison: the hash is lowercase hex, so this also rejects
        // tokens minted with a different canonicalization.
        if (!string.Equals(payload.FiltersHash, expectedFiltersHash, StringComparison.Ordinal))
        {
            error = "page_token_filters_mismatch";
            return false;
        }

        key = payload.Last;
        return true;
    }

    /// <summary>Keyset-pagination cursor: the last row the previous page ended on.</summary>
    public sealed record ExportPageKey(long SequenceNumber, string PolicyVersion, string CycleHash);

    // Wire shape of the token payload; property names are pinned with explicit
    // JSON names so token compatibility does not depend on naming policy.
    private sealed class ExportPageToken
    {
        [JsonPropertyName("filters_hash")]
        public string FiltersHash { get; set; } = string.Empty;

        [JsonPropertyName("last")]
        public ExportPageKey? Last { get; set; }
    }
}
|
||||
@@ -31,11 +31,11 @@ internal static class LedgerMetrics
|
||||
|
||||
public static void RecordWriteSuccess(TimeSpan duration, string? tenantId, string? eventType, string? source)
|
||||
{
|
||||
var tags = new TagList
|
||||
var tags = new KeyValuePair<string, object?>[]
|
||||
{
|
||||
{ "tenant", tenantId ?? string.Empty },
|
||||
{ "event_type", eventType ?? string.Empty },
|
||||
{ "source", source ?? string.Empty }
|
||||
new("tenant", tenantId ?? string.Empty),
|
||||
new("event_type", eventType ?? string.Empty),
|
||||
new("source", source ?? string.Empty)
|
||||
};
|
||||
|
||||
WriteLatencySeconds.Record(duration.TotalSeconds, tags);
|
||||
@@ -50,12 +50,12 @@ internal static class LedgerMetrics
|
||||
string? policyVersion,
|
||||
string? evaluationStatus)
|
||||
{
|
||||
var tags = new TagList
|
||||
var tags = new KeyValuePair<string, object?>[]
|
||||
{
|
||||
{ "tenant", tenantId ?? string.Empty },
|
||||
{ "event_type", eventType ?? string.Empty },
|
||||
{ "policy_version", policyVersion ?? string.Empty },
|
||||
{ "evaluation_status", evaluationStatus ?? string.Empty }
|
||||
new("tenant", tenantId ?? string.Empty),
|
||||
new("event_type", eventType ?? string.Empty),
|
||||
new("policy_version", policyVersion ?? string.Empty),
|
||||
new("evaluation_status", evaluationStatus ?? string.Empty)
|
||||
};
|
||||
|
||||
ProjectionApplySeconds.Record(duration.TotalSeconds, tags);
|
||||
|
||||
@@ -4,12 +4,19 @@
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<DefaultItemExcludes>$(DefaultItemExcludes);tools/**/*</DefaultItemExcludes>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Include="migrations\**\*" Pack="false" CopyToOutputDirectory="Never" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Compile Remove="tools/**/*.cs" />
|
||||
<None Remove="tools/**/*" />
|
||||
<None Include="tools/**/*" Pack="false" CopyToOutputDirectory="Never" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
|
||||
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
|
||||
|
||||
@@ -456,7 +456,13 @@ internal sealed class NoOpPolicyEvaluationService : IPolicyEvaluationService
|
||||
{
|
||||
public Task<PolicyEvaluationResult> EvaluateAsync(LedgerEventRecord record, FindingProjection? current, CancellationToken cancellationToken)
|
||||
{
|
||||
return Task.FromResult(new PolicyEvaluationResult("noop", record.OccurredAt, record.RecordedAt, current?.Status ?? "new"));
|
||||
var labels = new JsonObject();
|
||||
return Task.FromResult(new PolicyEvaluationResult(
|
||||
Status: current?.Status ?? "new",
|
||||
Severity: current?.Severity,
|
||||
Labels: labels,
|
||||
ExplainRef: null,
|
||||
Rationale: new JsonArray()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -465,9 +471,9 @@ internal sealed class NoOpProjectionRepository : IFindingProjectionRepository
|
||||
public Task<FindingProjection?> GetAsync(string tenantId, string findingId, string policyVersion, CancellationToken cancellationToken) =>
|
||||
Task.FromResult<FindingProjection?>(null);
|
||||
|
||||
public Task InsertActionAsync(FindingAction action, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
public Task InsertActionAsync(TriageActionEntry entry, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
public Task InsertHistoryAsync(FindingHistory history, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
public Task InsertHistoryAsync(FindingHistoryEntry entry, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
public Task SaveCheckpointAsync(ProjectionCheckpoint checkpoint, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
@@ -475,17 +481,12 @@ internal sealed class NoOpProjectionRepository : IFindingProjectionRepository
|
||||
Task.FromResult(new ProjectionCheckpoint(DateTimeOffset.MinValue, Guid.Empty, DateTimeOffset.MinValue));
|
||||
|
||||
public Task UpsertAsync(FindingProjection projection, CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
|
||||
public Task EnsureIndexesAsync(CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
}
|
||||
|
||||
internal sealed class NoOpMerkleAnchorRepository : IMerkleAnchorRepository
|
||||
{
|
||||
public Task InsertAsync(string tenantId, Guid anchorId, DateTimeOffset windowStart, DateTimeOffset windowEnd, long sequenceStart, long sequenceEnd, string rootHash, long leafCount, DateTime anchoredAt, string? anchorReference, CancellationToken cancellationToken)
|
||||
public Task InsertAsync(string tenantId, Guid anchorId, DateTimeOffset windowStart, DateTimeOffset windowEnd, long sequenceStart, long sequenceEnd, string rootHash, int leafCount, DateTimeOffset anchoredAt, string? anchorReference, CancellationToken cancellationToken)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task<MerkleAnchor?> GetLatestAsync(string tenantId, CancellationToken cancellationToken) =>
|
||||
Task.FromResult<MerkleAnchor?>(null);
|
||||
}
|
||||
|
||||
internal sealed class QueueMerkleAnchorScheduler : IMerkleAnchorScheduler
|
||||
|
||||
Reference in New Issue
Block a user