feat: Complete MongoDB/MinIO removal and integrate CLI consolidation
This commit completes the MongoDB and MinIO removal from the StellaOps platform and integrates the CLI consolidation work from remote. ## Infrastructure Changes - PostgreSQL v16+ is now the ONLY supported database - Valkey v8.0 replaces Redis for caching, DPoP security, and event streams - RustFS is the primary object storage (MinIO fully removed) - NATS is OPTIONAL for messaging (Valkey is default transport) ## Docker Compose Updates Updated all deployment profiles: - deploy/compose/docker-compose.dev.yaml - deploy/compose/docker-compose.airgap.yaml - deploy/compose/docker-compose.stage.yaml - deploy/compose/docker-compose.prod.yaml All profiles now use PostgreSQL + Valkey + RustFS stack. ## Environment Configuration Updated all env.example files with: - Removed: MONGO_*, MINIO_* variables - Added: POSTGRES_*, VALKEY_* variables - Updated: SCANNER_QUEUE_BROKER to use Valkey by default - Enhanced: Surface.Env and Offline Kit configurations ## Aoc.Cli Changes - Removed --mongo option entirely - Made --postgres option required - Removed VerifyMongoAsync method - PostgreSQL is now the only supported backend ## CLI Consolidation (from merge) Integrated plugin architecture for unified CLI: - stella aoc verify (replaces stella-aoc) - stella symbols (replaces stella-symbols) - Plugin manifests and command modules - Migration guide for users ## Documentation Updates - README.md: Updated deployment workflow notes - DEVELOPER_ONBOARDING.md: Complete Valkey-centric flow diagrams - QUICKSTART_HYBRID_DEBUG.md: Removed MongoDB/MinIO references - VERSION_MATRIX.md: Updated infrastructure dependencies - CLEANUP_SUMMARY.md: Marked all cleanup tasks complete - 07_HIGH_LEVEL_ARCHITECTURE.md: Corrected infrastructure stack - 11_DATA_SCHEMAS.md: Valkey keyspace documentation ## Merge Resolution Resolved merge conflicts by accepting incoming changes which had more complete Surface.Env and Offline Kit configurations. 
🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -7,18 +7,11 @@
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Test packages inherited from Directory.Build.props -->
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.Workspaces.Common" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -7,16 +7,7 @@
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
<!-- Test packages inherited from Directory.Build.props -->
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy.csproj" />
|
||||
|
||||
@@ -0,0 +1,255 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AdvisorySnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-006 - Implement advisory snapshot extractor
|
||||
// Description: Extracts advisory data from Concelier for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts advisory data from Concelier database for inclusion in knowledge snapshot bundles.
/// </summary>
public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
{
    // Compact camelCase JSON keeps snapshot payloads small and byte-stable.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IAdvisoryDataSource _dataSource;

    /// <summary>
    /// Creates an extractor backed by the given advisory data source.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public AdvisorySnapshotExtractor(IAdvisoryDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts advisories from all configured feeds. Feeds not listed in
    /// <see cref="AdvisoryExtractionRequest.FeedIds"/> (when non-empty) are skipped.
    /// Per-feed failures are recorded in <see cref="AdvisoryExtractionResult.Errors"/>;
    /// only a failure to enumerate the feeds themselves aborts the whole extraction.
    /// </summary>
    public async Task<AdvisoryExtractionResult> ExtractAllAsync(
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<AdvisoryContent>();
        var errors = new List<string>();
        var totalRecords = 0;

        try
        {
            var feeds = await _dataSource.GetAvailableFeedsAsync(cancellationToken);

            foreach (var feed in feeds)
            {
                // Skip if specific feeds are requested and this isn't one of them.
                if (request.FeedIds is { Count: > 0 } && !request.FeedIds.Contains(feed.FeedId))
                {
                    continue;
                }

                try
                {
                    var feedResult = await ExtractFeedAsync(feed.FeedId, request, cancellationToken);
                    if (feedResult.Success && feedResult.Content is not null)
                    {
                        contents.Add(feedResult.Content);
                        totalRecords += feedResult.RecordCount;
                    }
                    else if (!string.IsNullOrEmpty(feedResult.Error))
                    {
                        errors.Add($"{feed.FeedId}: {feedResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // Isolate per-feed failures so one bad feed doesn't abort the snapshot.
                    errors.Add($"{feed.FeedId}: {ex.Message}");
                }
            }

            return new AdvisoryExtractionResult
            {
                Success = errors.Count == 0,
                Advisories = contents,
                TotalRecordCount = totalRecords,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new AdvisoryExtractionResult
            {
                Success = false,
                Advisories = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts advisories from a specific feed and serializes them as NDJSON,
    /// ordered by advisory id (ordinal) for deterministic output.
    /// Failures are reported via <see cref="FeedExtractionResult.Error"/> rather than thrown.
    /// </summary>
    public async Task<FeedExtractionResult> ExtractFeedAsync(
        string feedId,
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(feedId);

        try
        {
            var advisories = await _dataSource.GetAdvisoriesAsync(
                feedId,
                request.Since,
                request.MaxRecords,
                cancellationToken);

            if (advisories.Count == 0)
            {
                return new FeedExtractionResult
                {
                    Success = true,
                    RecordCount = 0
                };
            }

            // Serialize advisories to NDJSON. Use an explicit '\n' separator so the
            // output bytes are identical on every platform: AppendLine would emit
            // "\r\n" on Windows and break the deterministic-output guarantee.
            var contentBuilder = new StringBuilder();
            foreach (var advisory in advisories.OrderBy(a => a.Id, StringComparer.Ordinal))
            {
                contentBuilder.Append(JsonSerializer.Serialize(advisory, JsonOptions)).Append('\n');
            }

            var contentBytes = Encoding.UTF8.GetBytes(contentBuilder.ToString());

            // Read the clock exactly once so the file name timestamp and SnapshotAt
            // always agree (previously two separate reads could straddle a second).
            var snapshotAt = DateTimeOffset.UtcNow;
            var fileName = $"{feedId}-{snapshotAt:yyyyMMddHHmmss}.ndjson";

            return new FeedExtractionResult
            {
                Success = true,
                RecordCount = advisories.Count,
                Content = new AdvisoryContent
                {
                    FeedId = feedId,
                    FileName = fileName,
                    Content = contentBytes,
                    SnapshotAt = snapshotAt,
                    RecordCount = advisories.Count
                }
            };
        }
        catch (Exception ex)
        {
            return new FeedExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for advisory snapshot extraction.
/// </summary>
public interface IAdvisorySnapshotExtractor
{
    /// <summary>Extracts advisories from all configured feeds into snapshot content blobs.</summary>
    Task<AdvisoryExtractionResult> ExtractAllAsync(
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>Extracts advisories from a single feed identified by <paramref name="feedId"/>.</summary>
    Task<FeedExtractionResult> ExtractFeedAsync(
        string feedId,
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Interface for advisory data access.
/// This should be implemented by Concelier to provide advisory data.
/// </summary>
public interface IAdvisoryDataSource
{
    /// <summary>Lists the feeds that advisories can be extracted from.</summary>
    Task<IReadOnlyList<FeedInfo>> GetAvailableFeedsAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Returns advisories for a feed, optionally filtered to those modified since
    /// <paramref name="since"/> and capped at <paramref name="maxRecords"/> entries.
    /// </summary>
    Task<IReadOnlyList<AdvisoryRecord>> GetAdvisoriesAsync(
        string feedId,
        DateTimeOffset? since = null,
        int? maxRecords = null,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models
|
||||
|
||||
/// <summary>
/// Information about an available feed.
/// </summary>
/// <param name="FeedId">Stable identifier used to request extraction of this feed.</param>
/// <param name="Name">Human-readable feed name.</param>
/// <param name="Ecosystem">Package ecosystem covered by the feed, if any.</param>
public sealed record FeedInfo(string FeedId, string Name, string? Ecosystem);
|
||||
|
||||
/// <summary>
/// A single advisory record.
/// </summary>
public sealed record AdvisoryRecord
{
    /// <summary>Advisory identifier; used as the ordinal sort key for deterministic export.</summary>
    public required string Id { get; init; }

    /// <summary>Feed this advisory belongs to.</summary>
    public required string FeedId { get; init; }

    /// <summary>Associated CVE identifier, when known.</summary>
    public string? CveId { get; init; }

    public string? Summary { get; init; }

    public string? Severity { get; init; }

    /// <summary>CVSS score, when available. NOTE(review): CVSS version (v2/v3/v4) is not recorded — confirm with producer.</summary>
    public double? CvssScore { get; init; }

    public DateTimeOffset? PublishedAt { get; init; }

    public DateTimeOffset? ModifiedAt { get; init; }

    /// <summary>Affected package identifiers. NOTE(review): format (purl vs. plain name) is not constrained here — confirm.</summary>
    public IReadOnlyList<string>? AffectedPackages { get; init; }

    /// <summary>Raw upstream payload — presumably the original feed document; verify against the Concelier implementation.</summary>
    public IReadOnlyDictionary<string, object>? RawData { get; init; }
}
|
||||
|
||||
/// <summary>
/// Request for extracting advisories. All filters are optional; an empty request
/// extracts every record from every feed.
/// </summary>
public sealed record AdvisoryExtractionRequest
{
    /// <summary>
    /// Specific feed IDs to extract. Empty (or null) means all feeds.
    /// </summary>
    public IReadOnlyList<string>? FeedIds { get; init; }

    /// <summary>
    /// Only extract advisories modified since this time.
    /// </summary>
    public DateTimeOffset? Since { get; init; }

    /// <summary>
    /// Maximum records per feed (not a global cap across feeds).
    /// </summary>
    public int? MaxRecords { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of extracting advisories from all feeds.
/// </summary>
public sealed record AdvisoryExtractionResult
{
    /// <summary>True only when no feed produced an error.</summary>
    public bool Success { get; init; }

    /// <summary>One content blob per successfully extracted, non-empty feed.</summary>
    public IReadOnlyList<AdvisoryContent> Advisories { get; init; } = [];

    /// <summary>Total advisory records across all extracted feeds.</summary>
    public int TotalRecordCount { get; init; }

    /// <summary>Per-feed error messages, formatted "feedId: message".</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Result of extracting a single feed.
/// </summary>
public sealed record FeedExtractionResult
{
    /// <summary>True when extraction succeeded (including the empty-feed case).</summary>
    public bool Success { get; init; }

    /// <summary>Number of records extracted; 0 for an empty feed.</summary>
    public int RecordCount { get; init; }

    /// <summary>Serialized content; null when the feed was empty or extraction failed.</summary>
    public AdvisoryContent? Content { get; init; }

    /// <summary>Failure message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,360 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicySnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-008 - Implement policy bundle extractor
|
||||
// Description: Extracts policy bundle data for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.IO.Compression;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts policy bundles from the Policy registry for inclusion in knowledge snapshot bundles.
/// </summary>
public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor
{
    // Compact camelCase JSON for the embedded OPA bundle manifest.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IPolicyDataSource _dataSource;

    /// <summary>
    /// Creates an extractor backed by the given policy data source.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public PolicySnapshotExtractor(IPolicyDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts all registered policies. Policies whose type is not listed in
    /// <see cref="PolicyExtractionRequest.Types"/> (when non-empty) are skipped.
    /// Per-policy failures are recorded in <see cref="PolicyExtractionResult.Errors"/>;
    /// only a failure to enumerate policies aborts the whole extraction.
    /// </summary>
    public async Task<PolicyExtractionResult> ExtractAllAsync(
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<PolicyContent>();
        var errors = new List<string>();

        try
        {
            var policies = await _dataSource.GetAvailablePoliciesAsync(cancellationToken);

            foreach (var policy in policies)
            {
                // Skip if specific types are requested and this isn't one of them.
                if (request.Types is { Count: > 0 } && !request.Types.Contains(policy.Type))
                {
                    continue;
                }

                try
                {
                    var policyResult = await ExtractPolicyAsync(policy.PolicyId, request, cancellationToken);
                    if (policyResult.Success && policyResult.Content is not null)
                    {
                        contents.Add(policyResult.Content);
                    }
                    else if (!string.IsNullOrEmpty(policyResult.Error))
                    {
                        errors.Add($"{policy.PolicyId}: {policyResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // Isolate per-policy failures so one bad policy doesn't abort the snapshot.
                    errors.Add($"{policy.PolicyId}: {ex.Message}");
                }
            }

            return new PolicyExtractionResult
            {
                Success = errors.Count == 0,
                Policies = contents,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new PolicyExtractionResult
            {
                Success = false,
                Policies = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts a specific policy and packages it according to its type:
    /// "OpaRego" becomes a gzipped OPA-style tar bundle; JSON-based types are
    /// passed through as .json; anything else is passed through as opaque .bin.
    /// </summary>
    public async Task<PolicySingleExtractionResult> ExtractPolicyAsync(
        string policyId,
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(policyId);

        try
        {
            var policyInfo = await _dataSource.GetPolicyInfoAsync(policyId, cancellationToken);
            if (policyInfo is null)
            {
                return new PolicySingleExtractionResult
                {
                    Success = false,
                    Error = "Policy not found"
                };
            }

            var policyContent = await _dataSource.GetPolicyContentAsync(policyId, cancellationToken);
            if (policyContent is null || policyContent.Length == 0)
            {
                return new PolicySingleExtractionResult
                {
                    Success = false,
                    Error = "Policy content is empty"
                };
            }

            // Package policy based on type.
            byte[] contentBytes;
            string fileName;

            switch (policyInfo.Type)
            {
                case "OpaRego":
                    // Package Rego files as a tar.gz bundle with an OPA .manifest.
                    contentBytes = PackageRegoBundle(policyInfo, policyContent);
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.tar.gz";
                    break;

                case "LatticeRules" or "UnknownBudgets" or "ScoringWeights":
                    // These policy types are already JSON documents; pass through unchanged.
                    // (Was three byte-identical branches; collapsed into one or-pattern.)
                    contentBytes = policyContent;
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json";
                    break;

                default:
                    // Unknown types are passed through as-is with an opaque extension.
                    contentBytes = policyContent;
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.bin";
                    break;
            }

            return new PolicySingleExtractionResult
            {
                Success = true,
                Content = new PolicyContent
                {
                    PolicyId = policyInfo.PolicyId,
                    Name = policyInfo.Name,
                    Version = policyInfo.Version,
                    FileName = fileName,
                    Content = contentBytes,
                    Type = policyInfo.Type
                }
            };
        }
        catch (Exception ex)
        {
            return new PolicySingleExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Packages a Rego policy plus an OPA bundle manifest into a minimal gzipped tar.
    /// Synchronous by design: all work is in-memory, so the previous async wrapper
    /// (await Task.CompletedTask) added nothing.
    /// Note: this is a minimal hand-rolled tar; a full implementation would use
    /// System.Formats.Tar (.NET 7+) — TODO confirm target framework allows it.
    /// </summary>
    private static byte[] PackageRegoBundle(PolicyInfo policyInfo, byte[] policyContent)
    {
        using var outputStream = new MemoryStream();
        using (var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal))
        {
            // Rego source entry.
            WriteTarEntry(gzipStream, $"{policyInfo.PolicyId}/policy.rego", policyContent);

            // OPA bundle manifest (.manifest at bundle root).
            var manifest = new OpaBundleManifest
            {
                Revision = policyInfo.Version,
                Roots = [policyInfo.PolicyId]
            };
            WriteTarEntry(gzipStream, ".manifest", JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions));

            // Tar end-of-archive marker: two 512-byte zero blocks.
            gzipStream.Write(new byte[1024]);
        }

        // MemoryStream.ToArray is valid after disposal, so this is safe once the
        // gzip stream has been flushed/closed by the using block above.
        return outputStream.ToArray();
    }

    /// <summary>
    /// Writes one tar entry: header, content, then zero padding up to the next
    /// 512-byte boundary (no padding when the content is already aligned).
    /// </summary>
    private static void WriteTarEntry(Stream stream, string fileName, byte[] content)
    {
        stream.Write(CreateTarHeader(fileName, content.Length));
        stream.Write(content);

        var padding = 512 - (content.Length % 512);
        if (padding < 512)
        {
            stream.Write(new byte[padding]);
        }
    }

    /// <summary>
    /// Builds a 512-byte POSIX ustar header for a regular file. Numeric fields are
    /// zero-padded octal ASCII per the tar format.
    /// </summary>
    private static byte[] CreateTarHeader(string fileName, long fileSize)
    {
        var header = new byte[512];

        // Name (0-99); truncated to the 100-byte ustar name field.
        var nameBytes = Encoding.ASCII.GetBytes(fileName);
        Array.Copy(nameBytes, header, Math.Min(nameBytes.Length, 100));

        // Mode (100-107) - 0644.
        Encoding.ASCII.GetBytes("0000644").CopyTo(header, 100);

        // Owner/group UID/GID (108-123) - zeros.
        Encoding.ASCII.GetBytes("0000000").CopyTo(header, 108);
        Encoding.ASCII.GetBytes("0000000").CopyTo(header, 116);

        // File size in octal (124-135).
        Encoding.ASCII.GetBytes(Convert.ToString(fileSize, 8).PadLeft(11, '0')).CopyTo(header, 124);

        // Modification time (136-147). NOTE(review): wall-clock mtime makes bundle
        // bytes non-reproducible across runs — confirm whether a fixed epoch is wanted.
        var mtime = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
        Encoding.ASCII.GetBytes(Convert.ToString(mtime, 8).PadLeft(11, '0')).CopyTo(header, 136);

        // Checksum field (148-155) is treated as all spaces while summing, per spec.
        for (var i = 148; i < 156; i++)
        {
            header[i] = 0x20;
        }

        // Type flag (156) - '0' = regular file.
        header[156] = (byte)'0';

        // USTAR magic ("ustar\0" at 257-262) and version ("00" at 263-264).
        Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
        Encoding.ASCII.GetBytes("00").CopyTo(header, 263);

        // Checksum: unsigned byte sum of the whole header, written back as six
        // octal digits + NUL + space.
        var checksum = 0;
        foreach (var b in header)
        {
            checksum += b;
        }
        Encoding.ASCII.GetBytes(Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ").CopyTo(header, 148);

        return header;
    }

    // Minimal OPA bundle manifest serialized into the bundle's .manifest file.
    private sealed record OpaBundleManifest
    {
        public required string Revision { get; init; }
        public required string[] Roots { get; init; }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for policy snapshot extraction.
/// </summary>
public interface IPolicySnapshotExtractor
{
    /// <summary>Extracts all registered policies into snapshot content blobs.</summary>
    Task<PolicyExtractionResult> ExtractAllAsync(
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>Extracts a single policy identified by <paramref name="policyId"/>.</summary>
    Task<PolicySingleExtractionResult> ExtractPolicyAsync(
        string policyId,
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Interface for policy data access.
/// This should be implemented by the Policy module to provide policy data.
/// </summary>
public interface IPolicyDataSource
{
    /// <summary>Lists all policies known to the registry.</summary>
    Task<IReadOnlyList<PolicyInfo>> GetAvailablePoliciesAsync(CancellationToken cancellationToken = default);

    /// <summary>Returns metadata for one policy, or null when it does not exist.</summary>
    Task<PolicyInfo?> GetPolicyInfoAsync(string policyId, CancellationToken cancellationToken = default);

    /// <summary>Returns the raw policy bytes, or null when unavailable.</summary>
    Task<byte[]?> GetPolicyContentAsync(string policyId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models
|
||||
|
||||
/// <summary>
/// Information about a policy.
/// </summary>
public sealed record PolicyInfo
{
    /// <summary>Stable policy identifier; also used in generated bundle file names.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Human-readable policy name.</summary>
    public required string Name { get; init; }

    /// <summary>Policy version; embedded in generated file names and the OPA manifest revision.</summary>
    public required string Version { get; init; }

    /// <summary>Policy type discriminator (e.g. "OpaRego", "LatticeRules"); drives packaging.</summary>
    public required string Type { get; init; }

    public string? Description { get; init; }

    public DateTimeOffset? CreatedAt { get; init; }

    public DateTimeOffset? ModifiedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Request for extracting policies.
/// </summary>
public sealed record PolicyExtractionRequest
{
    /// <summary>
    /// Specific policy types to extract. Empty (or null) means all types.
    /// </summary>
    public IReadOnlyList<string>? Types { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of extracting policies.
/// </summary>
public sealed record PolicyExtractionResult
{
    /// <summary>True only when no policy produced an error.</summary>
    public bool Success { get; init; }

    /// <summary>One content blob per successfully extracted policy.</summary>
    public IReadOnlyList<PolicyContent> Policies { get; init; } = [];

    /// <summary>Per-policy error messages, formatted "policyId: message".</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Result of extracting a single policy.
/// </summary>
public sealed record PolicySingleExtractionResult
{
    /// <summary>True when the policy was found, non-empty, and packaged.</summary>
    public bool Success { get; init; }

    /// <summary>Packaged content; null on failure.</summary>
    public PolicyContent? Content { get; init; }

    /// <summary>Failure message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,281 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VexSnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-007 - Implement VEX snapshot extractor
|
||||
// Description: Extracts VEX statement data from Excititor for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts VEX (Vulnerability Exploitability eXchange) statements from Excititor
/// database for inclusion in knowledge snapshot bundles.
/// </summary>
public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
{
    // Compact camelCase JSON for snapshot payloads.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IVexDataSource _dataSource;

    /// <summary>
    /// Creates an extractor backed by the given VEX data source.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public VexSnapshotExtractor(IVexDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts VEX statements from all configured sources. Sources not listed in
    /// <see cref="VexExtractionRequest.SourceIds"/> (when non-empty) are skipped.
    /// Per-source failures are recorded in <see cref="VexExtractionResult.Errors"/>;
    /// only a failure enumerating the sources aborts the whole extraction.
    /// </summary>
    public async Task<VexExtractionResult> ExtractAllAsync(
        VexExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<VexContent>();
        var errors = new List<string>();
        var totalStatements = 0;

        try
        {
            var sources = await _dataSource.GetAvailableSourcesAsync(cancellationToken);

            foreach (var source in sources)
            {
                // Skip if specific sources are requested and this isn't one of them.
                if (request.SourceIds is { Count: > 0 } && !request.SourceIds.Contains(source.SourceId))
                {
                    continue;
                }

                try
                {
                    var sourceResult = await ExtractSourceAsync(source.SourceId, request, cancellationToken);
                    if (sourceResult.Success && sourceResult.Content is not null)
                    {
                        contents.Add(sourceResult.Content);
                        totalStatements += sourceResult.StatementCount;
                    }
                    else if (!string.IsNullOrEmpty(sourceResult.Error))
                    {
                        errors.Add($"{source.SourceId}: {sourceResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // Isolate per-source failures so one bad source doesn't abort the snapshot.
                    errors.Add($"{source.SourceId}: {ex.Message}");
                }
            }

            return new VexExtractionResult
            {
                Success = errors.Count == 0,
                VexStatements = contents,
                TotalStatementCount = totalStatements,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new VexExtractionResult
            {
                Success = false,
                VexStatements = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts VEX statements from a specific source and serializes them as one
    /// OpenVEX document, with statements ordered by vulnerability id (ordinal)
    /// for deterministic output.
    /// </summary>
    public async Task<VexSourceExtractionResult> ExtractSourceAsync(
        string sourceId,
        VexExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);

        try
        {
            var statements = await _dataSource.GetStatementsAsync(
                sourceId,
                request.Since,
                request.MaxStatements,
                cancellationToken);

            if (statements.Count == 0)
            {
                return new VexSourceExtractionResult
                {
                    Success = true,
                    StatementCount = 0
                };
            }

            // Read the clock exactly once so the document id, timestamp, file name
            // and SnapshotAt all agree. The original read the clock four times, so
            // values could straddle a second boundary and disagree with each other.
            var snapshotAt = DateTimeOffset.UtcNow;

            // Serialize statements as an OpenVEX document.
            var document = new OpenVexDocument
            {
                Context = "https://openvex.dev/ns",
                Id = $"urn:stellaops:vex:{sourceId}:{snapshotAt:yyyyMMddHHmmss}",
                Author = sourceId,
                Timestamp = snapshotAt,
                Version = 1,
                Statements = statements.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal).ToList()
            };

            var contentBytes = JsonSerializer.SerializeToUtf8Bytes(document, JsonOptions);
            var fileName = $"{sourceId}-{snapshotAt:yyyyMMddHHmmss}.json";

            return new VexSourceExtractionResult
            {
                Success = true,
                StatementCount = statements.Count,
                Content = new VexContent
                {
                    SourceId = sourceId,
                    FileName = fileName,
                    Content = contentBytes,
                    SnapshotAt = snapshotAt,
                    StatementCount = statements.Count
                }
            };
        }
        catch (Exception ex)
        {
            return new VexSourceExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for VEX snapshot extraction.
/// </summary>
public interface IVexSnapshotExtractor
{
    /// <summary>Extracts VEX statements from all configured sources into snapshot content blobs.</summary>
    Task<VexExtractionResult> ExtractAllAsync(
        VexExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>Extracts VEX statements from a single source identified by <paramref name="sourceId"/>.</summary>
    Task<VexSourceExtractionResult> ExtractSourceAsync(
        string sourceId,
        VexExtractionRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Interface for VEX data access.
/// This should be implemented by Excititor to provide VEX data.
/// </summary>
public interface IVexDataSource
{
    /// <summary>Lists the sources that VEX statements can be extracted from.</summary>
    Task<IReadOnlyList<VexSourceInfo>> GetAvailableSourcesAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Returns statements for a source, optionally filtered to those modified since
    /// <paramref name="since"/> and capped at <paramref name="maxStatements"/> entries.
    /// </summary>
    Task<IReadOnlyList<VexStatement>> GetStatementsAsync(
        string sourceId,
        DateTimeOffset? since = null,
        int? maxStatements = null,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models
|
||||
|
||||
/// <summary>
/// Information about an available VEX source.
/// </summary>
/// <param name="SourceId">Stable identifier used to request extraction of this source.</param>
/// <param name="Name">Human-readable source name.</param>
/// <param name="Publisher">Publishing organization, if known.</param>
public sealed record VexSourceInfo(string SourceId, string Name, string? Publisher);
|
||||
|
||||
/// <summary>
/// A VEX statement following OpenVEX format.
/// </summary>
public sealed record VexStatement
{
    /// <summary>Vulnerability identifier; used as the ordinal sort key for deterministic export.</summary>
    public required string VulnerabilityId { get; init; }

    /// <summary>VEX status — presumably one of the OpenVEX statuses (not_affected/affected/fixed/under_investigation); confirm against the producer.</summary>
    public required string Status { get; init; }

    /// <summary>Justification for a not_affected status, when provided.</summary>
    public string? Justification { get; init; }

    public string? ImpactStatement { get; init; }

    public string? ActionStatement { get; init; }

    public DateTimeOffset? Timestamp { get; init; }

    /// <summary>Products this statement applies to.</summary>
    public IReadOnlyList<VexProduct>? Products { get; init; }
}
|
||||
|
||||
/// <summary>
/// A product reference in a VEX statement.
/// </summary>
public sealed record VexProduct
{
    /// <summary>Product identifier within the statement.</summary>
    public required string Id { get; init; }

    public string? Name { get; init; }

    public string? Version { get; init; }

    /// <summary>Package URL (purl) for the product, when known.</summary>
    public string? Purl { get; init; }

    /// <summary>Content hashes. NOTE(review): algorithm/encoding is not constrained here — confirm with producer.</summary>
    public IReadOnlyList<string>? Hashes { get; init; }
}
|
||||
|
||||
/// <summary>
/// OpenVEX document envelope. <see cref="Context"/> and <see cref="Id"/> serialize
/// as the "@context" and "@id" members required by the OpenVEX specification; the
/// camelCase naming policy alone would emit "context"/"id", which is not valid
/// OpenVEX, so they are pinned with <see cref="JsonPropertyName"/> attributes.
/// </summary>
public sealed record OpenVexDocument
{
    /// <summary>JSON-LD context URI (e.g. "https://openvex.dev/ns").</summary>
    [JsonPropertyName("@context")]
    public required string Context { get; init; }

    /// <summary>Unique document IRI.</summary>
    [JsonPropertyName("@id")]
    public required string Id { get; init; }

    public required string Author { get; init; }

    public required DateTimeOffset Timestamp { get; init; }

    public required int Version { get; init; }

    // NOTE(review): OpenVEX statements nest the vulnerability in a "vulnerability"
    // object; the flat VexStatement shape here diverges from the spec — confirm
    // downstream consumers before tightening further.
    public required IReadOnlyList<VexStatement> Statements { get; init; }
}
|
||||
|
||||
/// <summary>
/// Request for extracting VEX statements. All filters are optional; an empty
/// request extracts every statement from every source.
/// </summary>
public sealed record VexExtractionRequest
{
    /// <summary>
    /// Specific source IDs to extract. Empty (or null) means all sources.
    /// </summary>
    public IReadOnlyList<string>? SourceIds { get; init; }

    /// <summary>
    /// Only extract statements modified since this time.
    /// </summary>
    public DateTimeOffset? Since { get; init; }

    /// <summary>
    /// Maximum statements per source (not a global cap across sources).
    /// </summary>
    public int? MaxStatements { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of extracting VEX statements from all sources.
/// </summary>
public sealed record VexExtractionResult
{
    /// <summary>True when the overall extraction succeeded.</summary>
    public bool Success { get; init; }

    /// <summary>Extracted VEX content; empty when nothing was extracted.</summary>
    public IReadOnlyList<VexContent> VexStatements { get; init; } = [];

    /// <summary>Total number of statements across all sources.</summary>
    public int TotalStatementCount { get; init; }

    /// <summary>Errors encountered during extraction; empty when none.</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Result of extracting a single VEX source.
/// </summary>
public sealed record VexSourceExtractionResult
{
    /// <summary>True when this source was extracted successfully.</summary>
    public bool Success { get; init; }

    /// <summary>Number of statements extracted from the source.</summary>
    public int StatementCount { get; init; }

    /// <summary>The extracted content; null on failure or when empty.</summary>
    public VexContent? Content { get; init; }

    /// <summary>Error message when extraction failed; null otherwise.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,92 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// KnowledgeSnapshotManifest.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-001 - Define KnowledgeSnapshotManifest schema
|
||||
// Description: Manifest model for sealed knowledge snapshots.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Models;
|
||||
|
||||
/// <summary>
/// Manifest for a sealed knowledge snapshot bundle.
/// Contains metadata and integrity information for all bundled content.
/// </summary>
public sealed class KnowledgeSnapshotManifest
{
    /// <summary>Unique identifier of the bundle.</summary>
    public required string BundleId { get; init; }

    /// <summary>Human-readable bundle name.</summary>
    public required string Name { get; init; }

    /// <summary>Bundle version string.</summary>
    public required string Version { get; init; }

    /// <summary>Creation time of the snapshot.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Version of the manifest schema itself.</summary>
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>Merkle root over bundle content; mutable so builders can set it after entries are collected.</summary>
    public string? MerkleRoot { get; set; }

    /// <summary>Total payload size in bytes; mutable for incremental aggregation.</summary>
    public long TotalSizeBytes { get; set; }

    /// <summary>Number of entries in the bundle; mutable for incremental aggregation.</summary>
    public int EntryCount { get; set; }

    /// <summary>Advisory feed entries included in the snapshot.</summary>
    public List<AdvisorySnapshotEntry> Advisories { get; init; } = [];

    /// <summary>VEX statement entries included in the snapshot.</summary>
    public List<VexSnapshotEntry> VexStatements { get; init; } = [];

    /// <summary>Policy entries included in the snapshot.</summary>
    public List<PolicySnapshotEntry> Policies { get; init; } = [];

    /// <summary>Trust root entries included in the snapshot.</summary>
    public List<TrustRootSnapshotEntry> TrustRoots { get; init; } = [];

    /// <summary>Optional time anchor for the snapshot.</summary>
    public TimeAnchorEntry? TimeAnchor { get; set; }
}
|
||||
|
||||
/// <summary>
/// Entry for an advisory feed in the snapshot.
/// </summary>
public sealed class AdvisorySnapshotEntry
{
    /// <summary>Identifier of the advisory feed.</summary>
    public required string FeedId { get; init; }

    /// <summary>Path of the payload relative to the bundle root.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest of the payload.</summary>
    public required string Digest { get; init; }

    /// <summary>Payload size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the feed was snapshotted.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of advisory records in the payload.</summary>
    public int RecordCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Entry for VEX statements in the snapshot.
/// </summary>
public sealed class VexSnapshotEntry
{
    /// <summary>Identifier of the VEX source.</summary>
    public required string SourceId { get; init; }

    /// <summary>Path of the payload relative to the bundle root.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest of the payload.</summary>
    public required string Digest { get; init; }

    /// <summary>Payload size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the source was snapshotted.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of VEX statements in the payload.</summary>
    public int StatementCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Entry for a policy in the snapshot.
/// </summary>
public sealed class PolicySnapshotEntry
{
    /// <summary>Identifier of the policy.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Human-readable policy name.</summary>
    public required string Name { get; init; }

    /// <summary>Policy version string.</summary>
    public required string Version { get; init; }

    /// <summary>Path of the payload relative to the bundle root.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest of the payload.</summary>
    public required string Digest { get; init; }

    /// <summary>Payload size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>Policy content type; defaults to OPA Rego.</summary>
    public string Type { get; init; } = "OpaRego";
}
|
||||
|
||||
/// <summary>
/// Entry for a trust root in the snapshot.
/// </summary>
public sealed class TrustRootSnapshotEntry
{
    /// <summary>Identifier of the key.</summary>
    public required string KeyId { get; init; }

    /// <summary>Path of the key material relative to the bundle root.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest of the key material.</summary>
    public required string Digest { get; init; }

    /// <summary>Key material size in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>Signature algorithm; defaults to ES256.</summary>
    public string Algorithm { get; init; } = "ES256";

    /// <summary>Optional expiry of the trust root.</summary>
    public DateTimeOffset? ExpiresAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Time anchor entry in the manifest.
/// </summary>
public sealed class TimeAnchorEntry
{
    /// <summary>The anchored point in time.</summary>
    public required DateTimeOffset AnchorTime { get; init; }

    /// <summary>Where the anchor came from.</summary>
    public required string Source { get; init; }

    /// <summary>Optional digest of the anchor evidence.</summary>
    public string? Digest { get; init; }
}
|
||||
@@ -0,0 +1,258 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ConcelierAdvisoryImportTarget.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-015 - Apply snapshot advisory content to Concelier database
|
||||
// Description: Adapter implementing IAdvisoryImportTarget for Concelier module.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Concelier.Core.Raw;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
/// Implements IAdvisoryImportTarget by adapting to Concelier's IAdvisoryRawRepository.
/// Parses NDJSON advisory content (one AdvisoryRawDocument per line) and upserts records
/// to the advisory database.
/// </summary>
public sealed class ConcelierAdvisoryImportTarget : IAdvisoryImportTarget
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IAdvisoryRawRepository _repository;
    private readonly string _tenant;

    /// <summary>
    /// Creates the import target.
    /// </summary>
    /// <param name="repository">Repository receiving the upserted advisory documents.</param>
    /// <param name="tenant">Tenant stamped onto every imported document; must be non-empty.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="repository"/> is null.</exception>
    /// <exception cref="ArgumentException">When <paramref name="tenant"/> is null or whitespace.</exception>
    public ConcelierAdvisoryImportTarget(
        IAdvisoryRawRepository repository,
        string tenant = "default")
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _tenant = tenant;
    }

    /// <inheritdoc />
    public async Task<ModuleImportResultData> ImportAdvisoriesAsync(
        AdvisoryImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty advisory content"
            };
        }

        var created = 0;
        var updated = 0;
        var failed = 0;
        var errors = new List<string>();

        try
        {
            // Parse NDJSON content - each line is a complete AdvisoryRawDocument.
            // TrimEntries handles CRLF line endings (a bare '\n' split would leave a
            // trailing '\r' and let "\r"-only lines through RemoveEmptyEntries).
            var contentString = Encoding.UTF8.GetString(data.Content);
            var lines = contentString.Split(
                '\n',
                StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            foreach (var line in lines)
            {
                cancellationToken.ThrowIfCancellationRequested();

                try
                {
                    var document = JsonSerializer.Deserialize<AdvisoryRawDocument>(line, JsonOptions);
                    if (document is null)
                    {
                        failed++;
                        errors.Add("Failed to parse advisory line");
                        continue;
                    }

                    // Ensure tenant is set correctly regardless of what the snapshot carried.
                    var tenantedDocument = document with { Tenant = _tenant };

                    var result = await _repository.UpsertAsync(tenantedDocument, cancellationToken);

                    if (result.Inserted)
                    {
                        created++;
                    }
                    else
                    {
                        updated++;
                    }
                }
                catch (OperationCanceledException)
                {
                    // Cancellation is not an import failure; let it propagate.
                    throw;
                }
                catch (JsonException ex)
                {
                    failed++;
                    errors.Add($"JSON parse error: {ex.Message}");
                }
                catch (Exception ex)
                {
                    failed++;
                    errors.Add($"Advisory import error: {ex.Message}");
                }
            }
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Failed = failed + 1,
                Error = $"Import failed: {ex.Message}"
            };
        }

        return new ModuleImportResultData
        {
            Created = created,
            Updated = updated,
            Failed = failed,
            // Only the first five errors are surfaced to keep the summary bounded.
            Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null
        };
    }
}
|
||||
|
||||
/// <summary>
/// Lightweight in-memory implementation of IAdvisoryRawRepository for air-gap scenarios.
/// Used when direct database access is unavailable. All access is serialized through a
/// single private lock; records are keyed by tenant plus content hash, so a re-upsert of
/// identical content is reported as not inserted.
/// </summary>
public sealed class InMemoryAdvisoryRawRepository : IAdvisoryRawRepository
{
    private readonly Dictionary<string, AdvisoryRawRecord> _records = new();
    private readonly object _lock = new();

    /// <summary>
    /// Inserts the document if no record with the same tenant/content hash exists;
    /// otherwise returns the existing record with <c>Inserted = false</c>.
    /// </summary>
    public Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
    {
        var contentHash = ComputeHash(document);
        var key = $"{document.Tenant}:{contentHash}";
        var now = DateTimeOffset.UtcNow;

        lock (_lock)
        {
            if (_records.TryGetValue(key, out var existing))
            {
                return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: false, Record: existing));
            }

            var record = new AdvisoryRawRecord(
                Id: Guid.NewGuid().ToString(),
                Document: document,
                IngestedAt: now,
                CreatedAt: now);

            _records[key] = record;
            return Task.FromResult(new AdvisoryRawUpsertResult(Inserted: true, Record: record));
        }
    }

    /// <summary>Finds a record by tenant and record id; null when absent.</summary>
    public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var record = _records.Values.FirstOrDefault(r => r.Document.Tenant == tenant && r.Id == id);
            return Task.FromResult(record);
        }
    }

    /// <summary>
    /// Queries records for a tenant, optionally filtered by vendor and ingestion time,
    /// limited to <c>options.Limit</c> results. Cursor is the last returned record id.
    /// </summary>
    public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values.Where(r => r.Document.Tenant == options.Tenant);

            if (!options.Vendors.IsEmpty)
            {
                query = query.Where(r => options.Vendors.Contains(r.Document.Source.Vendor));
            }

            if (options.Since.HasValue)
            {
                query = query.Where(r => r.IngestedAt >= options.Since.Value);
            }

            var records = query.Take(options.Limit).ToList();
            return Task.FromResult(new AdvisoryRawQueryResult(
                Records: records,
                // A full page may (but need not) mean more data exists; NOTE(review):
                // this matches the original's heuristic paging behavior.
                NextCursor: records.Count == options.Limit && records.Count > 0 ? records[^1].Id : null,
                HasMore: records.Count == options.Limit));
        }
    }

    /// <summary>
    /// Finds records whose advisory key or any alias matches one of the search values,
    /// optionally restricted to the given source vendors.
    /// </summary>
    public Task<IReadOnlyList<AdvisoryRawRecord>> FindByAdvisoryKeyAsync(
        string tenant,
        IReadOnlyCollection<string> searchValues,
        IReadOnlyCollection<string> sourceVendors,
        CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values.Where(r => r.Document.Tenant == tenant);

            if (searchValues.Count > 0)
            {
                query = query.Where(r =>
                    searchValues.Contains(r.Document.AdvisoryKey) ||
                    r.Document.Identifiers.Aliases.Any(a => searchValues.Contains(a)));
            }

            if (sourceVendors.Count > 0)
            {
                query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor));
            }

            return Task.FromResult<IReadOnlyList<AdvisoryRawRecord>>(query.ToList());
        }
    }

    /// <summary>
    /// Lists records ingested in the inclusive window [since, until], optionally
    /// restricted to the given source vendors.
    /// </summary>
    public Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync(
        string tenant,
        DateTimeOffset since,
        DateTimeOffset until,
        IReadOnlyCollection<string> sourceVendors,
        CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var query = _records.Values
                .Where(r => r.Document.Tenant == tenant && r.IngestedAt >= since && r.IngestedAt <= until);

            if (sourceVendors.Count > 0)
            {
                query = query.Where(r => sourceVendors.Contains(r.Document.Source.Vendor));
            }

            return Task.FromResult<IReadOnlyList<AdvisoryRawRecord>>(query.ToList());
        }
    }

    /// <summary>Number of stored records. Takes the lock like every other member.</summary>
    public int Count
    {
        get
        {
            lock (_lock)
            {
                return _records.Count;
            }
        }
    }

    /// <summary>Returns a snapshot copy of all stored records.</summary>
    public IEnumerable<AdvisoryRawRecord> GetAllRecords()
    {
        lock (_lock)
        {
            return _records.Values.ToList();
        }
    }

    // Content-addressed key: SHA-256 over the default JSON serialization of the document.
    private static string ComputeHash(AdvisoryRawDocument document)
    {
        var json = JsonSerializer.Serialize(document);
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return $"sha256:{Convert.ToHexStringLower(bytes)}";
    }
}
|
||||
@@ -0,0 +1,259 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ExcititorVexImportTarget.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-016 - Apply snapshot VEX content to Excititor database
|
||||
// Description: Adapter implementing IVexImportTarget for Excititor module.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using StellaOps.Excititor.Core;
|
||||
using StellaOps.Excititor.Core.Storage;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
/// Implements IVexImportTarget by adapting to Excititor's IVexRawDocumentSink.
/// Parses NDJSON VEX statement content (one statement per line) and stores raw
/// documents to the VEX database. Every stored line counts as "created"; the sink
/// API exposes no update distinction, so Updated is always zero.
/// </summary>
public sealed class ExcititorVexImportTarget : IVexImportTarget
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IVexRawDocumentSink _sink;
    private readonly string _tenant;

    /// <summary>
    /// Creates the import target.
    /// </summary>
    /// <param name="sink">Sink receiving the raw VEX documents.</param>
    /// <param name="tenant">Tenant associated with imported documents; must be non-empty.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="sink"/> is null.</exception>
    /// <exception cref="ArgumentException">When <paramref name="tenant"/> is null or whitespace.</exception>
    public ExcititorVexImportTarget(
        IVexRawDocumentSink sink,
        string tenant = "default")
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        _sink = sink ?? throw new ArgumentNullException(nameof(sink));
        _tenant = tenant;
    }

    /// <inheritdoc />
    public async Task<ModuleImportResultData> ImportVexStatementsAsync(
        VexImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty VEX content"
            };
        }

        var created = 0;
        var updated = 0;
        var failed = 0;
        var errors = new List<string>();

        try
        {
            // Parse NDJSON content - each line is a VEX statement.
            // TrimEntries handles CRLF line endings (a bare '\n' split would leave a
            // trailing '\r' and let "\r"-only lines through RemoveEmptyEntries).
            var contentString = Encoding.UTF8.GetString(data.Content);
            var lines = contentString.Split(
                '\n',
                StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            foreach (var line in lines)
            {
                cancellationToken.ThrowIfCancellationRequested();

                try
                {
                    var statement = JsonSerializer.Deserialize<VexStatementDto>(line, JsonOptions);
                    if (statement is null)
                    {
                        failed++;
                        errors.Add("Failed to parse VEX statement line");
                        continue;
                    }

                    // Convert to VexRawDocument: the raw line bytes are stored verbatim,
                    // keyed by their SHA-256 digest.
                    var contentBytes = Encoding.UTF8.GetBytes(line);
                    var digest = ComputeDigest(contentBytes);

                    var document = new VexRawDocument(
                        ProviderId: data.SourceId,
                        Format: DetectFormat(statement),
                        // Synthesize a stable URN when the statement carries no source URI.
                        SourceUri: statement.SourceUri ?? new Uri($"urn:stellaops:airgap:vex:{digest}"),
                        RetrievedAt: data.SnapshotAt,
                        Digest: digest,
                        Content: contentBytes,
                        Metadata: ImmutableDictionary<string, string>.Empty
                            .Add("importSource", "airgap-snapshot")
                            .Add("snapshotAt", data.SnapshotAt.ToString("O")));

                    await _sink.StoreAsync(document, cancellationToken);
                    created++;
                }
                catch (OperationCanceledException)
                {
                    // Cancellation is not an import failure; let it propagate.
                    throw;
                }
                catch (JsonException ex)
                {
                    failed++;
                    errors.Add($"JSON parse error: {ex.Message}");
                }
                catch (Exception ex)
                {
                    failed++;
                    errors.Add($"VEX import error: {ex.Message}");
                }
            }
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Created = created,
                Updated = updated,
                Failed = failed + 1,
                Error = $"Import failed: {ex.Message}"
            };
        }

        return new ModuleImportResultData
        {
            Created = created,
            Updated = updated,
            Failed = failed,
            // Only the first five errors are surfaced to keep the summary bounded.
            Error = errors.Count > 0 ? string.Join("; ", errors.Take(5)) : null
        };
    }

    // SHA-256 digest of the raw line, in the "sha256:<hex>" form used as document key.
    private static string ComputeDigest(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    // Best-effort format detection from the statement's context string;
    // anything unrecognized is treated as OpenVEX.
    private static VexDocumentFormat DetectFormat(VexStatementDto statement)
    {
        if (!string.IsNullOrEmpty(statement.Context))
        {
            if (statement.Context.Contains("openvex", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.OpenVex;
            if (statement.Context.Contains("csaf", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.Csaf;
            if (statement.Context.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase))
                return VexDocumentFormat.CycloneDx;
        }

        // Default to OpenVEX.
        return VexDocumentFormat.OpenVex;
    }
}
|
||||
|
||||
/// <summary>
/// Lightweight in-memory implementation of IVexRawDocumentSink for air-gap scenarios.
/// Records are keyed by digest; storing the same digest twice is a no-op.
/// All access is serialized through a single private lock.
/// </summary>
public sealed class InMemoryVexRawDocumentSink : IVexRawDocumentSink, IVexRawStore
{
    private readonly Dictionary<string, VexRawRecord> _records = new();
    private readonly string _tenant;
    private readonly object _lock = new();

    /// <summary>Creates the sink; all stored records are tagged with <paramref name="tenant"/>.</summary>
    public InMemoryVexRawDocumentSink(string tenant = "default")
    {
        _tenant = tenant;
    }

    /// <summary>Stores the document unless a record with the same digest already exists.</summary>
    public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            if (!_records.ContainsKey(document.Digest))
            {
                _records[document.Digest] = new VexRawRecord(
                    Digest: document.Digest,
                    Tenant: _tenant,
                    ProviderId: document.ProviderId,
                    Format: document.Format,
                    SourceUri: document.SourceUri,
                    RetrievedAt: document.RetrievedAt,
                    Metadata: document.Metadata,
                    Content: document.Content,
                    InlineContent: true,
                    RecordedAt: DateTimeOffset.UtcNow);
            }
        }

        return ValueTask.CompletedTask;
    }

    /// <summary>Looks up a record by digest; null when absent.</summary>
    public ValueTask<VexRawRecord?> FindByDigestAsync(string digest, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _records.TryGetValue(digest, out var record);
            return ValueTask.FromResult(record);
        }
    }

    /// <summary>
    /// Applies the query's tenant/provider/digest/format/time filters, takes up to
    /// <c>query.Limit</c> records, and projects them to summaries. A full page is
    /// heuristically reported as HasMore with a cursor at the last item.
    /// </summary>
    public ValueTask<VexRawDocumentPage> QueryAsync(VexRawQuery query, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var items = _records.Values
                .Where(r => r.Tenant == query.Tenant)
                .Where(r => query.ProviderIds.Count == 0 || query.ProviderIds.Contains(r.ProviderId))
                .Where(r => query.Digests.Count == 0 || query.Digests.Contains(r.Digest))
                .Where(r => query.Formats.Count == 0 || query.Formats.Contains(r.Format))
                .Where(r => !query.Since.HasValue || r.RetrievedAt >= query.Since.Value)
                .Where(r => !query.Until.HasValue || r.RetrievedAt <= query.Until.Value)
                .Take(query.Limit)
                .Select(r => new VexRawDocumentSummary(
                    r.Digest,
                    r.ProviderId,
                    r.Format,
                    r.SourceUri,
                    r.RetrievedAt,
                    r.InlineContent,
                    r.Metadata))
                .ToList();

            return ValueTask.FromResult(new VexRawDocumentPage(
                items,
                NextCursor: items.Count == query.Limit && items.Count > 0
                    ? new VexRawCursor(items[^1].RetrievedAt, items[^1].Digest)
                    : null,
                HasMore: items.Count == query.Limit));
        }
    }

    /// <summary>Number of stored records. Takes the lock like every other member.</summary>
    public int Count
    {
        get
        {
            lock (_lock)
            {
                return _records.Count;
            }
        }
    }

    /// <summary>Returns a snapshot copy of all stored records.</summary>
    public IEnumerable<VexRawRecord> GetAllRecords()
    {
        lock (_lock)
        {
            return _records.Values.ToList();
        }
    }
}
|
||||
|
||||
/// <summary>
/// DTO for deserializing VEX statements from NDJSON.
/// All members are optional; missing JSON fields leave them null
/// (and <see cref="Products"/> empty).
/// </summary>
internal sealed record VexStatementDto
{
    /// <summary>Document context string, used for format detection.</summary>
    public string? Context { get; init; }
    public string? Id { get; init; }
    public string? Vulnerability { get; init; }
    public string? Status { get; init; }
    public string? Justification { get; init; }
    public string? Impact { get; init; }
    public string? ActionStatement { get; init; }
    public Uri? SourceUri { get; init; }
    public DateTimeOffset? Timestamp { get; init; }

    // Initialized to empty: an uninitialized (default) ImmutableArray throws on
    // any access, so a statement without a "products" field would blow up consumers.
    public ImmutableArray<string> Products { get; init; } = [];
}
|
||||
@@ -0,0 +1,489 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// KnowledgeSnapshotImporter.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-015, SEAL-016, SEAL-017 - Apply snapshot content to databases
|
||||
// Description: Imports knowledge snapshot content to Concelier, Excititor, and Policy.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.IO.Compression;
|
||||
using System.Formats.Tar;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Imports knowledge snapshot content to module databases.
|
||||
/// </summary>
|
||||
public sealed class KnowledgeSnapshotImporter : IKnowledgeSnapshotImporter
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
private readonly IAdvisoryImportTarget? _advisoryTarget;
|
||||
private readonly IVexImportTarget? _vexTarget;
|
||||
private readonly IPolicyImportTarget? _policyTarget;
|
||||
|
||||
public KnowledgeSnapshotImporter(
|
||||
IAdvisoryImportTarget? advisoryTarget = null,
|
||||
IVexImportTarget? vexTarget = null,
|
||||
IPolicyImportTarget? policyTarget = null)
|
||||
{
|
||||
_advisoryTarget = advisoryTarget;
|
||||
_vexTarget = vexTarget;
|
||||
_policyTarget = policyTarget;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Imports all content from a verified snapshot bundle.
|
||||
/// </summary>
|
||||
public async Task<SnapshotImportResult> ImportAsync(
|
||||
SnapshotImportRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
|
||||
|
||||
if (!File.Exists(request.BundlePath))
|
||||
{
|
||||
return SnapshotImportResult.Failed("Bundle file not found");
|
||||
}
|
||||
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"import-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
{
|
||||
// Extract bundle
|
||||
await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);
|
||||
|
||||
// Read manifest
|
||||
var manifestPath = Path.Combine(tempDir, "manifest.json");
|
||||
if (!File.Exists(manifestPath))
|
||||
{
|
||||
return SnapshotImportResult.Failed("Manifest not found in bundle");
|
||||
}
|
||||
|
||||
var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
|
||||
var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestBytes, JsonOptions);
|
||||
if (manifest is null)
|
||||
{
|
||||
return SnapshotImportResult.Failed("Failed to parse manifest");
|
||||
}
|
||||
|
||||
var result = new SnapshotImportResult
|
||||
{
|
||||
Success = true,
|
||||
BundleId = manifest.BundleId,
|
||||
StartedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
var errors = new List<string>();
|
||||
var stats = new ImportStatistics();
|
||||
|
||||
// Import advisories (SEAL-015)
|
||||
if (request.ImportAdvisories && _advisoryTarget is not null)
|
||||
{
|
||||
var advisoryResult = await ImportAdvisoriesAsync(
|
||||
tempDir, manifest.Advisories, request.DryRun, cancellationToken);
|
||||
|
||||
stats.AdvisoriesProcessed = advisoryResult.Processed;
|
||||
stats.AdvisoriesCreated = advisoryResult.Created;
|
||||
stats.AdvisoriesUpdated = advisoryResult.Updated;
|
||||
stats.AdvisoriesFailed = advisoryResult.Failed;
|
||||
|
||||
if (advisoryResult.Errors.Count > 0)
|
||||
{
|
||||
errors.AddRange(advisoryResult.Errors.Select(e => $"Advisory: {e}"));
|
||||
}
|
||||
}
|
||||
else if (request.ImportAdvisories)
|
||||
{
|
||||
errors.Add("Advisory import target not configured");
|
||||
}
|
||||
|
||||
// Import VEX statements (SEAL-016)
|
||||
if (request.ImportVex && _vexTarget is not null)
|
||||
{
|
||||
var vexResult = await ImportVexStatementsAsync(
|
||||
tempDir, manifest.VexStatements, request.DryRun, cancellationToken);
|
||||
|
||||
stats.VexProcessed = vexResult.Processed;
|
||||
stats.VexCreated = vexResult.Created;
|
||||
stats.VexUpdated = vexResult.Updated;
|
||||
stats.VexFailed = vexResult.Failed;
|
||||
|
||||
if (vexResult.Errors.Count > 0)
|
||||
{
|
||||
errors.AddRange(vexResult.Errors.Select(e => $"VEX: {e}"));
|
||||
}
|
||||
}
|
||||
else if (request.ImportVex)
|
||||
{
|
||||
errors.Add("VEX import target not configured");
|
||||
}
|
||||
|
||||
// Import policies (SEAL-017)
|
||||
if (request.ImportPolicies && _policyTarget is not null)
|
||||
{
|
||||
var policyResult = await ImportPoliciesAsync(
|
||||
tempDir, manifest.Policies, request.DryRun, cancellationToken);
|
||||
|
||||
stats.PoliciesProcessed = policyResult.Processed;
|
||||
stats.PoliciesCreated = policyResult.Created;
|
||||
stats.PoliciesUpdated = policyResult.Updated;
|
||||
stats.PoliciesFailed = policyResult.Failed;
|
||||
|
||||
if (policyResult.Errors.Count > 0)
|
||||
{
|
||||
errors.AddRange(policyResult.Errors.Select(e => $"Policy: {e}"));
|
||||
}
|
||||
}
|
||||
else if (request.ImportPolicies)
|
||||
{
|
||||
errors.Add("Policy import target not configured");
|
||||
}
|
||||
|
||||
result = result with
|
||||
{
|
||||
CompletedAt = DateTimeOffset.UtcNow,
|
||||
Statistics = stats,
|
||||
Errors = errors.Count > 0 ? [.. errors] : null,
|
||||
Success = errors.Count == 0 || !request.FailOnAnyError
|
||||
};
|
||||
|
||||
return result;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return SnapshotImportResult.Failed($"Import failed: {ex.Message}");
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(tempDir))
|
||||
{
|
||||
Directory.Delete(tempDir, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<ModuleImportResult> ImportAdvisoriesAsync(
|
||||
string bundleDir,
|
||||
IReadOnlyList<AdvisorySnapshotEntry> entries,
|
||||
bool dryRun,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var result = new ModuleImportResult();
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"File not found: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var content = await File.ReadAllBytesAsync(filePath, ct);
|
||||
result.Processed++;
|
||||
|
||||
if (!dryRun && _advisoryTarget is not null)
|
||||
{
|
||||
var importResult = await _advisoryTarget.ImportAdvisoriesAsync(
|
||||
new AdvisoryImportData
|
||||
{
|
||||
FeedId = entry.FeedId,
|
||||
Content = content,
|
||||
SnapshotAt = entry.SnapshotAt,
|
||||
RecordCount = entry.RecordCount
|
||||
},
|
||||
ct);
|
||||
|
||||
result.Created += importResult.Created;
|
||||
result.Updated += importResult.Updated;
|
||||
result.Failed += importResult.Failed;
|
||||
|
||||
if (importResult.Error is not null)
|
||||
{
|
||||
result.Errors.Add(importResult.Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<ModuleImportResult> ImportVexStatementsAsync(
|
||||
string bundleDir,
|
||||
IReadOnlyList<VexSnapshotEntry> entries,
|
||||
bool dryRun,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var result = new ModuleImportResult();
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"File not found: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var content = await File.ReadAllBytesAsync(filePath, ct);
|
||||
result.Processed++;
|
||||
|
||||
if (!dryRun && _vexTarget is not null)
|
||||
{
|
||||
var importResult = await _vexTarget.ImportVexStatementsAsync(
|
||||
new VexImportData
|
||||
{
|
||||
SourceId = entry.SourceId,
|
||||
Content = content,
|
||||
SnapshotAt = entry.SnapshotAt,
|
||||
StatementCount = entry.StatementCount
|
||||
},
|
||||
ct);
|
||||
|
||||
result.Created += importResult.Created;
|
||||
result.Updated += importResult.Updated;
|
||||
result.Failed += importResult.Failed;
|
||||
|
||||
if (importResult.Error is not null)
|
||||
{
|
||||
result.Errors.Add(importResult.Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<ModuleImportResult> ImportPoliciesAsync(
|
||||
string bundleDir,
|
||||
IReadOnlyList<PolicySnapshotEntry> entries,
|
||||
bool dryRun,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var result = new ModuleImportResult();
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
try
|
||||
{
|
||||
var filePath = Path.Combine(bundleDir, entry.RelativePath.Replace('/', Path.DirectorySeparatorChar));
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"File not found: {entry.RelativePath}");
|
||||
continue;
|
||||
}
|
||||
|
||||
var content = await File.ReadAllBytesAsync(filePath, ct);
|
||||
result.Processed++;
|
||||
|
||||
if (!dryRun && _policyTarget is not null)
|
||||
{
|
||||
var importResult = await _policyTarget.ImportPolicyAsync(
|
||||
new PolicyImportData
|
||||
{
|
||||
PolicyId = entry.PolicyId,
|
||||
Content = content,
|
||||
Version = entry.Version
|
||||
},
|
||||
ct);
|
||||
|
||||
result.Created += importResult.Created;
|
||||
result.Updated += importResult.Updated;
|
||||
result.Failed += importResult.Failed;
|
||||
|
||||
if (importResult.Error is not null)
|
||||
{
|
||||
result.Errors.Add(importResult.Error);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Failed++;
|
||||
result.Errors.Add($"Error processing {entry.RelativePath}: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
/// Extracts a gzip-compressed tar bundle into <paramref name="targetDir"/>,
/// overwriting any files that already exist there.
/// </summary>
private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
{
    await using (var archive = File.OpenRead(bundlePath))
    await using (var decompressed = new GZipStream(archive, CompressionMode.Decompress))
    {
        await TarFile.ExtractToDirectoryAsync(decompressed, targetDir, overwriteFiles: true, ct);
    }
}
|
||||
|
||||
// Mutable per-module tally accumulated while importing one module's entries
// (advisories, VEX statements, or policies).
private sealed class ModuleImportResult
{
    public int Processed { get; set; }  // files successfully read from the bundle
    public int Created { get; set; }    // new records reported by the import target
    public int Updated { get; set; }    // existing records reported as updated
    public int Failed { get; set; }     // missing files, I/O errors, target failures
    public List<string> Errors { get; } = [];  // human-readable per-entry error details
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for knowledge snapshot importing.
|
||||
/// </summary>
|
||||
public interface IKnowledgeSnapshotImporter
{
    /// <summary>
    /// Imports a sealed knowledge snapshot bundle described by <paramref name="request"/>.
    /// </summary>
    /// <param name="request">Import options: bundle path, per-module toggles, dry-run flag.</param>
    /// <param name="cancellationToken">Token used to cancel the import.</param>
    /// <returns>Aggregate result including per-module statistics and collected errors.</returns>
    Task<SnapshotImportResult> ImportAsync(
        SnapshotImportRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
|
||||
/// Target interface for importing advisories (SEAL-015).
|
||||
/// Implemented by Concelier module.
|
||||
/// </summary>
|
||||
public interface IAdvisoryImportTarget
{
    /// <summary>
    /// Imports one advisory feed payload extracted from a snapshot bundle.
    /// </summary>
    /// <param name="data">Feed identifier, raw feed content, and snapshot metadata.</param>
    /// <param name="cancellationToken">Token used to cancel the import.</param>
    /// <returns>Created/updated/failed counts and an optional error message.</returns>
    Task<ModuleImportResultData> ImportAdvisoriesAsync(
        AdvisoryImportData data,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
|
||||
/// Target interface for importing VEX statements (SEAL-016).
|
||||
/// Implemented by Excititor module.
|
||||
/// </summary>
|
||||
public interface IVexImportTarget
{
    /// <summary>
    /// Imports one VEX statement payload extracted from a snapshot bundle.
    /// </summary>
    /// <param name="data">Source identifier, raw content, and snapshot metadata.</param>
    /// <param name="cancellationToken">Token used to cancel the import.</param>
    /// <returns>Created/updated/failed counts and an optional error message.</returns>
    Task<ModuleImportResultData> ImportVexStatementsAsync(
        VexImportData data,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
|
||||
/// Target interface for importing policies (SEAL-017).
|
||||
/// Implemented by Policy module.
|
||||
/// </summary>
|
||||
public interface IPolicyImportTarget
{
    /// <summary>
    /// Imports one policy pack payload extracted from a snapshot bundle.
    /// </summary>
    /// <param name="data">Policy identifier, raw bundle content, and optional version.</param>
    /// <param name="cancellationToken">Token used to cancel the import.</param>
    /// <returns>Created/updated/failed counts and an optional error message.</returns>
    Task<ModuleImportResultData> ImportPolicyAsync(
        PolicyImportData data,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>Options controlling a sealed knowledge snapshot import.</summary>
public sealed record SnapshotImportRequest
{
    /// <summary>Path to the tar.gz snapshot bundle on disk.</summary>
    public required string BundlePath { get; init; }

    /// <summary>Import advisory feeds from the bundle.</summary>
    public bool ImportAdvisories { get; init; } = true;

    /// <summary>Import VEX statements from the bundle.</summary>
    public bool ImportVex { get; init; } = true;

    /// <summary>Import policy packs from the bundle.</summary>
    public bool ImportPolicies { get; init; } = true;

    /// <summary>Read and count entries without writing to any import target.</summary>
    public bool DryRun { get; init; } = false;

    /// <summary>
    /// When true, any per-entry error fails the whole import.
    /// NOTE(review): semantics inferred from the name — confirm against the importer.
    /// </summary>
    public bool FailOnAnyError { get; init; } = false;
}
|
||||
|
||||
/// <summary>Aggregate outcome of a sealed knowledge snapshot import.</summary>
public sealed record SnapshotImportResult
{
    /// <summary>True when the import completed without a fatal error.</summary>
    public bool Success { get; init; }

    /// <summary>Identifier of the imported bundle, when known.</summary>
    public string? BundleId { get; init; }

    /// <summary>Timestamp at which the import started.</summary>
    public DateTimeOffset StartedAt { get; init; }

    /// <summary>Timestamp at which the import finished (or failed).</summary>
    public DateTimeOffset CompletedAt { get; init; }

    /// <summary>Per-module processed/created/updated/failed counters; may be null on early failure.</summary>
    public ImportStatistics? Statistics { get; init; }

    /// <summary>Per-entry error details accumulated during the import.</summary>
    public IReadOnlyList<string>? Errors { get; init; }

    /// <summary>Fatal error message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    /// <summary>Creates a failed result stamped with the current UTC time.</summary>
    public static SnapshotImportResult Failed(string error) => new()
    {
        Success = false,
        Error = error,
        StartedAt = DateTimeOffset.UtcNow,
        CompletedAt = DateTimeOffset.UtcNow
    };
}
|
||||
|
||||
/// <summary>
/// Per-module import counters. Properties are deliberately mutable (get; set;)
/// so the importer can accumulate counts in place; the Total* properties derive
/// cross-module sums.
/// </summary>
public sealed record ImportStatistics
{
    // Advisory module counters.
    public int AdvisoriesProcessed { get; set; }
    public int AdvisoriesCreated { get; set; }
    public int AdvisoriesUpdated { get; set; }
    public int AdvisoriesFailed { get; set; }

    // VEX module counters.
    public int VexProcessed { get; set; }
    public int VexCreated { get; set; }
    public int VexUpdated { get; set; }
    public int VexFailed { get; set; }

    // Policy module counters.
    public int PoliciesProcessed { get; set; }
    public int PoliciesCreated { get; set; }
    public int PoliciesUpdated { get; set; }
    public int PoliciesFailed { get; set; }

    /// <summary>Sum of processed counts across all modules.</summary>
    public int TotalProcessed => AdvisoriesProcessed + VexProcessed + PoliciesProcessed;

    /// <summary>Sum of created counts across all modules.</summary>
    public int TotalCreated => AdvisoriesCreated + VexCreated + PoliciesCreated;

    /// <summary>Sum of updated counts across all modules.</summary>
    public int TotalUpdated => AdvisoriesUpdated + VexUpdated + PoliciesUpdated;

    /// <summary>Sum of failed counts across all modules.</summary>
    public int TotalFailed => AdvisoriesFailed + VexFailed + PoliciesFailed;
}
|
||||
|
||||
/// <summary>Payload handed to <see cref="IAdvisoryImportTarget"/> for one advisory feed.</summary>
public sealed record AdvisoryImportData
{
    /// <summary>Identifier of the advisory feed the content belongs to.</summary>
    public required string FeedId { get; init; }

    /// <summary>Raw feed content as extracted from the bundle.</summary>
    public required byte[] Content { get; init; }

    /// <summary>When the feed was snapshotted into the bundle.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of records in the feed, per the snapshot entry.</summary>
    public int RecordCount { get; init; }
}
|
||||
|
||||
/// <summary>Payload handed to <see cref="IVexImportTarget"/> for one VEX source.</summary>
public sealed record VexImportData
{
    /// <summary>Identifier of the VEX source the content belongs to.</summary>
    public required string SourceId { get; init; }

    /// <summary>Raw VEX content as extracted from the bundle.</summary>
    public required byte[] Content { get; init; }

    /// <summary>When the statements were snapshotted into the bundle.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of statements in the payload, per the snapshot entry.</summary>
    public int StatementCount { get; init; }
}
|
||||
|
||||
/// <summary>Payload handed to <see cref="IPolicyImportTarget"/> for one policy pack.</summary>
public sealed record PolicyImportData
{
    /// <summary>Identifier of the policy pack.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Raw policy bundle content as extracted from the snapshot.</summary>
    public required byte[] Content { get; init; }

    /// <summary>Optional policy pack version; targets may apply a default when null.</summary>
    public string? Version { get; init; }

    /// <summary>When the policy was snapshotted into the bundle.</summary>
    public DateTimeOffset SnapshotAt { get; init; }
}
|
||||
|
||||
/// <summary>Result returned by a module import target for a single payload.</summary>
public sealed record ModuleImportResultData
{
    /// <summary>Records newly created by the target.</summary>
    public int Created { get; init; }

    /// <summary>Existing records updated by the target.</summary>
    public int Updated { get; init; }

    /// <summary>Records that failed to import.</summary>
    public int Failed { get; init; }

    /// <summary>Error detail when part of the import failed; null on success.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,247 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyRegistryImportTarget.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-017 - Apply snapshot policy content to Policy registry
|
||||
// Description: Adapter implementing IPolicyImportTarget for Policy module.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Implements IPolicyImportTarget for importing policy packs from snapshots.
|
||||
/// Parses policy bundle content and stores to the policy registry.
|
||||
/// </summary>
|
||||
public sealed class PolicyRegistryImportTarget : IPolicyImportTarget
{
    // Case-insensitive camelCase so policy bundles authored with either casing parse.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    private readonly IPolicyPackImportStore _store;
    private readonly string _tenantId;

    /// <summary>
    /// Creates a target that stores imported packs in <paramref name="store"/>
    /// under <paramref name="tenantId"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="store"/> is null.</exception>
    public PolicyRegistryImportTarget(
        IPolicyPackImportStore store,
        string tenantId = "default")
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _tenantId = tenantId;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Deduplicates on the SHA-256 content digest: an identical pack that already
    /// exists is reported as Updated=1 without re-parsing or re-storing. All
    /// failures are returned in the result; this method does not throw.
    /// </remarks>
    public async Task<ModuleImportResultData> ImportPolicyAsync(
        PolicyImportData data,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(data);

        if (data.Content.Length == 0)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = "Empty policy content"
            };
        }

        try
        {
            var digest = ComputeDigest(data.Content);

            // Identical content already imported for this tenant → treat as an update.
            var existing = await _store.FindByDigestAsync(_tenantId, digest, cancellationToken);
            if (existing is not null)
            {
                return new ModuleImportResultData { Updated = 1 };
            }

            // Parse to validate the bundle and pull out its metadata.
            var bundle = ParsePolicyBundle(data.Content);

            var pack = new ImportedPolicyPack(
                Id: data.PolicyId,
                TenantId: _tenantId,
                Digest: digest,
                Version: data.Version ?? "1.0.0",
                Content: data.Content,
                Metadata: bundle.Metadata,
                ImportedAt: DateTimeOffset.UtcNow);

            await _store.SaveAsync(pack, cancellationToken);

            return new ModuleImportResultData { Created = 1 };
        }
        catch (JsonException ex)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = $"JSON parse error: {ex.Message}"
            };
        }
        catch (Exception ex)
        {
            return new ModuleImportResultData
            {
                Failed = 1,
                Error = $"Policy import error: {ex.Message}"
            };
        }
    }

    /// <summary>Returns the content digest in "sha256:&lt;lowercase hex&gt;" form.</summary>
    private static string ComputeDigest(byte[] content)
    {
        var hash = SHA256.HashData(content);
        // ToHexString + ToLowerInvariant keeps digest formatting consistent with the
        // rest of the bundle code and avoids the .NET 9-only Convert.ToHexStringLower.
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Deserializes the UTF-8 JSON policy bundle.
    /// </summary>
    /// <exception cref="JsonException">When the content is not valid JSON.</exception>
    /// <exception cref="InvalidDataException">When the JSON deserializes to null.</exception>
    private static PolicyBundleDto ParsePolicyBundle(byte[] content)
    {
        var json = Encoding.UTF8.GetString(content);
        return JsonSerializer.Deserialize<PolicyBundleDto>(json, JsonOptions)
            ?? throw new InvalidDataException("Failed to parse policy bundle");
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Store interface for importing policy packs from air-gap snapshots.
|
||||
/// </summary>
|
||||
public interface IPolicyPackImportStore
{
    /// <summary>
    /// Finds an imported policy pack by content digest.
    /// </summary>
    /// <param name="tenantId">Tenant to search within.</param>
    /// <param name="digest">Content digest in "sha256:&lt;hex&gt;" form.</param>
    /// <param name="cancellationToken">Token used to cancel the lookup.</param>
    /// <returns>The matching pack, or null if none exists.</returns>
    Task<ImportedPolicyPack?> FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken);

    /// <summary>
    /// Saves an imported policy pack (implementations may overwrite an existing
    /// pack with the same tenant/id).
    /// </summary>
    Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken);

    /// <summary>
    /// Lists all imported policy packs for a tenant.
    /// </summary>
    Task<IReadOnlyList<ImportedPolicyPack>> ListAsync(string tenantId, CancellationToken cancellationToken);
}
|
||||
|
||||
/// <summary>
|
||||
/// Lightweight in-memory implementation of IPolicyPackImportStore for air-gap scenarios.
|
||||
/// </summary>
|
||||
public sealed class InMemoryPolicyPackImportStore : IPolicyPackImportStore
{
    // Keyed by "{tenantId}:{packId}" so re-importing the same pack overwrites it.
    private readonly Dictionary<string, ImportedPolicyPack> _packs = new();
    private readonly object _lock = new();

    /// <inheritdoc />
    public Task<ImportedPolicyPack?> FindByDigestAsync(string tenantId, string digest, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            var pack = _packs.Values.FirstOrDefault(p => p.TenantId == tenantId && p.Digest == digest);
            return Task.FromResult(pack);
        }
    }

    /// <inheritdoc />
    public Task SaveAsync(ImportedPolicyPack pack, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            _packs[$"{pack.TenantId}:{pack.Id}"] = pack;
        }
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ImportedPolicyPack>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        lock (_lock)
        {
            IReadOnlyList<ImportedPolicyPack> packs = _packs.Values.Where(p => p.TenantId == tenantId).ToList();
            return Task.FromResult(packs);
        }
    }

    /// <summary>Total number of stored packs across all tenants.</summary>
    public int Count
    {
        get
        {
            // BUG FIX: the original read _packs.Count without the lock, racing with
            // SaveAsync; every other member synchronizes on _lock.
            lock (_lock)
            {
                return _packs.Count;
            }
        }
    }

    /// <summary>Returns a point-in-time copy of all stored packs (all tenants).</summary>
    public IEnumerable<ImportedPolicyPack> GetAllPacks()
    {
        lock (_lock)
        {
            return _packs.Values.ToList();
        }
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Imported policy pack record.
|
||||
/// </summary>
|
||||
/// <param name="Id">Policy identifier from the snapshot entry.</param>
/// <param name="TenantId">Tenant the pack was imported for.</param>
/// <param name="Digest">Content digest in "sha256:&lt;hex&gt;" form, used for deduplication.</param>
/// <param name="Version">Pack version ("1.0.0" when the snapshot did not specify one).</param>
/// <param name="Content">Raw policy bundle bytes.</param>
/// <param name="Metadata">Optional metadata parsed from the bundle.</param>
/// <param name="ImportedAt">UTC timestamp of the import.</param>
public sealed record ImportedPolicyPack(
    string Id,
    string TenantId,
    string Digest,
    string Version,
    byte[] Content,
    PolicyPackMetadata? Metadata,
    DateTimeOffset ImportedAt);
|
||||
|
||||
/// <summary>
|
||||
/// DTO for deserializing policy bundle.
|
||||
/// </summary>
|
||||
internal sealed record PolicyBundleDto
{
    // Bundle schema version; defaults to 1 when absent from the JSON.
    public int SchemaVersion { get; init; } = 1;

    // Optional descriptive fields; all may be absent from the bundle JSON.
    public string? DomainId { get; init; }
    public string? Name { get; init; }
    public string? Description { get; init; }
    public string? Version { get; init; }

    // Optional authorship/licensing metadata carried into ImportedPolicyPack.
    public PolicyPackMetadata? Metadata { get; init; }

    // Rules contained in the bundle; may be null or empty.
    public IReadOnlyList<PolicyRuleDto>? Rules { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Policy pack metadata.
|
||||
/// </summary>
|
||||
public sealed record PolicyPackMetadata
{
    /// <summary>Pack author, if declared.</summary>
    public string? Author { get; init; }

    /// <summary>License identifier or text, if declared.</summary>
    public string? License { get; init; }

    /// <summary>Project or documentation URL, if declared.</summary>
    public string? Homepage { get; init; }

    /// <summary>When the pack was authored, if declared.</summary>
    public DateTimeOffset? CreatedAt { get; init; }

    /// <summary>Free-form classification tags, if declared.</summary>
    public IReadOnlyList<string>? Tags { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Policy rule within a bundle.
|
||||
/// </summary>
|
||||
internal sealed record PolicyRuleDto
{
    /// <summary>Rule identifier, if declared.</summary>
    public string? Id { get; init; }

    /// <summary>Human-readable rule name.</summary>
    public string? Name { get; init; }

    /// <summary>What the rule checks for.</summary>
    public string? Description { get; init; }

    /// <summary>Severity label; domain values not constrained here.</summary>
    public string? Severity { get; init; }

    /// <summary>Rule expression text; evaluated elsewhere, stored verbatim here.</summary>
    public string? Expression { get; init; }
}
|
||||
@@ -0,0 +1,548 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotBundleReader.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-012, SEAL-013 - Implement signature verification and merkle root validation
|
||||
// Description: Reads and verifies sealed knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Reads and verifies sealed knowledge snapshot bundles.
|
||||
/// </summary>
|
||||
public sealed class SnapshotBundleReader : ISnapshotBundleReader
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Reads and verifies a snapshot bundle: extracts it to a temp directory, parses
    /// manifest.json, then (per request flags) verifies the manifest signature, the
    /// per-file digests / merkle root, and the time anchor. The temp directory is
    /// always deleted (best-effort) before returning.
    /// </summary>
    /// <param name="request">Bundle path plus verification toggles and thresholds.</param>
    /// <param name="cancellationToken">Token used to cancel reading and verification.</param>
    /// <returns>A result carrying the manifest, bundle digest, and verification outcomes.</returns>
    public async Task<SnapshotBundleReadResult> ReadAsync(
        SnapshotBundleReadRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);

        if (!File.Exists(request.BundlePath))
        {
            return SnapshotBundleReadResult.Failed("Bundle file not found");
        }

        var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{Guid.NewGuid():N}");
        Directory.CreateDirectory(tempDir);

        try
        {
            await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);

            var manifestPath = Path.Combine(tempDir, "manifest.json");
            if (!File.Exists(manifestPath))
            {
                return SnapshotBundleReadResult.Failed("Manifest not found in bundle");
            }

            var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
            var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestBytes, JsonOptions);
            if (manifest is null)
            {
                return SnapshotBundleReadResult.Failed("Failed to parse manifest");
            }

            var result = new SnapshotBundleReadResult
            {
                Success = true,
                Manifest = manifest,
                BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken)
            };

            // --- Signature verification -------------------------------------
            if (request.VerifySignature)
            {
                var signaturePath = Path.Combine(tempDir, "manifest.sig");
                if (File.Exists(signaturePath))
                {
                    var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken);
                    var signatureResult = await VerifySignatureAsync(
                        manifestBytes, signatureBytes, request.PublicKey, cancellationToken);

                    result = result with
                    {
                        SignatureVerified = signatureResult.Verified,
                        SignatureKeyId = signatureResult.KeyId,
                        SignatureError = signatureResult.Error
                    };

                    if (!signatureResult.Verified && request.RequireValidSignature)
                    {
                        return result with
                        {
                            Success = false,
                            Error = $"Signature verification failed: {signatureResult.Error}"
                        };
                    }
                }
                else if (request.RequireValidSignature)
                {
                    return SnapshotBundleReadResult.Failed("Signature file not found but signature is required");
                }
            }

            // --- Merkle root / per-file digest verification ------------------
            if (request.VerifyMerkleRoot)
            {
                var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken);
                result = result with
                {
                    MerkleRootVerified = merkleResult.Verified,
                    MerkleRootError = merkleResult.Error
                };

                if (!merkleResult.Verified && request.RequireValidMerkleRoot)
                {
                    return result with
                    {
                        Success = false,
                        Error = $"Merkle root verification failed: {merkleResult.Error}"
                    };
                }
            }

            // --- Time anchor freshness verification --------------------------
            if (request.VerifyTimeAnchor && manifest.TimeAnchor is not null)
            {
                var timeAnchorService = new TimeAnchorService();
                var timeAnchorContent = new TimeAnchorContent
                {
                    AnchorTime = manifest.TimeAnchor.AnchorTime,
                    Source = manifest.TimeAnchor.Source,
                    TokenDigest = manifest.TimeAnchor.Digest
                };

                var timeAnchorResult = await timeAnchorService.ValidateAnchorAsync(
                    timeAnchorContent,
                    new TimeAnchorValidationRequest
                    {
                        MaxAgeHours = request.MaxAgeHours,
                        MaxClockDriftSeconds = request.MaxClockDriftSeconds
                    },
                    cancellationToken);

                result = result with
                {
                    TimeAnchorValid = timeAnchorResult.IsValid,
                    TimeAnchorAgeHours = timeAnchorResult.AgeHours,
                    TimeAnchorError = timeAnchorResult.Error
                };

                if (!timeAnchorResult.IsValid && request.RequireValidTimeAnchor)
                {
                    return result with
                    {
                        Success = false,
                        Error = $"Time anchor validation failed: {timeAnchorResult.Error}"
                    };
                }
            }

            return result;
        }
        catch (Exception ex)
        {
            return SnapshotBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
        }
        finally
        {
            // Best-effort cleanup of the temp extraction directory.
            try
            {
                if (Directory.Exists(tempDir))
                {
                    Directory.Delete(tempDir, recursive: true);
                }
            }
            catch
            {
                // Ignore cleanup errors
            }
        }
    }

    /// <summary>Extracts the tar.gz bundle into <paramref name="targetDir"/>.</summary>
    private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
    {
        await using var fileStream = File.OpenRead(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
    }

    /// <summary>Streams the whole file through SHA-256; returns "sha256:&lt;lowercase hex&gt;".</summary>
    private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, ct);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Verifies the manifest signature envelope and checks the signed payload digest
    /// matches the manifest bytes.
    /// NOTE(review): when <paramref name="publicKey"/> is null, Verified is reported
    /// true as long as the envelope parsed and the digest matched — confirm this
    /// trust-on-envelope behavior is intended.
    /// </summary>
    private static async Task<SignatureVerificationResult> VerifySignatureAsync(
        byte[] manifestBytes,
        byte[] signatureEnvelopeBytes,
        AsymmetricAlgorithm? publicKey,
        CancellationToken cancellationToken)
    {
        try
        {
            var signer = new SnapshotManifestSigner();
            var result = await signer.VerifyAsync(
                new ManifestVerificationRequest
                {
                    EnvelopeBytes = signatureEnvelopeBytes,
                    PublicKey = publicKey
                },
                cancellationToken);

            if (!result.Success)
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = result.Error
                };
            }

            // The signed payload must be exactly the manifest we extracted.
            var manifestDigest = ComputeSha256(manifestBytes);
            if (result.PayloadDigest != manifestDigest)
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = "Manifest digest does not match signed payload"
                };
            }

            var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId;

            return new SignatureVerificationResult
            {
                Verified = publicKey is null || (result.VerifiedSignatures?.Any(s => s.Verified == true) ?? false),
                KeyId = keyId
            };
        }
        catch (Exception ex)
        {
            return new SignatureVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Verifies every file listed in the manifest (advisories, VEX, policies, trust
    /// roots, in that order) exists with the expected digest, then recomputes the
    /// merkle root and compares it with the manifest's.
    /// </summary>
    private static async Task<MerkleVerificationResult> VerifyMerkleRootAsync(
        string bundleDir,
        KnowledgeSnapshotManifest manifest,
        CancellationToken cancellationToken)
    {
        try
        {
            var entries = new List<BundleEntry>();

            // One pass over all manifest sections; previously four copy-pasted loops.
            var expectedFiles = manifest.Advisories.Select(a => (a.RelativePath, a.Digest))
                .Concat(manifest.VexStatements.Select(v => (v.RelativePath, v.Digest)))
                .Concat(manifest.Policies.Select(p => (p.RelativePath, p.Digest)))
                .Concat(manifest.TrustRoots.Select(t => (t.RelativePath, t.Digest)));

            foreach (var (relativePath, expectedDigest) in expectedFiles)
            {
                var filePath = Path.Combine(bundleDir, relativePath.Replace('/', Path.DirectorySeparatorChar));
                if (!File.Exists(filePath))
                {
                    return new MerkleVerificationResult
                    {
                        Verified = false,
                        Error = $"Missing file: {relativePath}"
                    };
                }

                var content = await File.ReadAllBytesAsync(filePath, cancellationToken);
                var digest = ComputeSha256(content);

                if (digest != expectedDigest)
                {
                    return new MerkleVerificationResult
                    {
                        Verified = false,
                        Error = $"Digest mismatch for {relativePath}"
                    };
                }

                entries.Add(new BundleEntry(relativePath, digest, content.Length));
            }

            var computedRoot = ComputeMerkleRoot(entries);

            if (computedRoot != manifest.MerkleRoot)
            {
                return new MerkleVerificationResult
                {
                    Verified = false,
                    Error = $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {computedRoot}"
                };
            }

            return new MerkleVerificationResult { Verified = true };
        }
        catch (Exception ex)
        {
            return new MerkleVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>Returns the buffer's digest in "sha256:&lt;lowercase hex&gt;" form.</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Computes the merkle root over the entries: leaves are SHA256("path:digest")
    /// sorted by path (ordinal), pairwise-hashed until one node remains. Returns
    /// lowercase hex without a "sha256:" prefix; empty string for no entries.
    /// </summary>
    private static string ComputeMerkleRoot(List<BundleEntry> entries)
    {
        if (entries.Count == 0)
        {
            return string.Empty;
        }

        var leaves = entries
            .OrderBy(e => e.Path, StringComparer.Ordinal)
            .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
            .ToArray();

        while (leaves.Length > 1)
        {
            leaves = PairwiseHash(leaves).ToArray();
        }

        return Convert.ToHexString(leaves[0]).ToLowerInvariant();
    }

    /// <summary>
    /// Hashes adjacent node pairs; an unpaired trailing node is re-hashed alone
    /// (so odd levels still change value — writers must do the same).
    /// </summary>
    private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
    {
        for (var i = 0; i < nodes.Length; i += 2)
        {
            if (i + 1 >= nodes.Length)
            {
                yield return SHA256.HashData(nodes[i]);
                continue;
            }

            var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
            Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
            Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
            yield return SHA256.HashData(combined);
        }
    }

    // A verified bundle file: manifest-relative path, "sha256:..." digest, size.
    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);

    // Outcome of signature envelope verification.
    private sealed record SignatureVerificationResult
    {
        public bool Verified { get; init; }
        public string? KeyId { get; init; }
        public string? Error { get; init; }
    }

    // Outcome of merkle root / per-file digest verification.
    private sealed record MerkleVerificationResult
    {
        public bool Verified { get; init; }
        public string? Error { get; init; }
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for snapshot bundle reading.
|
||||
/// </summary>
|
||||
public interface ISnapshotBundleReader
{
    /// <summary>
    /// Reads a snapshot bundle from disk and runs the verifications selected in
    /// <paramref name="request"/> (signature, merkle root, time anchor).
    /// </summary>
    /// <param name="request">Bundle path plus verification toggles and thresholds.</param>
    /// <param name="cancellationToken">Token used to cancel reading and verification.</param>
    /// <returns>The manifest, bundle digest, and per-check verification outcomes.</returns>
    Task<SnapshotBundleReadResult> ReadAsync(
        SnapshotBundleReadRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>
|
||||
/// Request for reading a snapshot bundle.
|
||||
/// </summary>
|
||||
public sealed record SnapshotBundleReadRequest
{
    /// <summary>Path to the tar.gz snapshot bundle to read.</summary>
    public required string BundlePath { get; init; }

    /// <summary>
    /// Verify the manifest signature.
    /// </summary>
    public bool VerifySignature { get; init; } = true;

    /// <summary>
    /// Fail if signature is invalid (or the signature file is missing).
    /// </summary>
    public bool RequireValidSignature { get; init; }

    /// <summary>
    /// Verify the merkle root.
    /// </summary>
    public bool VerifyMerkleRoot { get; init; } = true;

    /// <summary>
    /// Fail if merkle root is invalid.
    /// </summary>
    public bool RequireValidMerkleRoot { get; init; } = true;

    /// <summary>
    /// Verify time anchor freshness (only applies when the manifest carries one).
    /// </summary>
    public bool VerifyTimeAnchor { get; init; } = true;

    /// <summary>
    /// Fail if time anchor is invalid.
    /// </summary>
    public bool RequireValidTimeAnchor { get; init; }

    /// <summary>
    /// Maximum age in hours for time anchor validation; null uses the validator's default.
    /// </summary>
    public int? MaxAgeHours { get; init; }

    /// <summary>
    /// Maximum clock drift in seconds for time anchor validation; null uses the validator's default.
    /// </summary>
    public int? MaxClockDriftSeconds { get; init; }

    /// <summary>
    /// Public key for signature verification. When null, the signature envelope is
    /// still checked for internal consistency but no key-based verification occurs.
    /// </summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of reading a snapshot bundle.
|
||||
/// </summary>
|
||||
public sealed record SnapshotBundleReadResult
{
    /// <summary>True when the bundle was read and all required checks passed.</summary>
    public bool Success { get; init; }

    /// <summary>Parsed manifest; null when reading failed before/at parse time.</summary>
    public KnowledgeSnapshotManifest? Manifest { get; init; }

    /// <summary>SHA-256 digest of the bundle file itself ("sha256:&lt;hex&gt;").</summary>
    public string? BundleDigest { get; init; }

    /// <summary>Fatal error message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    // Signature verification (null means the check was not run).
    public bool? SignatureVerified { get; init; }
    public string? SignatureKeyId { get; init; }
    public string? SignatureError { get; init; }

    // Merkle root verification (null means the check was not run).
    public bool? MerkleRootVerified { get; init; }
    public string? MerkleRootError { get; init; }

    // Time anchor verification (null means the check was not run).
    public bool? TimeAnchorValid { get; init; }
    public double? TimeAnchorAgeHours { get; init; }
    public string? TimeAnchorError { get; init; }

    /// <summary>Creates a failed result carrying only the error message.</summary>
    public static SnapshotBundleReadResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,455 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotBundleWriter.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-003 - Create SnapshotBundleWriter
|
||||
// Description: Writes sealed knowledge snapshots to tar.gz bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Writes sealed knowledge snapshots to tar.gz bundles with manifest and merkle root.
|
||||
/// </summary>
|
||||
public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Creates a knowledge snapshot bundle from the specified contents.
|
||||
/// </summary>
|
||||
public async Task<SnapshotBundleResult> WriteAsync(
|
||||
SnapshotBundleRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);
|
||||
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
{
|
||||
var entries = new List<BundleEntry>();
|
||||
var manifest = new KnowledgeSnapshotManifest
|
||||
{
|
||||
BundleId = request.BundleId ?? Guid.NewGuid().ToString("N"),
|
||||
Name = request.Name ?? $"knowledge-{DateTime.UtcNow:yyyy-MM-dd}",
|
||||
Version = request.Version ?? "1.0.0",
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
SchemaVersion = "1.0.0"
|
||||
};
|
||||
|
||||
// Write advisories
|
||||
if (request.Advisories is { Count: > 0 })
|
||||
{
|
||||
var advisoriesDir = Path.Combine(tempDir, "advisories");
|
||||
Directory.CreateDirectory(advisoriesDir);
|
||||
|
||||
foreach (var advisory in request.Advisories)
|
||||
{
|
||||
var feedDir = Path.Combine(advisoriesDir, advisory.FeedId);
|
||||
Directory.CreateDirectory(feedDir);
|
||||
|
||||
var filePath = Path.Combine(feedDir, advisory.FileName);
|
||||
await File.WriteAllBytesAsync(filePath, advisory.Content, cancellationToken);
|
||||
|
||||
var relativePath = $"advisories/{advisory.FeedId}/{advisory.FileName}";
|
||||
var digest = ComputeSha256(advisory.Content);
|
||||
|
||||
entries.Add(new BundleEntry(relativePath, digest, advisory.Content.Length));
|
||||
manifest.Advisories.Add(new AdvisorySnapshotEntry
|
||||
{
|
||||
FeedId = advisory.FeedId,
|
||||
RelativePath = relativePath,
|
||||
Digest = digest,
|
||||
SizeBytes = advisory.Content.Length,
|
||||
SnapshotAt = advisory.SnapshotAt ?? DateTimeOffset.UtcNow,
|
||||
RecordCount = advisory.RecordCount
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Write VEX statements
|
||||
if (request.VexStatements is { Count: > 0 })
|
||||
{
|
||||
var vexDir = Path.Combine(tempDir, "vex");
|
||||
Directory.CreateDirectory(vexDir);
|
||||
|
||||
foreach (var vex in request.VexStatements)
|
||||
{
|
||||
var sourceDir = Path.Combine(vexDir, vex.SourceId);
|
||||
Directory.CreateDirectory(sourceDir);
|
||||
|
||||
var filePath = Path.Combine(sourceDir, vex.FileName);
|
||||
await File.WriteAllBytesAsync(filePath, vex.Content, cancellationToken);
|
||||
|
||||
var relativePath = $"vex/{vex.SourceId}/{vex.FileName}";
|
||||
var digest = ComputeSha256(vex.Content);
|
||||
|
||||
entries.Add(new BundleEntry(relativePath, digest, vex.Content.Length));
|
||||
manifest.VexStatements.Add(new VexSnapshotEntry
|
||||
{
|
||||
SourceId = vex.SourceId,
|
||||
RelativePath = relativePath,
|
||||
Digest = digest,
|
||||
SizeBytes = vex.Content.Length,
|
||||
SnapshotAt = vex.SnapshotAt ?? DateTimeOffset.UtcNow,
|
||||
StatementCount = vex.StatementCount
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Write policies
|
||||
if (request.Policies is { Count: > 0 })
|
||||
{
|
||||
var policiesDir = Path.Combine(tempDir, "policies");
|
||||
Directory.CreateDirectory(policiesDir);
|
||||
|
||||
foreach (var policy in request.Policies)
|
||||
{
|
||||
var filePath = Path.Combine(policiesDir, policy.FileName);
|
||||
await File.WriteAllBytesAsync(filePath, policy.Content, cancellationToken);
|
||||
|
||||
var relativePath = $"policies/{policy.FileName}";
|
||||
var digest = ComputeSha256(policy.Content);
|
||||
|
||||
entries.Add(new BundleEntry(relativePath, digest, policy.Content.Length));
|
||||
manifest.Policies.Add(new PolicySnapshotEntry
|
||||
{
|
||||
PolicyId = policy.PolicyId,
|
||||
Name = policy.Name,
|
||||
Version = policy.Version,
|
||||
RelativePath = relativePath,
|
||||
Digest = digest,
|
||||
SizeBytes = policy.Content.Length,
|
||||
Type = policy.Type
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Write trust roots
|
||||
if (request.TrustRoots is { Count: > 0 })
|
||||
{
|
||||
var trustDir = Path.Combine(tempDir, "trust");
|
||||
Directory.CreateDirectory(trustDir);
|
||||
|
||||
foreach (var trustRoot in request.TrustRoots)
|
||||
{
|
||||
var filePath = Path.Combine(trustDir, trustRoot.FileName);
|
||||
await File.WriteAllBytesAsync(filePath, trustRoot.Content, cancellationToken);
|
||||
|
||||
var relativePath = $"trust/{trustRoot.FileName}";
|
||||
var digest = ComputeSha256(trustRoot.Content);
|
||||
|
||||
entries.Add(new BundleEntry(relativePath, digest, trustRoot.Content.Length));
|
||||
manifest.TrustRoots.Add(new TrustRootSnapshotEntry
|
||||
{
|
||||
KeyId = trustRoot.KeyId,
|
||||
RelativePath = relativePath,
|
||||
Digest = digest,
|
||||
SizeBytes = trustRoot.Content.Length,
|
||||
Algorithm = trustRoot.Algorithm,
|
||||
ExpiresAt = trustRoot.ExpiresAt
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Write time anchor
|
||||
if (request.TimeAnchor is not null)
|
||||
{
|
||||
var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json");
|
||||
var timeAnchorJson = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, JsonOptions);
|
||||
await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorJson, cancellationToken);
|
||||
|
||||
var digest = ComputeSha256(timeAnchorJson);
|
||||
entries.Add(new BundleEntry("time-anchor.json", digest, timeAnchorJson.Length));
|
||||
manifest.TimeAnchor = new TimeAnchorEntry
|
||||
{
|
||||
AnchorTime = request.TimeAnchor.AnchorTime,
|
||||
Source = request.TimeAnchor.Source,
|
||||
Digest = digest
|
||||
};
|
||||
}
|
||||
|
||||
// Compute merkle root
|
||||
manifest.MerkleRoot = ComputeMerkleRoot(entries);
|
||||
manifest.TotalSizeBytes = entries.Sum(e => e.SizeBytes);
|
||||
manifest.EntryCount = entries.Count;
|
||||
|
||||
// Write manifest
|
||||
var manifestJson = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
|
||||
var manifestPath = Path.Combine(tempDir, "manifest.json");
|
||||
await File.WriteAllBytesAsync(manifestPath, manifestJson, cancellationToken);
|
||||
|
||||
// Sign manifest if requested
|
||||
string? signingKeyId = null;
|
||||
string? signingAlgorithm = null;
|
||||
var signed = false;
|
||||
|
||||
if (request.Sign)
|
||||
{
|
||||
var signer = new SnapshotManifestSigner();
|
||||
var signResult = await signer.SignAsync(new ManifestSigningRequest
|
||||
{
|
||||
ManifestBytes = manifestJson,
|
||||
KeyFilePath = request.SigningKeyPath,
|
||||
KeyPassword = request.SigningKeyPassword
|
||||
}, cancellationToken);
|
||||
|
||||
if (signResult.Success && signResult.Envelope is not null)
|
||||
{
|
||||
var signaturePath = Path.Combine(tempDir, "manifest.sig");
|
||||
await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken);
|
||||
signingKeyId = signResult.KeyId;
|
||||
signingAlgorithm = signResult.Algorithm;
|
||||
signed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Create tar.gz bundle
|
||||
var outputPath = request.OutputPath;
|
||||
if (!outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
outputPath = $"{outputPath}.tar.gz";
|
||||
}
|
||||
|
||||
await CreateTarGzAsync(tempDir, outputPath, cancellationToken);
|
||||
|
||||
var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken);
|
||||
|
||||
return new SnapshotBundleResult
|
||||
{
|
||||
Success = true,
|
||||
OutputPath = outputPath,
|
||||
BundleId = manifest.BundleId,
|
||||
MerkleRoot = manifest.MerkleRoot,
|
||||
BundleDigest = bundleDigest,
|
||||
TotalSizeBytes = new FileInfo(outputPath).Length,
|
||||
EntryCount = entries.Count,
|
||||
CreatedAt = manifest.CreatedAt,
|
||||
Signed = signed,
|
||||
SigningKeyId = signingKeyId,
|
||||
SigningAlgorithm = signingAlgorithm
|
||||
};
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Clean up temp directory
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(tempDir))
|
||||
{
|
||||
Directory.Delete(tempDir, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static string ComputeSha256(byte[] content)
|
||||
{
|
||||
var hash = SHA256.HashData(content);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hash = await SHA256.HashDataAsync(stream, ct);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private static string ComputeMerkleRoot(List<BundleEntry> entries)
|
||||
{
|
||||
if (entries.Count == 0)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
var leaves = entries
|
||||
.OrderBy(e => e.Path, StringComparer.Ordinal)
|
||||
.Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
|
||||
.ToArray();
|
||||
|
||||
while (leaves.Length > 1)
|
||||
{
|
||||
leaves = PairwiseHash(leaves).ToArray();
|
||||
}
|
||||
|
||||
return Convert.ToHexString(leaves[0]).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
|
||||
{
|
||||
for (var i = 0; i < nodes.Length; i += 2)
|
||||
{
|
||||
if (i + 1 >= nodes.Length)
|
||||
{
|
||||
yield return SHA256.HashData(nodes[i]);
|
||||
continue;
|
||||
}
|
||||
|
||||
var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
|
||||
Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
|
||||
Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
|
||||
yield return SHA256.HashData(combined);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
|
||||
{
|
||||
var outputDir = Path.GetDirectoryName(outputPath);
|
||||
if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir))
|
||||
{
|
||||
Directory.CreateDirectory(outputDir);
|
||||
}
|
||||
|
||||
await using var fileStream = File.Create(outputPath);
|
||||
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
|
||||
await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, includeBaseDirectory: false, ct);
|
||||
}
|
||||
|
||||
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for snapshot bundle writing.
|
||||
/// </summary>
|
||||
public interface ISnapshotBundleWriter
|
||||
{
|
||||
Task<SnapshotBundleResult> WriteAsync(
|
||||
SnapshotBundleRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>
|
||||
/// Request for creating a knowledge snapshot bundle.
|
||||
/// </summary>
|
||||
public sealed record SnapshotBundleRequest
|
||||
{
|
||||
public required string OutputPath { get; init; }
|
||||
public string? BundleId { get; init; }
|
||||
public string? Name { get; init; }
|
||||
public string? Version { get; init; }
|
||||
public List<AdvisoryContent> Advisories { get; init; } = [];
|
||||
public List<VexContent> VexStatements { get; init; } = [];
|
||||
public List<PolicyContent> Policies { get; init; } = [];
|
||||
public List<TrustRootContent> TrustRoots { get; init; } = [];
|
||||
public TimeAnchorContent? TimeAnchor { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to sign the manifest.
|
||||
/// </summary>
|
||||
public bool Sign { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Path to signing key file (PEM format).
|
||||
/// If null and Sign is true, an ephemeral key will be used.
|
||||
/// </summary>
|
||||
public string? SigningKeyPath { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Password for encrypted signing key.
|
||||
/// </summary>
|
||||
public string? SigningKeyPassword { get; init; }
|
||||
}
|
||||
|
||||
public sealed record AdvisoryContent
|
||||
{
|
||||
public required string FeedId { get; init; }
|
||||
public required string FileName { get; init; }
|
||||
public required byte[] Content { get; init; }
|
||||
public DateTimeOffset? SnapshotAt { get; init; }
|
||||
public int RecordCount { get; init; }
|
||||
}
|
||||
|
||||
public sealed record VexContent
|
||||
{
|
||||
public required string SourceId { get; init; }
|
||||
public required string FileName { get; init; }
|
||||
public required byte[] Content { get; init; }
|
||||
public DateTimeOffset? SnapshotAt { get; init; }
|
||||
public int StatementCount { get; init; }
|
||||
}
|
||||
|
||||
public sealed record PolicyContent
|
||||
{
|
||||
public required string PolicyId { get; init; }
|
||||
public required string Name { get; init; }
|
||||
public required string Version { get; init; }
|
||||
public required string FileName { get; init; }
|
||||
public required byte[] Content { get; init; }
|
||||
public string Type { get; init; } = "OpaRego";
|
||||
}
|
||||
|
||||
public sealed record TrustRootContent
|
||||
{
|
||||
public required string KeyId { get; init; }
|
||||
public required string FileName { get; init; }
|
||||
public required byte[] Content { get; init; }
|
||||
public string Algorithm { get; init; } = "ES256";
|
||||
public DateTimeOffset? ExpiresAt { get; init; }
|
||||
}
|
||||
|
||||
public sealed record TimeAnchorContent
|
||||
{
|
||||
public required DateTimeOffset AnchorTime { get; init; }
|
||||
public required string Source { get; init; }
|
||||
public string? TokenDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of creating a knowledge snapshot bundle.
|
||||
/// </summary>
|
||||
public sealed record SnapshotBundleResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public string? OutputPath { get; init; }
|
||||
public string? BundleId { get; init; }
|
||||
public string? MerkleRoot { get; init; }
|
||||
public string? BundleDigest { get; init; }
|
||||
public long TotalSizeBytes { get; init; }
|
||||
public int EntryCount { get; init; }
|
||||
public DateTimeOffset CreatedAt { get; init; }
|
||||
public string? Error { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the manifest was signed.
|
||||
/// </summary>
|
||||
public bool Signed { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key ID used for signing.
|
||||
/// </summary>
|
||||
public string? SigningKeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Algorithm used for signing.
|
||||
/// </summary>
|
||||
public string? SigningAlgorithm { get; init; }
|
||||
|
||||
public static SnapshotBundleResult Failed(string error) => new()
|
||||
{
|
||||
Success = false,
|
||||
Error = error
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,486 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotManifestSigner.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-004 - Add DSSE signing for manifest
|
||||
// Description: Signs snapshot manifests using DSSE format for integrity verification.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Signs snapshot manifests using DSSE (Dead Simple Signing Envelope) format.
|
||||
/// Produces signatures compatible with in-toto/Sigstore verification.
|
||||
/// </summary>
|
||||
public sealed class SnapshotManifestSigner : ISnapshotManifestSigner
|
||||
{
|
||||
private const string DssePayloadType = "application/vnd.stellaops.knowledge-snapshot+json";
|
||||
private const string PreAuthenticationEncodingPrefix = "DSSEv1";
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Signs a manifest using the provided signing key.
|
||||
/// </summary>
|
||||
public async Task<ManifestSignatureResult> SignAsync(
|
||||
ManifestSigningRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(request.ManifestBytes);
|
||||
|
||||
// Build PAE (Pre-Authentication Encoding) for DSSE signing
|
||||
var paeBytes = BuildPae(DssePayloadType, request.ManifestBytes);
|
||||
|
||||
// Sign the PAE
|
||||
byte[] signatureBytes;
|
||||
string keyId;
|
||||
string algorithm;
|
||||
|
||||
if (request.SigningKey is not null)
|
||||
{
|
||||
// Use provided signing key
|
||||
(signatureBytes, keyId, algorithm) = await SignWithKeyAsync(
|
||||
request.SigningKey, paeBytes, cancellationToken);
|
||||
}
|
||||
else if (!string.IsNullOrWhiteSpace(request.KeyFilePath))
|
||||
{
|
||||
// Load key from file and sign
|
||||
(signatureBytes, keyId, algorithm) = await SignWithKeyFileAsync(
|
||||
request.KeyFilePath, request.KeyPassword, paeBytes, cancellationToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Generate ephemeral key for signing (keyless mode)
|
||||
(signatureBytes, keyId, algorithm) = await SignEphemeralAsync(paeBytes, cancellationToken);
|
||||
}
|
||||
|
||||
// Build DSSE envelope
|
||||
var envelope = BuildDsseEnvelope(request.ManifestBytes, signatureBytes, keyId);
|
||||
|
||||
return new ManifestSignatureResult
|
||||
{
|
||||
Success = true,
|
||||
Envelope = envelope,
|
||||
KeyId = keyId,
|
||||
Algorithm = algorithm,
|
||||
SignatureDigest = ComputeSha256(signatureBytes)
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies a DSSE envelope signature.
|
||||
/// </summary>
|
||||
public async Task<ManifestVerificationResult> VerifyAsync(
|
||||
ManifestVerificationRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(request.EnvelopeBytes);
|
||||
|
||||
try
|
||||
{
|
||||
// Parse the envelope
|
||||
using var envelope = JsonDocument.Parse(request.EnvelopeBytes);
|
||||
var root = envelope.RootElement;
|
||||
|
||||
if (!root.TryGetProperty("payloadType", out var payloadTypeElement) ||
|
||||
!root.TryGetProperty("payload", out var payloadElement) ||
|
||||
!root.TryGetProperty("signatures", out var signaturesElement))
|
||||
{
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = false,
|
||||
Error = "Invalid DSSE envelope structure"
|
||||
};
|
||||
}
|
||||
|
||||
var payloadType = payloadTypeElement.GetString();
|
||||
var payloadBase64 = payloadElement.GetString();
|
||||
|
||||
if (string.IsNullOrEmpty(payloadBase64))
|
||||
{
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = false,
|
||||
Error = "Missing payload in envelope"
|
||||
};
|
||||
}
|
||||
|
||||
// Decode payload
|
||||
var payloadBytes = Convert.FromBase64String(payloadBase64);
|
||||
|
||||
// Compute expected digest
|
||||
var payloadDigest = ComputeSha256(payloadBytes);
|
||||
|
||||
// Verify at least one signature
|
||||
var signatureCount = signaturesElement.GetArrayLength();
|
||||
if (signatureCount == 0)
|
||||
{
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = false,
|
||||
Error = "No signatures present in envelope"
|
||||
};
|
||||
}
|
||||
|
||||
// Build PAE for verification
|
||||
var paeBytes = BuildPae(payloadType ?? DssePayloadType, payloadBytes);
|
||||
|
||||
// Verify signatures if public key is provided
|
||||
var verifiedSignatures = new List<VerifiedSignature>();
|
||||
foreach (var sig in signaturesElement.EnumerateArray())
|
||||
{
|
||||
var keyId = sig.TryGetProperty("keyid", out var keyIdElement)
|
||||
? keyIdElement.GetString()
|
||||
: null;
|
||||
|
||||
if (sig.TryGetProperty("sig", out var sigElement))
|
||||
{
|
||||
var signatureBase64 = sigElement.GetString();
|
||||
if (!string.IsNullOrEmpty(signatureBase64))
|
||||
{
|
||||
// If public key is provided, verify the signature
|
||||
if (request.PublicKey is not null)
|
||||
{
|
||||
var signatureBytes = Convert.FromBase64String(signatureBase64);
|
||||
var isValid = await VerifySignatureAsync(
|
||||
request.PublicKey, paeBytes, signatureBytes, cancellationToken);
|
||||
|
||||
verifiedSignatures.Add(new VerifiedSignature(keyId, isValid));
|
||||
}
|
||||
else
|
||||
{
|
||||
// Without public key, we can only confirm presence
|
||||
verifiedSignatures.Add(new VerifiedSignature(keyId, null));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = true,
|
||||
PayloadDigest = payloadDigest,
|
||||
SignatureCount = signatureCount,
|
||||
VerifiedSignatures = verifiedSignatures,
|
||||
PayloadType = payloadType
|
||||
};
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = false,
|
||||
Error = $"Failed to parse envelope: {ex.Message}"
|
||||
};
|
||||
}
|
||||
catch (FormatException ex)
|
||||
{
|
||||
return new ManifestVerificationResult
|
||||
{
|
||||
Success = false,
|
||||
Error = $"Invalid base64 encoding: {ex.Message}"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] BuildPae(string payloadType, byte[] payload)
|
||||
{
|
||||
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
|
||||
var prefixBytes = Encoding.UTF8.GetBytes(PreAuthenticationEncodingPrefix);
|
||||
var typeLenStr = typeBytes.Length.ToString();
|
||||
var payloadLenStr = payload.Length.ToString();
|
||||
|
||||
var totalLen = prefixBytes.Length + 1 +
|
||||
typeLenStr.Length + 1 +
|
||||
typeBytes.Length + 1 +
|
||||
payloadLenStr.Length + 1 +
|
||||
payload.Length;
|
||||
|
||||
var pae = new byte[totalLen];
|
||||
var offset = 0;
|
||||
|
||||
// DSSEv1
|
||||
Buffer.BlockCopy(prefixBytes, 0, pae, offset, prefixBytes.Length);
|
||||
offset += prefixBytes.Length;
|
||||
pae[offset++] = 0x20;
|
||||
|
||||
// LEN(type)
|
||||
var typeLenBytes = Encoding.UTF8.GetBytes(typeLenStr);
|
||||
Buffer.BlockCopy(typeLenBytes, 0, pae, offset, typeLenBytes.Length);
|
||||
offset += typeLenBytes.Length;
|
||||
pae[offset++] = 0x20;
|
||||
|
||||
// type
|
||||
Buffer.BlockCopy(typeBytes, 0, pae, offset, typeBytes.Length);
|
||||
offset += typeBytes.Length;
|
||||
pae[offset++] = 0x20;
|
||||
|
||||
// LEN(payload)
|
||||
var payloadLenBytes = Encoding.UTF8.GetBytes(payloadLenStr);
|
||||
Buffer.BlockCopy(payloadLenBytes, 0, pae, offset, payloadLenBytes.Length);
|
||||
offset += payloadLenBytes.Length;
|
||||
pae[offset++] = 0x20;
|
||||
|
||||
// payload
|
||||
Buffer.BlockCopy(payload, 0, pae, offset, payload.Length);
|
||||
|
||||
return pae;
|
||||
}
|
||||
|
||||
private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyAsync(
|
||||
AsymmetricAlgorithm key,
|
||||
byte[] data,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.CompletedTask; // Signature operations are synchronous
|
||||
|
||||
return key switch
|
||||
{
|
||||
ECDsa ecdsa => SignWithEcdsa(ecdsa, data),
|
||||
RSA rsa => SignWithRsa(rsa, data),
|
||||
_ => throw new NotSupportedException($"Unsupported key type: {key.GetType().Name}")
|
||||
};
|
||||
}
|
||||
|
||||
private static (byte[] Signature, string KeyId, string Algorithm) SignWithEcdsa(ECDsa ecdsa, byte[] data)
|
||||
{
|
||||
var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256);
|
||||
var keyId = ComputeKeyId(ecdsa);
|
||||
var algorithm = ecdsa.KeySize switch
|
||||
{
|
||||
256 => "ES256",
|
||||
384 => "ES384",
|
||||
521 => "ES512",
|
||||
_ => "ECDSA"
|
||||
};
|
||||
return (signature, keyId, algorithm);
|
||||
}
|
||||
|
||||
private static (byte[] Signature, string KeyId, string Algorithm) SignWithRsa(RSA rsa, byte[] data)
|
||||
{
|
||||
var signature = rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
|
||||
var keyId = ComputeKeyId(rsa);
|
||||
return (signature, keyId, "RS256");
|
||||
}
|
||||
|
||||
private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyFileAsync(
|
||||
string keyFilePath,
|
||||
string? password,
|
||||
byte[] data,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var keyBytes = await File.ReadAllBytesAsync(keyFilePath, cancellationToken);
|
||||
var keyPem = Encoding.UTF8.GetString(keyBytes);
|
||||
|
||||
// Try to load as ECDSA first
|
||||
try
|
||||
{
|
||||
using var ecdsa = ECDsa.Create();
|
||||
if (string.IsNullOrEmpty(password))
|
||||
{
|
||||
ecdsa.ImportFromPem(keyPem);
|
||||
}
|
||||
else
|
||||
{
|
||||
ecdsa.ImportFromEncryptedPem(keyPem, password);
|
||||
}
|
||||
return SignWithEcdsa(ecdsa, data);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
// Try RSA
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
using var rsa = RSA.Create();
|
||||
if (string.IsNullOrEmpty(password))
|
||||
{
|
||||
rsa.ImportFromPem(keyPem);
|
||||
}
|
||||
else
|
||||
{
|
||||
rsa.ImportFromEncryptedPem(keyPem, password);
|
||||
}
|
||||
return SignWithRsa(rsa, data);
|
||||
}
|
||||
catch (CryptographicException ex)
|
||||
{
|
||||
throw new InvalidOperationException($"Failed to load signing key from {keyFilePath}", ex);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignEphemeralAsync(
|
||||
byte[] data,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
|
||||
// Generate ephemeral ECDSA P-256 key
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256);
|
||||
var keyId = $"ephemeral:{ComputeKeyId(ecdsa)}";
|
||||
return (signature, keyId, "ES256");
|
||||
}
|
||||
|
||||
private static async Task<bool> VerifySignatureAsync(
|
||||
AsymmetricAlgorithm key,
|
||||
byte[] data,
|
||||
byte[] signature,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
|
||||
return key switch
|
||||
{
|
||||
ECDsa ecdsa => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256),
|
||||
RSA rsa => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1),
|
||||
_ => false
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeKeyId(AsymmetricAlgorithm key)
|
||||
{
|
||||
byte[] publicKeyBytes;
|
||||
|
||||
switch (key)
|
||||
{
|
||||
case ECDsa ecdsa:
|
||||
publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo();
|
||||
break;
|
||||
case RSA rsa:
|
||||
publicKeyBytes = rsa.ExportSubjectPublicKeyInfo();
|
||||
break;
|
||||
default:
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
var hash = SHA256.HashData(publicKeyBytes);
|
||||
return Convert.ToHexString(hash[..8]).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static byte[] BuildDsseEnvelope(byte[] payload, byte[] signature, string keyId)
|
||||
{
|
||||
var payloadBase64 = Convert.ToBase64String(payload);
|
||||
var signatureBase64 = Convert.ToBase64String(signature);
|
||||
|
||||
var envelope = new DsseEnvelopeDto
|
||||
{
|
||||
PayloadType = DssePayloadType,
|
||||
Payload = payloadBase64,
|
||||
Signatures =
|
||||
[
|
||||
new DsseSignatureDto
|
||||
{
|
||||
KeyId = keyId,
|
||||
Sig = signatureBase64
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
return JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
|
||||
}
|
||||
|
||||
private static string ComputeSha256(byte[] content)
|
||||
{
|
||||
var hash = SHA256.HashData(content);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private sealed class DsseEnvelopeDto
|
||||
{
|
||||
public required string PayloadType { get; init; }
|
||||
public required string Payload { get; init; }
|
||||
public required List<DsseSignatureDto> Signatures { get; init; }
|
||||
}
|
||||
|
||||
private sealed class DsseSignatureDto
|
||||
{
|
||||
public string? KeyId { get; init; }
|
||||
public required string Sig { get; init; }
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for manifest signing operations.
|
||||
/// </summary>
|
||||
public interface ISnapshotManifestSigner
|
||||
{
|
||||
Task<ManifestSignatureResult> SignAsync(
|
||||
ManifestSigningRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
Task<ManifestVerificationResult> VerifyAsync(
|
||||
ManifestVerificationRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>
|
||||
/// Request for signing a manifest.
|
||||
/// </summary>
|
||||
public sealed record ManifestSigningRequest
|
||||
{
|
||||
public required byte[] ManifestBytes { get; init; }
|
||||
public AsymmetricAlgorithm? SigningKey { get; init; }
|
||||
public string? KeyFilePath { get; init; }
|
||||
public string? KeyPassword { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of signing a manifest.
|
||||
/// </summary>
|
||||
public sealed record ManifestSignatureResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public byte[]? Envelope { get; init; }
|
||||
public string? KeyId { get; init; }
|
||||
public string? Algorithm { get; init; }
|
||||
public string? SignatureDigest { get; init; }
|
||||
public string? Error { get; init; }
|
||||
|
||||
public static ManifestSignatureResult Failed(string error) => new()
|
||||
{
|
||||
Success = false,
|
||||
Error = error
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request for verifying a manifest signature.
|
||||
/// </summary>
|
||||
public sealed record ManifestVerificationRequest
|
||||
{
|
||||
public required byte[] EnvelopeBytes { get; init; }
|
||||
public AsymmetricAlgorithm? PublicKey { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of verifying a manifest signature.
|
||||
/// </summary>
|
||||
public sealed record ManifestVerificationResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public string? PayloadDigest { get; init; }
|
||||
public string? PayloadType { get; init; }
|
||||
public int SignatureCount { get; init; }
|
||||
public IReadOnlyList<VerifiedSignature>? VerifiedSignatures { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A verified signature with optional verification status.
|
||||
/// </summary>
|
||||
public sealed record VerifiedSignature(string? KeyId, bool? Verified);
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,352 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TimeAnchorService.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-009 - Add time anchor token generation
|
||||
// Description: Generates time anchor tokens for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Generates time anchor tokens for snapshot bundles.
|
||||
/// Time anchors provide cryptographic proof of the time when a snapshot was created.
|
||||
/// </summary>
|
||||
public sealed class TimeAnchorService : ITimeAnchorService
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Creates a time anchor token for a snapshot.
|
||||
/// </summary>
|
||||
public async Task<TimeAnchorResult> CreateAnchorAsync(
|
||||
TimeAnchorRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
try
|
||||
{
|
||||
var source = request.Source?.ToLowerInvariant() ?? "local";
|
||||
|
||||
return source switch
|
||||
{
|
||||
"local" => await CreateLocalAnchorAsync(request, cancellationToken),
|
||||
var s when s.StartsWith("roughtime:") => await CreateRoughtimeAnchorAsync(request, cancellationToken),
|
||||
var s when s.StartsWith("rfc3161:") => await CreateRfc3161AnchorAsync(request, cancellationToken),
|
||||
_ => await CreateLocalAnchorAsync(request, cancellationToken)
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return TimeAnchorResult.Failed($"Failed to create time anchor: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validates a time anchor token.
|
||||
/// </summary>
|
||||
public async Task<TimeAnchorValidationResult> ValidateAnchorAsync(
|
||||
TimeAnchorContent anchor,
|
||||
TimeAnchorValidationRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(anchor);
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
try
|
||||
{
|
||||
// Validate timestamp is within acceptable range
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var anchorAge = now - anchor.AnchorTime;
|
||||
|
||||
if (request.MaxAgeHours.HasValue && anchorAge.TotalHours > request.MaxAgeHours.Value)
|
||||
{
|
||||
return new TimeAnchorValidationResult
|
||||
{
|
||||
IsValid = false,
|
||||
AnchorTime = anchor.AnchorTime,
|
||||
Source = anchor.Source,
|
||||
AgeHours = anchorAge.TotalHours,
|
||||
Error = $"Time anchor is too old: {anchorAge.TotalHours:F1} hours (max: {request.MaxAgeHours.Value})"
|
||||
};
|
||||
}
|
||||
|
||||
// Validate anchor is not in the future (with drift tolerance)
|
||||
var maxDrift = TimeSpan.FromSeconds(request.MaxClockDriftSeconds ?? 60);
|
||||
if (anchor.AnchorTime > now + maxDrift)
|
||||
{
|
||||
return new TimeAnchorValidationResult
|
||||
{
|
||||
IsValid = false,
|
||||
AnchorTime = anchor.AnchorTime,
|
||||
Source = anchor.Source,
|
||||
Error = "Time anchor is in the future"
|
||||
};
|
||||
}
|
||||
|
||||
// Validate token digest if provided
|
||||
if (!string.IsNullOrEmpty(anchor.TokenDigest) && !string.IsNullOrEmpty(request.ExpectedTokenDigest))
|
||||
{
|
||||
if (!string.Equals(anchor.TokenDigest, request.ExpectedTokenDigest, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return new TimeAnchorValidationResult
|
||||
{
|
||||
IsValid = false,
|
||||
AnchorTime = anchor.AnchorTime,
|
||||
Source = anchor.Source,
|
||||
Error = "Token digest mismatch"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
return new TimeAnchorValidationResult
|
||||
{
|
||||
IsValid = true,
|
||||
AnchorTime = anchor.AnchorTime,
|
||||
Source = anchor.Source,
|
||||
AgeHours = anchorAge.TotalHours
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return new TimeAnchorValidationResult
|
||||
{
|
||||
IsValid = false,
|
||||
Error = $"Validation failed: {ex.Message}"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<TimeAnchorResult> CreateLocalAnchorAsync(
|
||||
TimeAnchorRequest request,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
|
||||
var anchorTime = DateTimeOffset.UtcNow;
|
||||
|
||||
// Create a local anchor with a signed timestamp
|
||||
var anchorData = new LocalAnchorData
|
||||
{
|
||||
Timestamp = anchorTime,
|
||||
Nonce = Guid.NewGuid().ToString("N"),
|
||||
MerkleRoot = request.MerkleRoot
|
||||
};
|
||||
|
||||
var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
|
||||
var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
|
||||
var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";
|
||||
|
||||
return new TimeAnchorResult
|
||||
{
|
||||
Success = true,
|
||||
Content = new TimeAnchorContent
|
||||
{
|
||||
AnchorTime = anchorTime,
|
||||
Source = "local",
|
||||
TokenDigest = tokenDigest
|
||||
},
|
||||
TokenBytes = anchorBytes
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<TimeAnchorResult> CreateRoughtimeAnchorAsync(
|
||||
TimeAnchorRequest request,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Roughtime is a cryptographic time synchronization protocol
|
||||
// This is a placeholder implementation - full implementation would use a Roughtime client
|
||||
var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003";
|
||||
|
||||
// For now, fallback to local with indication of intended source
|
||||
var anchorTime = DateTimeOffset.UtcNow;
|
||||
var anchorData = new RoughtimeAnchorData
|
||||
{
|
||||
Timestamp = anchorTime,
|
||||
Server = serverUrl,
|
||||
Midpoint = anchorTime.ToUnixTimeSeconds(),
|
||||
Radius = 1000000, // 1 second radius in microseconds
|
||||
Nonce = Guid.NewGuid().ToString("N"),
|
||||
MerkleRoot = request.MerkleRoot
|
||||
};
|
||||
|
||||
var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
|
||||
var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
|
||||
var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
return new TimeAnchorResult
|
||||
{
|
||||
Success = true,
|
||||
Content = new TimeAnchorContent
|
||||
{
|
||||
AnchorTime = anchorTime,
|
||||
Source = $"roughtime:{serverUrl}",
|
||||
TokenDigest = tokenDigest
|
||||
},
|
||||
TokenBytes = anchorBytes,
|
||||
Warning = "Roughtime client not implemented; using simulated response"
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<TimeAnchorResult> CreateRfc3161AnchorAsync(
|
||||
TimeAnchorRequest request,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// RFC 3161 is the Internet X.509 PKI Time-Stamp Protocol (TSP)
|
||||
// This is a placeholder implementation - full implementation would use a TSA client
|
||||
var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com";
|
||||
|
||||
var anchorTime = DateTimeOffset.UtcNow;
|
||||
var anchorData = new Rfc3161AnchorData
|
||||
{
|
||||
Timestamp = anchorTime,
|
||||
TsaUrl = tsaUrl,
|
||||
SerialNumber = Guid.NewGuid().ToString("N"),
|
||||
PolicyOid = "2.16.840.1.114412.2.1", // DigiCert timestamp policy
|
||||
MerkleRoot = request.MerkleRoot
|
||||
};
|
||||
|
||||
var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
|
||||
var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
|
||||
var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
return new TimeAnchorResult
|
||||
{
|
||||
Success = true,
|
||||
Content = new TimeAnchorContent
|
||||
{
|
||||
AnchorTime = anchorTime,
|
||||
Source = $"rfc3161:{tsaUrl}",
|
||||
TokenDigest = tokenDigest
|
||||
},
|
||||
TokenBytes = anchorBytes,
|
||||
Warning = "RFC 3161 TSA client not implemented; using simulated response"
|
||||
};
|
||||
}
|
||||
|
||||
private sealed record LocalAnchorData
|
||||
{
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required string Nonce { get; init; }
|
||||
public string? MerkleRoot { get; init; }
|
||||
}
|
||||
|
||||
private sealed record RoughtimeAnchorData
|
||||
{
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required string Server { get; init; }
|
||||
public required long Midpoint { get; init; }
|
||||
public required long Radius { get; init; }
|
||||
public required string Nonce { get; init; }
|
||||
public string? MerkleRoot { get; init; }
|
||||
}
|
||||
|
||||
private sealed record Rfc3161AnchorData
|
||||
{
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required string TsaUrl { get; init; }
|
||||
public required string SerialNumber { get; init; }
|
||||
public required string PolicyOid { get; init; }
|
||||
public string? MerkleRoot { get; init; }
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for time anchor operations.
|
||||
/// </summary>
|
||||
public interface ITimeAnchorService
|
||||
{
|
||||
Task<TimeAnchorResult> CreateAnchorAsync(
|
||||
TimeAnchorRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
Task<TimeAnchorValidationResult> ValidateAnchorAsync(
|
||||
TimeAnchorContent anchor,
|
||||
TimeAnchorValidationRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>
|
||||
/// Request for creating a time anchor.
|
||||
/// </summary>
|
||||
public sealed record TimeAnchorRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Time anchor source: "local", "roughtime:<server>", or "rfc3161:<tsa-url>"
|
||||
/// </summary>
|
||||
public string? Source { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Merkle root to bind to the time anchor (optional).
|
||||
/// </summary>
|
||||
public string? MerkleRoot { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of creating a time anchor.
|
||||
/// </summary>
|
||||
public sealed record TimeAnchorResult
|
||||
{
|
||||
public bool Success { get; init; }
|
||||
public TimeAnchorContent? Content { get; init; }
|
||||
public byte[]? TokenBytes { get; init; }
|
||||
public string? Warning { get; init; }
|
||||
public string? Error { get; init; }
|
||||
|
||||
public static TimeAnchorResult Failed(string error) => new()
|
||||
{
|
||||
Success = false,
|
||||
Error = error
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request for validating a time anchor.
|
||||
/// </summary>
|
||||
public sealed record TimeAnchorValidationRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Maximum age in hours.
|
||||
/// </summary>
|
||||
public int? MaxAgeHours { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum clock drift in seconds.
|
||||
/// </summary>
|
||||
public int? MaxClockDriftSeconds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Expected token digest for validation.
|
||||
/// </summary>
|
||||
public string? ExpectedTokenDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of validating a time anchor.
|
||||
/// </summary>
|
||||
public sealed record TimeAnchorValidationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public DateTimeOffset? AnchorTime { get; init; }
|
||||
public string? Source { get; init; }
|
||||
public double? AgeHours { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -12,6 +12,9 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -6,8 +6,14 @@ namespace StellaOps.Aoc.Cli;
|
||||
|
||||
public static class Program
|
||||
{
|
||||
private const string DeprecationDate = "2025-07-01";
|
||||
private const string MigrationUrl = "https://docs.stellaops.io/cli/migration";
|
||||
|
||||
public static async Task<int> Main(string[] args)
|
||||
{
|
||||
// Emit deprecation warning
|
||||
EmitDeprecationWarning();
|
||||
|
||||
var rootCommand = new RootCommand("StellaOps AOC CLI - Verify append-only contract compliance")
|
||||
{
|
||||
VerifyCommand.Create()
|
||||
@@ -15,4 +21,21 @@ public static class Program
|
||||
|
||||
return await rootCommand.InvokeAsync(args);
|
||||
}
|
||||
|
||||
private static void EmitDeprecationWarning()
|
||||
{
|
||||
var originalColor = Console.ForegroundColor;
|
||||
Console.ForegroundColor = ConsoleColor.Yellow;
|
||||
Console.Error.WriteLine();
|
||||
Console.Error.WriteLine("================================================================================");
|
||||
Console.Error.WriteLine("[DEPRECATED] stella-aoc is deprecated and will be removed on " + DeprecationDate + ".");
|
||||
Console.Error.WriteLine();
|
||||
Console.Error.WriteLine("Please migrate to the unified stella CLI:");
|
||||
Console.Error.WriteLine(" stella aoc verify --since <ref> --postgres <conn>");
|
||||
Console.Error.WriteLine();
|
||||
Console.Error.WriteLine("Migration guide: " + MigrationUrl);
|
||||
Console.Error.WriteLine("================================================================================");
|
||||
Console.Error.WriteLine();
|
||||
Console.ForegroundColor = originalColor;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ public sealed class AocVerificationService
|
||||
// Parse the since parameter
|
||||
var sinceTimestamp = ParseSinceParameter(options.Since);
|
||||
|
||||
// Verify PostgreSQL database
|
||||
// Verify using PostgreSQL
|
||||
await VerifyPostgresAsync(options.PostgresConnectionString, sinceTimestamp, options.Tenant, result, cancellationToken);
|
||||
|
||||
stopwatch.Stop();
|
||||
@@ -228,4 +228,5 @@ public sealed class AocVerificationService
|
||||
Console.WriteLine("Note: excititor.vex_documents table not found (may not be initialized)");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -117,25 +117,16 @@ public sealed class AocVerificationServiceTests
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyOptions_MongoAndPostgres_AreMutuallyExclusive()
|
||||
public void VerifyOptions_PostgresConnectionString_IsRequired()
|
||||
{
|
||||
var optionsMongo = new VerifyOptions
|
||||
{
|
||||
Since = "HEAD~1",
|
||||
MongoConnectionString = "mongodb://localhost:27017"
|
||||
};
|
||||
|
||||
var optionsPostgres = new VerifyOptions
|
||||
var options = new VerifyOptions
|
||||
{
|
||||
Since = "HEAD~1",
|
||||
PostgresConnectionString = "Host=localhost;Database=test"
|
||||
};
|
||||
|
||||
Assert.NotNull(optionsMongo.MongoConnectionString);
|
||||
Assert.Null(optionsMongo.PostgresConnectionString);
|
||||
|
||||
Assert.Null(optionsPostgres.MongoConnectionString);
|
||||
Assert.NotNull(optionsPostgres.PostgresConnectionString);
|
||||
Assert.NotNull(options.PostgresConnectionString);
|
||||
Assert.Equal("Host=localhost;Database=test", options.PostgresConnectionString);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -143,7 +134,8 @@ public sealed class AocVerificationServiceTests
|
||||
{
|
||||
var options = new VerifyOptions
|
||||
{
|
||||
Since = "2025-01-01"
|
||||
Since = "2025-01-01",
|
||||
PostgresConnectionString = "Host=localhost;Database=test"
|
||||
};
|
||||
|
||||
Assert.False(options.DryRun);
|
||||
@@ -154,7 +146,8 @@ public sealed class AocVerificationServiceTests
|
||||
{
|
||||
var options = new VerifyOptions
|
||||
{
|
||||
Since = "2025-01-01"
|
||||
Since = "2025-01-01",
|
||||
PostgresConnectionString = "Host=localhost;Database=test"
|
||||
};
|
||||
|
||||
Assert.False(options.Verbose);
|
||||
|
||||
@@ -0,0 +1,187 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://stella-ops.org/schemas/attestation/uncertainty-budget-statement.v1.json",
|
||||
"title": "Uncertainty Budget Statement",
|
||||
"description": "In-toto predicate type for uncertainty budget evaluation attestations. Sprint: SPRINT_4300_0002_0002 (UATT-007).",
|
||||
"type": "object",
|
||||
"required": ["_type", "subject", "predicateType", "predicate"],
|
||||
"properties": {
|
||||
"_type": {
|
||||
"type": "string",
|
||||
"const": "https://in-toto.io/Statement/v1"
|
||||
},
|
||||
"subject": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["digest"],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Subject identifier (e.g., environment name or image reference)"
|
||||
},
|
||||
"digest": {
|
||||
"type": "object",
|
||||
"description": "Cryptographic digest of the subject",
|
||||
"additionalProperties": {
|
||||
"type": "string",
|
||||
"pattern": "^[a-fA-F0-9]+$"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"predicateType": {
|
||||
"type": "string",
|
||||
"const": "uncertainty-budget.stella/v1"
|
||||
},
|
||||
"predicate": {
|
||||
"$ref": "#/$defs/UncertaintyBudgetPredicate"
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"UncertaintyBudgetPredicate": {
|
||||
"type": "object",
|
||||
"required": ["environment", "isWithinBudget", "action", "totalUnknowns", "evaluatedAt"],
|
||||
"properties": {
|
||||
"environment": {
|
||||
"type": "string",
|
||||
"description": "Environment against which budget was evaluated (e.g., production, staging)"
|
||||
},
|
||||
"isWithinBudget": {
|
||||
"type": "boolean",
|
||||
"description": "Whether the evaluation passed the budget check"
|
||||
},
|
||||
"action": {
|
||||
"type": "string",
|
||||
"enum": ["pass", "warn", "block"],
|
||||
"description": "Recommended action based on budget evaluation"
|
||||
},
|
||||
"totalUnknowns": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Total count of unknowns in evaluation"
|
||||
},
|
||||
"totalLimit": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Configured total unknown limit for this environment"
|
||||
},
|
||||
"percentageUsed": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 100,
|
||||
"description": "Percentage of budget consumed"
|
||||
},
|
||||
"violationCount": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Number of budget rule violations"
|
||||
},
|
||||
"violations": {
|
||||
"type": "array",
|
||||
"description": "Detailed violation information",
|
||||
"items": {
|
||||
"$ref": "#/$defs/BudgetViolation"
|
||||
}
|
||||
},
|
||||
"budget": {
|
||||
"$ref": "#/$defs/BudgetDefinition",
|
||||
"description": "Budget definition that was applied"
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"description": "Human-readable budget status message"
|
||||
},
|
||||
"evaluatedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "ISO-8601 timestamp of budget evaluation"
|
||||
},
|
||||
"policyRevisionId": {
|
||||
"type": "string",
|
||||
"description": "Policy revision ID containing the budget rules"
|
||||
},
|
||||
"imageDigest": {
|
||||
"type": "string",
|
||||
"pattern": "^sha256:[a-fA-F0-9]{64}$",
|
||||
"description": "Optional container image digest"
|
||||
},
|
||||
"uncertaintyStatementId": {
|
||||
"type": "string",
|
||||
"description": "Reference to the linked uncertainty statement attestation ID"
|
||||
}
|
||||
}
|
||||
},
|
||||
"BudgetViolation": {
|
||||
"type": "object",
|
||||
"required": ["reasonCode", "count", "limit"],
|
||||
"properties": {
|
||||
"reasonCode": {
|
||||
"type": "string",
|
||||
"enum": ["U-RCH", "U-ID", "U-PROV", "U-VEX", "U-FEED", "U-CONFIG", "U-ANALYZER"],
|
||||
"description": "Unknown reason code that violated the budget"
|
||||
},
|
||||
"count": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Actual count of unknowns for this reason"
|
||||
},
|
||||
"limit": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Configured limit for this reason"
|
||||
},
|
||||
"severity": {
|
||||
"type": "string",
|
||||
"enum": ["low", "medium", "high", "critical"],
|
||||
"description": "Severity of the violation"
|
||||
}
|
||||
}
|
||||
},
|
||||
"BudgetDefinition": {
|
||||
"type": "object",
|
||||
"required": ["name", "environment"],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Budget rule name"
|
||||
},
|
||||
"environment": {
|
||||
"type": "string",
|
||||
"description": "Target environment"
|
||||
},
|
||||
"totalLimit": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Total unknown limit"
|
||||
},
|
||||
"tierMax": {
|
||||
"type": "string",
|
||||
"enum": ["T1", "T2", "T3", "T4"],
|
||||
"description": "Maximum allowed uncertainty tier"
|
||||
},
|
||||
"entropyMax": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1,
|
||||
"description": "Maximum allowed mean entropy"
|
||||
},
|
||||
"reasonLimits": {
|
||||
"type": "object",
|
||||
"description": "Per-reason-code limits",
|
||||
"additionalProperties": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
}
|
||||
},
|
||||
"action": {
|
||||
"type": "string",
|
||||
"enum": ["warn", "block", "warnUnlessException"],
|
||||
"description": "Action to take when budget is exceeded"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://stella-ops.org/schemas/attestation/uncertainty-statement.v1.json",
|
||||
"title": "Uncertainty Statement",
|
||||
"description": "In-toto predicate type for uncertainty state attestations. Sprint: SPRINT_4300_0002_0002 (UATT-007).",
|
||||
"type": "object",
|
||||
"required": ["_type", "subject", "predicateType", "predicate"],
|
||||
"properties": {
|
||||
"_type": {
|
||||
"type": "string",
|
||||
"const": "https://in-toto.io/Statement/v1"
|
||||
},
|
||||
"subject": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["digest"],
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Subject identifier (e.g., SBOM file name or image reference)"
|
||||
},
|
||||
"digest": {
|
||||
"type": "object",
|
||||
"description": "Cryptographic digest of the subject",
|
||||
"additionalProperties": {
|
||||
"type": "string",
|
||||
"pattern": "^[a-fA-F0-9]+$"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"predicateType": {
|
||||
"type": "string",
|
||||
"const": "uncertainty.stella/v1"
|
||||
},
|
||||
"predicate": {
|
||||
"$ref": "#/$defs/UncertaintyPredicate"
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"UncertaintyPredicate": {
|
||||
"type": "object",
|
||||
"required": ["graphRevisionId", "aggregateTier", "meanEntropy", "unknownCount", "evaluatedAt"],
|
||||
"properties": {
|
||||
"graphRevisionId": {
|
||||
"type": "string",
|
||||
"description": "Unique identifier for the knowledge graph revision used in evaluation"
|
||||
},
|
||||
"aggregateTier": {
|
||||
"type": "string",
|
||||
"enum": ["T1", "T2", "T3", "T4"],
|
||||
"description": "Aggregate uncertainty tier (T1 = highest uncertainty, T4 = lowest)"
|
||||
},
|
||||
"meanEntropy": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1,
|
||||
"description": "Mean entropy across all unknowns (0.0 = certain, 1.0 = maximum uncertainty)"
|
||||
},
|
||||
"unknownCount": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Total count of unknowns in this evaluation"
|
||||
},
|
||||
"markers": {
|
||||
"type": "array",
|
||||
"description": "Breakdown of unknowns by marker kind",
|
||||
"items": {
|
||||
"$ref": "#/$defs/UnknownMarker"
|
||||
}
|
||||
},
|
||||
"evaluatedAt": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "ISO-8601 timestamp of uncertainty evaluation"
|
||||
},
|
||||
"policyRevisionId": {
|
||||
"type": "string",
|
||||
"description": "Optional policy revision ID if uncertainty was evaluated with policy"
|
||||
},
|
||||
"imageDigest": {
|
||||
"type": "string",
|
||||
"pattern": "^sha256:[a-fA-F0-9]{64}$",
|
||||
"description": "Optional container image digest"
|
||||
}
|
||||
}
|
||||
},
|
||||
"UnknownMarker": {
|
||||
"type": "object",
|
||||
"required": ["kind", "count", "entropy"],
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string",
|
||||
"enum": ["U-RCH", "U-ID", "U-PROV", "U-VEX", "U-FEED", "U-CONFIG", "U-ANALYZER"],
|
||||
"description": "Unknown marker kind code"
|
||||
},
|
||||
"count": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"description": "Count of unknowns with this marker"
|
||||
},
|
||||
"entropy": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1,
|
||||
"description": "Mean entropy for this marker kind"
|
||||
},
|
||||
"tier": {
|
||||
"type": "string",
|
||||
"enum": ["T1", "T2", "T3", "T4"],
|
||||
"description": "Uncertainty tier for this marker kind"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,10 @@
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="JsonSchema.Net" Version="7.3.4" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
|
||||
|
||||
@@ -118,11 +118,14 @@ public sealed class PredicateSchemaValidator : IPredicateSchemaValidator
|
||||
{
|
||||
foreach (var detail in results.Details)
|
||||
{
|
||||
if (detail.HasErrors)
|
||||
if (detail.HasErrors && detail.Errors is not null)
|
||||
{
|
||||
var errorMsg = detail.Errors?.FirstOrDefault()?.Value ?? "Unknown error";
|
||||
var location = detail.InstanceLocation.ToString();
|
||||
errors.Add($"{location}: {errorMsg}");
|
||||
foreach (var error in detail.Errors)
|
||||
{
|
||||
var errorMsg = error.Value ?? "Unknown error";
|
||||
var location = detail.InstanceLocation.ToString();
|
||||
errors.Add($"{location}: {errorMsg}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -161,7 +164,9 @@ public sealed class PredicateSchemaValidator : IPredicateSchemaValidator
|
||||
|
||||
try
|
||||
{
|
||||
var schema = JsonSchema.FromStream(stream);
|
||||
using var reader = new StreamReader(stream);
|
||||
var schemaJson = reader.ReadToEnd();
|
||||
var schema = JsonSchema.FromText(schemaJson);
|
||||
schemas[key] = schema;
|
||||
}
|
||||
catch (Exception ex)
|
||||
|
||||
@@ -73,6 +73,18 @@ public sealed record ProofSpineRequest
|
||||
/// Key profile to use for signing the spine statement.
|
||||
/// </summary>
|
||||
public SigningKeyProfile SigningProfile { get; init; } = SigningKeyProfile.Authority;
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty state attestation to include in the spine.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
public string? UncertaintyStatementId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty budget attestation to include in the spine.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
public string? UncertaintyBudgetStatementId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -92,4 +92,26 @@ public interface IStatementBuilder
|
||||
SbomLinkageStatement BuildSbomLinkageStatement(
|
||||
IReadOnlyList<ProofSubject> subjects,
|
||||
SbomLinkagePayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build an Uncertainty statement for signing.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this uncertainty relates to.</param>
|
||||
/// <param name="predicate">The uncertainty payload.</param>
|
||||
/// <returns>An UncertaintyStatement ready for signing.</returns>
|
||||
UncertaintyStatement BuildUncertaintyStatement(
|
||||
ProofSubject subject,
|
||||
UncertaintyPayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build an Uncertainty Budget statement for signing.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this budget evaluation relates to.</param>
|
||||
/// <param name="predicate">The uncertainty budget payload.</param>
|
||||
/// <returns>An UncertaintyBudgetStatement ready for signing.</returns>
|
||||
UncertaintyBudgetStatement BuildUncertaintyBudgetStatement(
|
||||
ProofSubject subject,
|
||||
UncertaintyBudgetPayload predicate);
|
||||
}
|
||||
|
||||
@@ -103,4 +103,34 @@ public sealed class StatementBuilder : IStatementBuilder
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public UncertaintyStatement BuildUncertaintyStatement(
|
||||
ProofSubject subject,
|
||||
UncertaintyPayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new UncertaintyStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public UncertaintyBudgetStatement BuildUncertaintyBudgetStatement(
|
||||
ProofSubject subject,
|
||||
UncertaintyBudgetPayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new UncertaintyBudgetStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,6 +91,13 @@ public sealed record DeltaVerdictPredicate
|
||||
/// </summary>
|
||||
[JsonPropertyName("comparedAt")]
|
||||
public required DateTimeOffset ComparedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Unknowns budget evaluation result (if available).
|
||||
/// Sprint: SPRINT_5100_0004_0001 Task T5
|
||||
/// </summary>
|
||||
[JsonPropertyName("unknownsBudget")]
|
||||
public UnknownsBudgetPredicate? UnknownsBudget { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -0,0 +1,108 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UnknownsBudgetPredicate.cs
|
||||
// Sprint: SPRINT_5100_0004_0001_unknowns_budget_ci_gates
|
||||
// Task: T5 - Attestation Integration
|
||||
// Description: DSSE predicate for unknowns budget evaluation in verdict attestations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
/// <summary>
/// DSSE predicate payload describing the outcome of an unknowns-budget
/// evaluation attached to a verdict attestation.
/// Serialized under predicateType <c>unknowns-budget.stella/v1</c>.
/// </summary>
public sealed record UnknownsBudgetPredicate
{
    /// <summary>Predicate type URI identifying unknowns-budget attestations.</summary>
    public const string PredicateType = "unknowns-budget.stella/v1";

    /// <summary>Environment the budget was evaluated against (prod, stage, dev).</summary>
    [JsonPropertyName("environment")]
    public required string Environment { get; init; }

    /// <summary>Total number of unknowns discovered by the scan.</summary>
    [JsonPropertyName("totalUnknowns")]
    public required int TotalUnknowns { get; init; }

    /// <summary>Maximum number of unknowns the budget allows; null means unlimited.</summary>
    [JsonPropertyName("totalLimit")]
    public int? TotalLimit { get; init; }

    /// <summary>True when the scan stayed within the configured budget.</summary>
    [JsonPropertyName("isWithinBudget")]
    public required bool IsWithinBudget { get; init; }

    /// <summary>Share of the budget consumed, expressed as a percentage (may exceed 100).</summary>
    [JsonPropertyName("percentageUsed")]
    public decimal PercentageUsed { get; init; }

    /// <summary>Suggested action when the budget is exceeded.</summary>
    [JsonPropertyName("recommendedAction")]
    public string? RecommendedAction { get; init; }

    /// <summary>Per-reason-code violations; empty when the budget holds.</summary>
    [JsonPropertyName("violations")]
    public ImmutableArray<BudgetViolationPredicate> Violations { get; init; }
        = ImmutableArray<BudgetViolationPredicate>.Empty;

    /// <summary>Count of unknowns grouped by reason code.</summary>
    [JsonPropertyName("byReasonCode")]
    public ImmutableDictionary<string, int> ByReasonCode { get; init; }
        = ImmutableDictionary<string, int>.Empty;

    /// <summary>UTC moment at which the budget was evaluated.</summary>
    [JsonPropertyName("evaluatedAt")]
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>Optional human-readable summary of the budget status.</summary>
    [JsonPropertyName("message")]
    public string? Message { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single budget violation scoped to one reason code.
/// </summary>
public sealed record BudgetViolationPredicate
{
    /// <summary>Reason code that was violated (e.g., Reachability, Identity).</summary>
    [JsonPropertyName("reasonCode")]
    public required string ReasonCode { get; init; }

    /// <summary>Observed number of unknowns carrying this reason code.</summary>
    [JsonPropertyName("count")]
    public required int Count { get; init; }

    /// <summary>Configured maximum for this reason code.</summary>
    [JsonPropertyName("limit")]
    public required int Limit { get; init; }
}
|
||||
@@ -61,4 +61,18 @@ public sealed record ProofSpinePayload
|
||||
/// </summary>
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty state attestation.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
[JsonPropertyName("uncertaintyStatementId")]
|
||||
public string? UncertaintyStatementId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty budget evaluation attestation.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
[JsonPropertyName("uncertaintyBudgetStatementId")]
|
||||
public string? UncertaintyBudgetStatementId { get; init; }
|
||||
}
|
||||
|
||||
@@ -0,0 +1,257 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UncertaintyBudgetStatement.cs
|
||||
// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
// Description: In-toto predicate type for uncertainty budget evaluation attestations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
/// In-toto statement wrapping an uncertainty budget evaluation.
/// Serialized with predicate type <c>uncertainty-budget.stella/v1</c>.
/// </summary>
public sealed record UncertaintyBudgetStatement : InTotoStatement
{
    /// <inheritdoc />
    [JsonPropertyName("predicateType")]
    public override string PredicateType
    {
        get { return "uncertainty-budget.stella/v1"; }
    }

    /// <summary>Budget evaluation payload carried by this statement.</summary>
    [JsonPropertyName("predicate")]
    public required UncertaintyBudgetPayload Predicate { get; init; }
}
|
||||
|
||||
/// <summary>
/// Payload for uncertainty budget evaluation statements: the budget that was
/// enforced, the counts that were observed, and the resulting decision.
/// </summary>
public sealed record UncertaintyBudgetPayload
{
    /// <summary>Schema version of this predicate payload.</summary>
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = "1.0";

    /// <summary>Environment the budget applies to (prod, staging, dev).</summary>
    [JsonPropertyName("environment")]
    public required string Environment { get; init; }

    /// <summary>True when the evaluation stayed within budget.</summary>
    [JsonPropertyName("passed")]
    public required bool Passed { get; init; }

    /// <summary>Action recommended by the budget policy: pass, warn, or block.</summary>
    [JsonPropertyName("action")]
    public required string Action { get; init; }

    /// <summary>Budget definition that was enforced.</summary>
    [JsonPropertyName("budget")]
    public required BudgetDefinition Budget { get; init; }

    /// <summary>Counts actually observed during evaluation.</summary>
    [JsonPropertyName("observed")]
    public required BudgetObservation Observed { get; init; }

    /// <summary>Violations detected during evaluation, if any.</summary>
    [JsonPropertyName("violations")]
    public IReadOnlyList<BudgetViolationEntry>? Violations { get; init; }

    /// <summary>Exceptions applied to cover violations, if any.</summary>
    [JsonPropertyName("exceptionsApplied")]
    public IReadOnlyList<BudgetExceptionEntry>? ExceptionsApplied { get; init; }

    /// <summary>UTC timestamp of the evaluation.</summary>
    [JsonPropertyName("evaluatedAt")]
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>Digest of the policy bundle that supplied the budget rules.</summary>
    [JsonPropertyName("policyDigest")]
    public string? PolicyDigest { get; init; }

    /// <summary>Optional human-readable summary message.</summary>
    [JsonPropertyName("message")]
    public string? Message { get; init; }
}
|
||||
|
||||
/// <summary>
/// Declares a budget: the limits against which observed unknowns are checked.
/// </summary>
public sealed record BudgetDefinition
{
    /// <summary>Identifier of this budget.</summary>
    [JsonPropertyName("budgetId")]
    public required string BudgetId { get; init; }

    /// <summary>Cap on total unknowns; null means no overall cap.</summary>
    [JsonPropertyName("totalLimit")]
    public int? TotalLimit { get; init; }

    /// <summary>Caps keyed by reason code.</summary>
    [JsonPropertyName("reasonLimits")]
    public IReadOnlyDictionary<string, int>? ReasonLimits { get; init; }

    /// <summary>Caps keyed by tier (e.g., T1 = 0, T2 = 5).</summary>
    [JsonPropertyName("tierLimits")]
    public IReadOnlyDictionary<string, int>? TierLimits { get; init; }

    /// <summary>Cap on cumulative entropy, if configured.</summary>
    [JsonPropertyName("maxCumulativeEntropy")]
    public double? MaxCumulativeEntropy { get; init; }
}
|
||||
|
||||
/// <summary>
/// What the budget evaluation actually observed.
/// </summary>
public sealed record BudgetObservation
{
    /// <summary>Total unknowns observed.</summary>
    [JsonPropertyName("totalUnknowns")]
    public required int TotalUnknowns { get; init; }

    /// <summary>Observed unknowns grouped by reason code.</summary>
    [JsonPropertyName("byReasonCode")]
    public IReadOnlyDictionary<string, int>? ByReasonCode { get; init; }

    /// <summary>Observed unknowns grouped by tier.</summary>
    [JsonPropertyName("byTier")]
    public IReadOnlyDictionary<string, int>? ByTier { get; init; }

    /// <summary>Cumulative entropy across observed unknowns.</summary>
    [JsonPropertyName("cumulativeEntropy")]
    public double? CumulativeEntropy { get; init; }

    /// <summary>Mean entropy per unknown.</summary>
    [JsonPropertyName("meanEntropy")]
    public double? MeanEntropy { get; init; }
}
|
||||
|
||||
/// <summary>
/// One budget limit that was exceeded, with the observed overage.
/// </summary>
public sealed record BudgetViolationEntry
{
    /// <summary>Kind of limit violated (total, reason, tier, entropy).</summary>
    [JsonPropertyName("limitType")]
    public required string LimitType { get; init; }

    /// <summary>Key of the specific limit (e.g., "U-RCH" for reason, "T1" for tier).</summary>
    [JsonPropertyName("limitKey")]
    public string? LimitKey { get; init; }

    /// <summary>Configured limit value.</summary>
    [JsonPropertyName("limit")]
    public required double Limit { get; init; }

    /// <summary>Observed value that breached the limit.</summary>
    [JsonPropertyName("observed")]
    public required double Observed { get; init; }

    /// <summary>How far past the limit the observation went.</summary>
    [JsonPropertyName("exceeded")]
    public required double Exceeded { get; init; }

    /// <summary>Severity classification (critical, high, medium, low).</summary>
    [JsonPropertyName("severity")]
    public string? Severity { get; init; }
}
|
||||
|
||||
/// <summary>
/// An approved exception that covers one or more budget violations.
/// </summary>
public sealed record BudgetExceptionEntry
{
    /// <summary>Identifier of the exception.</summary>
    [JsonPropertyName("exceptionId")]
    public required string ExceptionId { get; init; }

    /// <summary>Reason codes the exception covers.</summary>
    [JsonPropertyName("coveredReasons")]
    public IReadOnlyList<string>? CoveredReasons { get; init; }

    /// <summary>Tiers the exception covers.</summary>
    [JsonPropertyName("coveredTiers")]
    public IReadOnlyList<string>? CoveredTiers { get; init; }

    /// <summary>Expiry of the exception, when time-limited.</summary>
    [JsonPropertyName("expiresAt")]
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>Why the exception was granted.</summary>
    [JsonPropertyName("justification")]
    public string? Justification { get; init; }

    /// <summary>Approver of the exception.</summary>
    [JsonPropertyName("approvedBy")]
    public string? ApprovedBy { get; init; }
}
|
||||
@@ -0,0 +1,162 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UncertaintyStatement.cs
|
||||
// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
// Description: In-toto predicate type for uncertainty state attestations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
/// In-toto statement wrapping an uncertainty state attestation.
/// Serialized with predicate type <c>uncertainty.stella/v1</c>.
/// </summary>
public sealed record UncertaintyStatement : InTotoStatement
{
    /// <inheritdoc />
    [JsonPropertyName("predicateType")]
    public override string PredicateType
    {
        get { return "uncertainty.stella/v1"; }
    }

    /// <summary>Uncertainty state payload carried by this statement.</summary>
    [JsonPropertyName("predicate")]
    public required UncertaintyPayload Predicate { get; init; }
}
|
||||
|
||||
/// <summary>
/// Payload for uncertainty state statements: the aggregate tier, the entropy
/// summary, and the individual states that produced them.
/// </summary>
public sealed record UncertaintyPayload
{
    /// <summary>Schema version of this predicate payload.</summary>
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = "1.0";

    /// <summary>Aggregate uncertainty tier (T1 = high uncertainty .. T4 = negligible).</summary>
    [JsonPropertyName("aggregateTier")]
    public required string AggregateTier { get; init; }

    /// <summary>Mean entropy across all uncertainty states, in [0.0, 1.0].</summary>
    [JsonPropertyName("meanEntropy")]
    public required double MeanEntropy { get; init; }

    /// <summary>Total count of uncertainty markers.</summary>
    [JsonPropertyName("markerCount")]
    public required int MarkerCount { get; init; }

    /// <summary>Risk multiplier applied due to uncertainty (e.g., 1.5 = 50% boost).</summary>
    [JsonPropertyName("riskModifier")]
    public required double RiskModifier { get; init; }

    /// <summary>Individual states contributing to the aggregate.</summary>
    [JsonPropertyName("states")]
    public required IReadOnlyList<UncertaintyStateEntry> States { get; init; }

    /// <summary>Evidence references backing the uncertainty claims.</summary>
    [JsonPropertyName("evidence")]
    public IReadOnlyList<UncertaintyEvidence>? Evidence { get; init; }

    /// <summary>UTC timestamp at which the state was computed.</summary>
    [JsonPropertyName("computedAt")]
    public required DateTimeOffset ComputedAt { get; init; }

    /// <summary>Reference to the knowledge snapshot that was used.</summary>
    [JsonPropertyName("knowledgeSnapshotId")]
    public string? KnowledgeSnapshotId { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single uncertainty state contributing to the aggregate payload.
/// </summary>
public sealed record UncertaintyStateEntry
{
    /// <summary>Uncertainty code (U1-U4 or custom).</summary>
    [JsonPropertyName("code")]
    public required string Code { get; init; }

    /// <summary>Human-readable name of this uncertainty type.</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Entropy of this state in [0.0, 1.0]; larger means more uncertain.</summary>
    [JsonPropertyName("entropy")]
    public required double Entropy { get; init; }

    /// <summary>Tier classification of this state (T1-T4).</summary>
    [JsonPropertyName("tier")]
    public required string Tier { get; init; }

    /// <summary>Marker kind that produced this uncertainty, when known.</summary>
    [JsonPropertyName("markerKind")]
    public string? MarkerKind { get; init; }

    /// <summary>Confidence band (high, medium, low).</summary>
    [JsonPropertyName("confidenceBand")]
    public string? ConfidenceBand { get; init; }
}
|
||||
|
||||
/// <summary>
/// A reference to evidence supporting an uncertainty claim.
/// </summary>
public sealed record UncertaintyEvidence
{
    /// <summary>Kind of evidence (advisory, binary, purl, etc.).</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }

    /// <summary>Pointer to the evidence source.</summary>
    [JsonPropertyName("reference")]
    public required string Reference { get; init; }

    /// <summary>Digest for content-addressed evidence, when available.</summary>
    [JsonPropertyName("digest")]
    public string? Digest { get; init; }

    /// <summary>Human-readable description.</summary>
    [JsonPropertyName("description")]
    public string? Description { get; init; }
}
|
||||
@@ -183,4 +183,18 @@ public sealed record VerdictOutputs
|
||||
/// </summary>
|
||||
[JsonPropertyName("vexVerdictId")]
|
||||
public required string VexVerdictId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty state attestation.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
[JsonPropertyName("uncertaintyStatementId")]
|
||||
public string? UncertaintyStatementId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: ID of the uncertainty budget attestation.
|
||||
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
/// </summary>
|
||||
[JsonPropertyName("uncertaintyBudgetStatementId")]
|
||||
public string? UncertaintyBudgetStatementId { get; init; }
|
||||
}
|
||||
|
||||
@@ -0,0 +1,259 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UncertaintyStatementTests.cs
|
||||
// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
|
||||
// Description: Unit tests for uncertainty attestation statements.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Builders;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
/// Unit tests for UncertaintyStatement and UncertaintyBudgetStatement.
/// Exercises statement construction via StatementBuilder, JSON round-tripping,
/// and null-argument guards. A fixed timestamp keeps assertions deterministic.
/// </summary>
public sealed class UncertaintyStatementTests
{
    // Builder under test; produces in-toto statements from a subject + predicate.
    private readonly StatementBuilder _builder = new();

    // Fixed clock value so ComputedAt/EvaluatedAt never depend on wall time.
    private readonly DateTimeOffset _fixedTime = new(2025, 12, 22, 10, 0, 0, TimeSpan.Zero);

    // Verifies the builder stamps the in-toto envelope type, the
    // uncertainty.stella/v1 predicate type, and carries the payload through.
    [Fact]
    public void BuildUncertaintyStatement_SetsPredicateTypeAndSubject()
    {
        // Arrange: one subject and a payload with two contributing states.
        var subject = CreateSubject("image:demo@sha256:abc123", "abc123");
        var predicate = new UncertaintyPayload
        {
            AggregateTier = "T2",
            MeanEntropy = 0.45,
            MarkerCount = 3,
            RiskModifier = 1.25,
            States = new[]
            {
                new UncertaintyStateEntry
                {
                    Code = "U1",
                    Name = "MissingSymbolResolution",
                    Entropy = 0.5,
                    Tier = "T2",
                    MarkerKind = "missing_symbol"
                },
                new UncertaintyStateEntry
                {
                    Code = "U2",
                    Name = "MissingPurl",
                    Entropy = 0.4,
                    Tier = "T3"
                }
            },
            ComputedAt = _fixedTime
        };

        // Act
        var statement = _builder.BuildUncertaintyStatement(subject, predicate);

        // Assert: envelope fields plus payload fields survive unchanged.
        Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
        Assert.Equal("uncertainty.stella/v1", statement.PredicateType);
        Assert.Single(statement.Subject);
        Assert.Equal(subject.Name, statement.Subject[0].Name);
        Assert.Equal("T2", statement.Predicate.AggregateTier);
        Assert.Equal(0.45, statement.Predicate.MeanEntropy);
        Assert.Equal(2, statement.Predicate.States.Count);
    }

    // Same envelope checks for the budget statement, including a failing
    // ("block") evaluation with one total-limit violation.
    [Fact]
    public void BuildUncertaintyBudgetStatement_SetsPredicateTypeAndSubject()
    {
        // Arrange: budget of 5 total unknowns, 8 observed -> blocked.
        var subject = CreateSubject("image:demo@sha256:abc123", "abc123");
        var predicate = new UncertaintyBudgetPayload
        {
            Environment = "production",
            Passed = false,
            Action = "block",
            Budget = new BudgetDefinition
            {
                BudgetId = "prod-budget-v1",
                TotalLimit = 5,
                ReasonLimits = new Dictionary<string, int>
                {
                    ["U-RCH"] = 2,
                    ["U-ID"] = 3
                }
            },
            Observed = new BudgetObservation
            {
                TotalUnknowns = 8,
                ByReasonCode = new Dictionary<string, int>
                {
                    ["U-RCH"] = 4,
                    ["U-ID"] = 4
                }
            },
            Violations = new[]
            {
                new BudgetViolationEntry
                {
                    LimitType = "total",
                    Limit = 5,
                    Observed = 8,
                    Exceeded = 3,
                    Severity = "high"
                }
            },
            EvaluatedAt = _fixedTime
        };

        // Act
        var statement = _builder.BuildUncertaintyBudgetStatement(subject, predicate);

        // Assert
        Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
        Assert.Equal("uncertainty-budget.stella/v1", statement.PredicateType);
        Assert.Single(statement.Subject);
        Assert.Equal("production", statement.Predicate.Environment);
        Assert.False(statement.Predicate.Passed);
        Assert.Equal("block", statement.Predicate.Action);
        Assert.NotNull(statement.Predicate.Violations);
        Assert.Single(statement.Predicate.Violations);
    }

    // Serializes an uncertainty statement and deserializes it back, checking
    // the fields that drive downstream verification survive the round trip.
    [Fact]
    public void UncertaintyStatement_RoundTripsViaJson()
    {
        // Arrange
        var subject = CreateSubject("image:demo", "abc123");
        var statement = _builder.BuildUncertaintyStatement(subject, new UncertaintyPayload
        {
            AggregateTier = "T3",
            MeanEntropy = 0.25,
            MarkerCount = 1,
            RiskModifier = 1.1,
            States = new[]
            {
                new UncertaintyStateEntry
                {
                    Code = "U3",
                    Name = "UntrustedAdvisory",
                    Entropy = 0.25,
                    Tier = "T3"
                }
            },
            ComputedAt = _fixedTime,
            KnowledgeSnapshotId = "ksm:sha256:abc123"
        });

        // Act: default serializer options — the records rely on
        // [JsonPropertyName] attributes, not a naming policy.
        var json = JsonSerializer.Serialize(statement);
        var restored = JsonSerializer.Deserialize<UncertaintyStatement>(json);

        // Assert
        Assert.NotNull(restored);
        Assert.Equal(statement.PredicateType, restored.PredicateType);
        Assert.Equal(statement.Subject[0].Name, restored.Subject[0].Name);
        Assert.Equal(statement.Predicate.AggregateTier, restored.Predicate.AggregateTier);
        Assert.Equal(statement.Predicate.MeanEntropy, restored.Predicate.MeanEntropy);
        Assert.Equal(statement.Predicate.KnowledgeSnapshotId, restored.Predicate.KnowledgeSnapshotId);
    }

    // Round-trips a passing budget statement, including the optional Message.
    [Fact]
    public void UncertaintyBudgetStatement_RoundTripsViaJson()
    {
        // Arrange: 3 observed unknowns against a limit of 10 -> pass.
        var subject = CreateSubject("image:demo", "abc123");
        var statement = _builder.BuildUncertaintyBudgetStatement(subject, new UncertaintyBudgetPayload
        {
            Environment = "staging",
            Passed = true,
            Action = "pass",
            Budget = new BudgetDefinition
            {
                BudgetId = "staging-budget",
                TotalLimit = 10
            },
            Observed = new BudgetObservation
            {
                TotalUnknowns = 3
            },
            EvaluatedAt = _fixedTime,
            Message = "Budget check passed"
        });

        // Act
        var json = JsonSerializer.Serialize(statement);
        var restored = JsonSerializer.Deserialize<UncertaintyBudgetStatement>(json);

        // Assert
        Assert.NotNull(restored);
        Assert.Equal(statement.PredicateType, restored.PredicateType);
        Assert.Equal(statement.Predicate.Environment, restored.Predicate.Environment);
        Assert.True(restored.Predicate.Passed);
        Assert.Equal("Budget check passed", restored.Predicate.Message);
    }

    // Ensures applied exceptions (id, covered reasons, approver) make it into
    // the serialized JSON; asserts on substrings rather than full structure.
    [Fact]
    public void UncertaintyBudgetStatement_WithExceptions_SerializesCorrectly()
    {
        // Arrange: over the total limit, but an exception covers U-RCH -> pass.
        var subject = CreateSubject("image:demo", "abc123");
        var predicate = new UncertaintyBudgetPayload
        {
            Environment = "production",
            Passed = true,
            Action = "pass",
            Budget = new BudgetDefinition
            {
                BudgetId = "prod-budget",
                TotalLimit = 5
            },
            Observed = new BudgetObservation
            {
                TotalUnknowns = 7,
                ByReasonCode = new Dictionary<string, int>
                {
                    ["U-RCH"] = 4,
                    ["U-ID"] = 3
                }
            },
            ExceptionsApplied = new[]
            {
                new BudgetExceptionEntry
                {
                    ExceptionId = "EXC-2025-001",
                    CoveredReasons = new[] { "U-RCH" },
                    Justification = "Known limitation in reachability analysis",
                    ApprovedBy = "security-team",
                    ExpiresAt = _fixedTime.AddDays(30)
                }
            },
            EvaluatedAt = _fixedTime
        };

        // Act
        var statement = _builder.BuildUncertaintyBudgetStatement(subject, predicate);
        var json = JsonSerializer.Serialize(statement, new JsonSerializerOptions { WriteIndented = true });

        // Assert
        Assert.Contains("EXC-2025-001", json);
        Assert.Contains("U-RCH", json);
        Assert.Contains("security-team", json);
    }

    // Null subject must be rejected by the builder.
    [Fact]
    public void BuildUncertaintyStatement_NullSubject_Throws()
    {
        // Arrange: a minimal valid payload so only the subject is at fault.
        var predicate = new UncertaintyPayload
        {
            AggregateTier = "T4",
            MeanEntropy = 0.05,
            MarkerCount = 0,
            RiskModifier = 1.0,
            States = Array.Empty<UncertaintyStateEntry>(),
            ComputedAt = _fixedTime
        };

        // Act + Assert
        Assert.Throws<ArgumentNullException>(() => _builder.BuildUncertaintyStatement(null!, predicate));
    }

    // Null predicate must be rejected by the builder.
    [Fact]
    public void BuildUncertaintyBudgetStatement_NullPredicate_Throws()
    {
        var subject = CreateSubject("image:demo", "abc123");

        Assert.Throws<ArgumentNullException>(() => _builder.BuildUncertaintyBudgetStatement(subject, null!));
    }

    // Builds a ProofSubject with a single sha256 digest entry.
    private static ProofSubject CreateSubject(string name, string sha256Digest)
        => new()
        {
            Name = name,
            Digest = new Dictionary<string, string> { ["sha256"] = sha256Digest }
        };
}
|
||||
@@ -0,0 +1,241 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// UnknownsBudgetPredicateTests.cs
|
||||
// Sprint: SPRINT_5100_0004_0001_unknowns_budget_ci_gates
|
||||
// Task: T6 - Unit Tests
|
||||
// Description: Tests for UnknownsBudgetPredicate attestation integration.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
/// Tests for UnknownsBudgetPredicate: property construction, JSON
/// serialization/deserialization, and embedding in DeltaVerdictPredicate.
/// </summary>
public sealed class UnknownsBudgetPredicateTests
{
    // Web defaults + camelCase + null suppression; mirrors how attestation
    // payloads are expected to be serialized. WriteIndented makes the
    // substring assertions below ("\"environment\": \"prod\"") match.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    // Pins the predicate type URI constant.
    [Fact]
    public void PredicateType_IsCorrect()
    {
        Assert.Equal("unknowns-budget.stella/v1", UnknownsBudgetPredicate.PredicateType);
    }

    // Happy path: 3 of 10 unknowns used, within budget.
    [Fact]
    public void Create_WithinBudget_SetsCorrectProperties()
    {
        var predicate = new UnknownsBudgetPredicate
        {
            Environment = "prod",
            TotalUnknowns = 3,
            TotalLimit = 10,
            IsWithinBudget = true,
            PercentageUsed = 30m,
            EvaluatedAt = DateTimeOffset.UtcNow
        };

        Assert.Equal("prod", predicate.Environment);
        Assert.Equal(3, predicate.TotalUnknowns);
        Assert.Equal(10, predicate.TotalLimit);
        Assert.True(predicate.IsWithinBudget);
        Assert.Equal(30m, predicate.PercentageUsed);
    }

    // Over-budget path: recommended action and message are populated.
    [Fact]
    public void Create_ExceedsBudget_SetsCorrectProperties()
    {
        var predicate = new UnknownsBudgetPredicate
        {
            Environment = "prod",
            TotalUnknowns = 15,
            TotalLimit = 10,
            IsWithinBudget = false,
            PercentageUsed = 150m,
            RecommendedAction = "Block",
            Message = "Budget exceeded: 15 unknowns exceed limit of 10",
            EvaluatedAt = DateTimeOffset.UtcNow
        };

        Assert.False(predicate.IsWithinBudget);
        Assert.Equal("Block", predicate.RecommendedAction);
        Assert.Contains("Budget exceeded", predicate.Message);
    }

    // Violations collection preserves order and per-entry values.
    [Fact]
    public void Create_WithViolations_SerializesCorrectly()
    {
        var violations = ImmutableArray.Create(
            new BudgetViolationPredicate
            {
                ReasonCode = "Reachability",
                Count = 5,
                Limit = 3
            },
            new BudgetViolationPredicate
            {
                ReasonCode = "Identity",
                Count = 2,
                Limit = 1
            }
        );

        var predicate = new UnknownsBudgetPredicate
        {
            Environment = "stage",
            TotalUnknowns = 7,
            TotalLimit = 5,
            IsWithinBudget = false,
            Violations = violations,
            EvaluatedAt = DateTimeOffset.UtcNow
        };

        Assert.Equal(2, predicate.Violations.Length);
        Assert.Equal("Reachability", predicate.Violations[0].ReasonCode);
        Assert.Equal(5, predicate.Violations[0].Count);
    }

    // ByReasonCode breakdown keeps per-key counts intact.
    [Fact]
    public void Create_WithByReasonCode_SerializesCorrectly()
    {
        var byReasonCode = ImmutableDictionary.CreateRange(new[]
        {
            new KeyValuePair<string, int>("Reachability", 5),
            new KeyValuePair<string, int>("Identity", 2),
            new KeyValuePair<string, int>("VexConflict", 1)
        });

        var predicate = new UnknownsBudgetPredicate
        {
            Environment = "dev",
            TotalUnknowns = 8,
            TotalLimit = 20,
            IsWithinBudget = true,
            ByReasonCode = byReasonCode,
            EvaluatedAt = DateTimeOffset.UtcNow
        };

        Assert.Equal(3, predicate.ByReasonCode.Count);
        Assert.Equal(5, predicate.ByReasonCode["Reachability"]);
    }

    // Serialized JSON uses the [JsonPropertyName] camelCase names.
    // Fixed EvaluatedAt keeps the output deterministic.
    [Fact]
    public void Serialize_ToJson_ProducesValidOutput()
    {
        var predicate = new UnknownsBudgetPredicate
        {
            Environment = "prod",
            TotalUnknowns = 3,
            TotalLimit = 10,
            IsWithinBudget = true,
            PercentageUsed = 30m,
            EvaluatedAt = new DateTimeOffset(2025, 12, 22, 12, 0, 0, TimeSpan.Zero)
        };

        var json = JsonSerializer.Serialize(predicate, JsonOptions);

        // Indented output has a space after the colon, hence these literals.
        Assert.Contains("\"environment\": \"prod\"", json);
        Assert.Contains("\"totalUnknowns\": 3", json);
        Assert.Contains("\"totalLimit\": 10", json);
        Assert.Contains("\"isWithinBudget\": true", json);
    }

    // Deserializing hand-written JSON restores all fields, including the
    // nested violations array.
    [Fact]
    public void Deserialize_FromJson_RestoresProperties()
    {
        var json = """
        {
            "environment": "stage",
            "totalUnknowns": 7,
            "totalLimit": 5,
            "isWithinBudget": false,
            "percentageUsed": 140.0,
            "recommendedAction": "Warn",
            "violations": [
                {
                    "reasonCode": "Reachability",
                    "count": 5,
                    "limit": 3
                }
            ],
            "evaluatedAt": "2025-12-22T12:00:00Z"
        }
        """;

        var predicate = JsonSerializer.Deserialize<UnknownsBudgetPredicate>(json, JsonOptions);

        Assert.NotNull(predicate);
        Assert.Equal("stage", predicate.Environment);
        Assert.Equal(7, predicate.TotalUnknowns);
        Assert.Equal(5, predicate.TotalLimit);
        Assert.False(predicate.IsWithinBudget);
        Assert.Equal(140.0m, predicate.PercentageUsed);
        Assert.Single(predicate.Violations);
        Assert.Equal("Reachability", predicate.Violations[0].ReasonCode);
    }

    // A budget can be attached to a delta verdict predicate.
    [Fact]
    public void DeltaVerdictPredicate_IncludesUnknownsBudget()
    {
        var budget = new UnknownsBudgetPredicate
        {
            Environment = "prod",
            TotalUnknowns = 2,
            TotalLimit = 10,
            IsWithinBudget = true,
            EvaluatedAt = DateTimeOffset.UtcNow
        };

        var verdict = new DeltaVerdictPredicate
        {
            BeforeRevisionId = "rev-1",
            AfterRevisionId = "rev-2",
            HasMaterialChange = true,
            PriorityScore = 0.5,
            ComparedAt = DateTimeOffset.UtcNow,
            UnknownsBudget = budget
        };

        Assert.NotNull(verdict.UnknownsBudget);
        Assert.Equal("prod", verdict.UnknownsBudget.Environment);
        Assert.True(verdict.UnknownsBudget.IsWithinBudget);
    }

    // With a null budget, WhenWritingNull suppresses the property entirely.
    [Fact]
    public void DeltaVerdictPredicate_WithoutUnknownsBudget_SerializesCorrectly()
    {
        var verdict = new DeltaVerdictPredicate
        {
            BeforeRevisionId = "rev-1",
            AfterRevisionId = "rev-2",
            HasMaterialChange = false,
            PriorityScore = 0.0,
            ComparedAt = DateTimeOffset.UtcNow,
            UnknownsBudget = null
        };

        var json = JsonSerializer.Serialize(verdict, JsonOptions);

        Assert.DoesNotContain("unknownsBudget", json);
    }

    // Plain property checks on the violation record.
    [Fact]
    public void BudgetViolationPredicate_Properties_AreCorrect()
    {
        var violation = new BudgetViolationPredicate
        {
            ReasonCode = "FeedGap",
            Count = 10,
            Limit = 5
        };

        Assert.Equal("FeedGap", violation.ReasonCode);
        Assert.Equal(10, violation.Count);
        Assert.Equal(5, violation.Limit);
    }
}
|
||||
@@ -9,8 +9,8 @@ using StellaOps.Authority.Plugin.Ldap.Connections;
|
||||
using StellaOps.Authority.Plugin.Ldap.Tests.Fakes;
|
||||
using StellaOps.Authority.Plugin.Ldap.Tests.TestHelpers;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using Xunit;
|
||||
|
||||
@@ -10,9 +10,9 @@ using StellaOps.Authority.Plugin.Ldap.Monitoring;
|
||||
using StellaOps.Authority.Plugin.Ldap.Tests.TestHelpers;
|
||||
using StellaOps.Authority.Plugin.Ldap.Tests.Fakes;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Plugin.Ldap.Tests.Credentials;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System.Collections.Concurrent;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Plugin.Ldap.Tests.TestHelpers;
|
||||
|
||||
@@ -9,7 +9,7 @@ using StellaOps.Authority.InMemoryDriver;
|
||||
using StellaOps.Authority.Plugin.Ldap.Connections;
|
||||
using StellaOps.Authority.Plugin.Ldap.Security;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ using StellaOps.Authority.Plugin.Ldap.ClientProvisioning;
|
||||
using StellaOps.Authority.Plugin.Ldap.Connections;
|
||||
using StellaOps.Authority.Plugin.Ldap.Monitoring;
|
||||
using StellaOps.Authority.Plugin.Ldap.Security;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ using System.Threading.Tasks;
|
||||
using StellaOps.Authority.InMemoryDriver;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Plugin.Standard.Storage;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Plugin.Standard;
|
||||
using StellaOps.Authority.Plugin.Standard.Bootstrap;
|
||||
using StellaOps.Authority.Plugin.Standard.Storage;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Plugin.Standard.Storage;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Authority.InMemoryDriver;
|
||||
using StellaOps.Authority.Storage.InMemory.Initialization;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Storage.Extensions;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
|
||||
namespace StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Threading;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
|
||||
namespace StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
|
||||
@@ -9,8 +9,8 @@ using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.AspNetCore.TestHost;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Configuration;
|
||||
|
||||
@@ -13,8 +13,8 @@ using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using Xunit;
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
using System.Linq;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Authority.Audit;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Audit;
|
||||
|
||||
|
||||
@@ -6,9 +6,9 @@ using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Authority.Bootstrap;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -17,9 +17,9 @@ using StellaOps.Auth.Abstractions;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Configuration;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
@@ -11,7 +11,7 @@ using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Authority.Storage.InMemory.Extensions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.Postgres;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Infrastructure;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System.Collections.Concurrent;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Infrastructure;
|
||||
|
||||
@@ -30,8 +30,8 @@ using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.OpenIddict.Handlers;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
@@ -23,9 +23,9 @@ using StellaOps.Authority.OpenIddict.Handlers;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using StellaOps.Configuration;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
|
||||
@@ -5,8 +5,8 @@ using Microsoft.Extensions.Time.Testing;
|
||||
using OpenIddict.Abstractions;
|
||||
using OpenIddict.Server;
|
||||
using StellaOps.Authority.OpenIddict.Handlers;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Airgap;
|
||||
|
||||
@@ -5,7 +5,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ using System.Globalization;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
namespace StellaOps.Authority.Bootstrap;
|
||||
|
||||
@@ -10,7 +10,7 @@ using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Auth.ServerIntegration;
|
||||
using StellaOps.Authority.Console;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.Observability;
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Authority.Security;
|
||||
|
||||
@@ -19,7 +19,7 @@ using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Authority.Security;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
@@ -15,7 +15,7 @@ using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ using OpenIddict.Server;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.Airgap;
|
||||
using StellaOps.Authority.Security;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.OpenIddict.Handlers;
|
||||
|
||||
@@ -6,7 +6,7 @@ using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using OpenIddict.Abstractions;
|
||||
using OpenIddict.Server;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
|
||||
namespace StellaOps.Authority.OpenIddict.Handlers;
|
||||
|
||||
@@ -11,8 +11,8 @@ using Microsoft.Extensions.Logging;
|
||||
using OpenIddict.Abstractions;
|
||||
using OpenIddict.Extensions;
|
||||
using OpenIddict.Server;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
|
||||
|
||||
@@ -15,8 +15,8 @@ using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority.OpenIddict;
|
||||
using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using StellaOps.Authority.Security;
|
||||
|
||||
@@ -32,9 +32,9 @@ using StellaOps.Authority.Plugins.Abstractions;
|
||||
using StellaOps.Authority.Plugins;
|
||||
using StellaOps.Authority.Bootstrap;
|
||||
using StellaOps.Authority.Console;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.Postgres;
|
||||
using StellaOps.Authority.Storage.PostgresAdapters;
|
||||
using StellaOps.Authority.RateLimiting;
|
||||
@@ -54,7 +54,7 @@ using System.Text;
|
||||
using StellaOps.Authority.Signing;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Kms;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Security;
|
||||
using StellaOps.Authority.OpenApi;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
|
||||
@@ -10,7 +10,7 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Configuration;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using System;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
|
||||
namespace StellaOps.Authority.Security;
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ using System.Formats.Asn1;
|
||||
using System.Net;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Configuration;
|
||||
using Microsoft.IdentityModel.Tokens;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
|
||||
namespace StellaOps.Authority.Security;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System.Globalization;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Authority.Storage.InMemory.Documents;
|
||||
using StellaOps.Authority.Storage.InMemory.Sessions;
|
||||
using StellaOps.Authority.Storage.Documents;
|
||||
using StellaOps.Authority.Storage.Sessions;
|
||||
using StellaOps.Authority.Storage.InMemory.Stores;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="System.Collections.Immutable" Version="9.0.3" />
|
||||
<PackageReference Include="System.Text.Json" Version="9.0.4" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,81 @@
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for signing and verifying verdict manifests using DSSE.
|
||||
/// </summary>
|
||||
public interface IVerdictManifestSigner
|
||||
{
|
||||
/// <summary>
|
||||
/// Sign a verdict manifest.
|
||||
/// </summary>
|
||||
/// <param name="manifest">The manifest to sign.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Signed manifest with signature data populated.</returns>
|
||||
Task<VerdictManifest> SignAsync(VerdictManifest manifest, CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Verify the signature on a verdict manifest.
|
||||
/// </summary>
|
||||
/// <param name="manifest">The manifest to verify.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Verification result.</returns>
|
||||
Task<SignatureVerificationResult> VerifyAsync(VerdictManifest manifest, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of signature verification.
|
||||
/// </summary>
|
||||
public sealed record SignatureVerificationResult
|
||||
{
|
||||
/// <summary>True if signature is valid.</summary>
|
||||
public required bool Valid { get; init; }
|
||||
|
||||
/// <summary>Key ID that signed the manifest.</summary>
|
||||
public string? SigningKeyId { get; init; }
|
||||
|
||||
/// <summary>Signature algorithm used.</summary>
|
||||
public string? Algorithm { get; init; }
|
||||
|
||||
/// <summary>Timestamp when signature was created.</summary>
|
||||
public DateTimeOffset? SignedAt { get; init; }
|
||||
|
||||
/// <summary>Error message if verification failed.</summary>
|
||||
public string? Error { get; init; }
|
||||
|
||||
/// <summary>Rekor transparency log verification status.</summary>
|
||||
public RekorVerificationStatus? RekorStatus { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor transparency log verification status.
|
||||
/// </summary>
|
||||
public sealed record RekorVerificationStatus
|
||||
{
|
||||
/// <summary>True if log entry was verified.</summary>
|
||||
public required bool Verified { get; init; }
|
||||
|
||||
/// <summary>Log index in Rekor.</summary>
|
||||
public long? LogIndex { get; init; }
|
||||
|
||||
/// <summary>Integrated time from Rekor.</summary>
|
||||
public DateTimeOffset? IntegratedTime { get; init; }
|
||||
|
||||
/// <summary>Log ID.</summary>
|
||||
public string? LogId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Null implementation for environments where signing is disabled.
|
||||
/// </summary>
|
||||
public sealed class NullVerdictManifestSigner : IVerdictManifestSigner
|
||||
{
|
||||
public Task<VerdictManifest> SignAsync(VerdictManifest manifest, CancellationToken ct = default)
|
||||
=> Task.FromResult(manifest);
|
||||
|
||||
public Task<SignatureVerificationResult> VerifyAsync(VerdictManifest manifest, CancellationToken ct = default)
|
||||
=> Task.FromResult(new SignatureVerificationResult
|
||||
{
|
||||
Valid = true,
|
||||
Error = "Signing disabled",
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for verdict manifest persistence.
|
||||
/// </summary>
|
||||
public interface IVerdictManifestStore
|
||||
{
|
||||
/// <summary>
|
||||
/// Store a verdict manifest.
|
||||
/// </summary>
|
||||
/// <param name="manifest">The manifest to store.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The stored manifest.</returns>
|
||||
Task<VerdictManifest> StoreAsync(VerdictManifest manifest, CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieve a manifest by its ID.
|
||||
/// </summary>
|
||||
/// <param name="tenant">Tenant identifier.</param>
|
||||
/// <param name="manifestId">Manifest identifier.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The manifest or null if not found.</returns>
|
||||
Task<VerdictManifest?> GetByIdAsync(string tenant, string manifestId, CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Retrieve the latest manifest for a specific asset and vulnerability.
|
||||
/// </summary>
|
||||
/// <param name="tenant">Tenant identifier.</param>
|
||||
/// <param name="assetDigest">Asset digest.</param>
|
||||
/// <param name="vulnerabilityId">Vulnerability identifier.</param>
|
||||
/// <param name="policyHash">Optional policy hash filter.</param>
|
||||
/// <param name="latticeVersion">Optional lattice version filter.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The latest matching manifest or null.</returns>
|
||||
Task<VerdictManifest?> GetByScopeAsync(
|
||||
string tenant,
|
||||
string assetDigest,
|
||||
string vulnerabilityId,
|
||||
string? policyHash = null,
|
||||
string? latticeVersion = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// List manifests by policy hash and lattice version.
|
||||
/// </summary>
|
||||
/// <param name="tenant">Tenant identifier.</param>
|
||||
/// <param name="policyHash">Policy hash.</param>
|
||||
/// <param name="latticeVersion">Lattice version.</param>
|
||||
/// <param name="limit">Maximum results to return.</param>
|
||||
/// <param name="pageToken">Continuation token for pagination.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>List of matching manifests.</returns>
|
||||
Task<VerdictManifestPage> ListByPolicyAsync(
|
||||
string tenant,
|
||||
string policyHash,
|
||||
string latticeVersion,
|
||||
int limit = 100,
|
||||
string? pageToken = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// List manifests for a specific asset.
|
||||
/// </summary>
|
||||
/// <param name="tenant">Tenant identifier.</param>
|
||||
/// <param name="assetDigest">Asset digest.</param>
|
||||
/// <param name="limit">Maximum results to return.</param>
|
||||
/// <param name="pageToken">Continuation token for pagination.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>List of matching manifests.</returns>
|
||||
Task<VerdictManifestPage> ListByAssetAsync(
|
||||
string tenant,
|
||||
string assetDigest,
|
||||
int limit = 100,
|
||||
string? pageToken = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Delete a manifest by ID.
|
||||
/// </summary>
|
||||
/// <param name="tenant">Tenant identifier.</param>
|
||||
/// <param name="manifestId">Manifest identifier.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>True if deleted, false if not found.</returns>
|
||||
Task<bool> DeleteAsync(string tenant, string manifestId, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Paginated result for manifest list queries.
|
||||
/// </summary>
|
||||
public sealed record VerdictManifestPage
|
||||
{
|
||||
/// <summary>Manifests in this page.</summary>
|
||||
public required ImmutableArray<VerdictManifest> Manifests { get; init; }
|
||||
|
||||
/// <summary>Token for retrieving the next page, or null if no more pages.</summary>
|
||||
public string? NextPageToken { get; init; }
|
||||
|
||||
/// <summary>Total count if available.</summary>
|
||||
public int? TotalCount { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
/// In-memory implementation of verdict manifest store for testing and development.
/// Not intended for production use: data is lost on process exit and pagination
/// tokens are plain offsets into a fresh enumeration (no snapshot isolation).
/// </summary>
public sealed class InMemoryVerdictManifestStore : IVerdictManifestStore
{
    // Keyed by (tenant, manifestId); ConcurrentDictionary gives thread-safe upserts.
    private readonly ConcurrentDictionary<(string Tenant, string ManifestId), VerdictManifest> _manifests = new();

    /// <summary>Insert or overwrite a manifest keyed by tenant + manifest ID.</summary>
    public Task<VerdictManifest> StoreAsync(VerdictManifest manifest, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        _manifests[(manifest.Tenant, manifest.ManifestId)] = manifest;
        return Task.FromResult(manifest);
    }

    /// <summary>Look up a manifest by its identifier; null when absent.</summary>
    public Task<VerdictManifest?> GetByIdAsync(string tenant, string manifestId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestId);

        return Task.FromResult(_manifests.TryGetValue((tenant, manifestId), out var manifest) ? manifest : null);
    }

    /// <summary>
    /// Find the most recently evaluated manifest for a (tenant, asset, vulnerability)
    /// scope, optionally narrowed by policy hash and lattice version.
    /// </summary>
    public Task<VerdictManifest?> GetByScopeAsync(
        string tenant,
        string assetDigest,
        string vulnerabilityId,
        string? policyHash = null,
        string? latticeVersion = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);

        // Vulnerability IDs compare case-insensitively ("cve-..." vs "CVE-...");
        // tenant and digest are exact matches.
        var candidates = _manifests.Values
            .Where(m => m.Tenant == tenant
                && m.AssetDigest == assetDigest
                && m.VulnerabilityId.Equals(vulnerabilityId, StringComparison.OrdinalIgnoreCase));

        if (!string.IsNullOrWhiteSpace(policyHash))
        {
            candidates = candidates.Where(m => m.PolicyHash == policyHash);
        }

        if (!string.IsNullOrWhiteSpace(latticeVersion))
        {
            candidates = candidates.Where(m => m.LatticeVersion == latticeVersion);
        }

        var latest = candidates
            .OrderByDescending(m => m.EvaluatedAt)
            .FirstOrDefault();

        return Task.FromResult(latest);
    }

    /// <summary>List manifests evaluated under a specific policy + lattice version.</summary>
    public Task<VerdictManifestPage> ListByPolicyAsync(
        string tenant,
        string policyHash,
        string latticeVersion,
        int limit = 100,
        string? pageToken = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(policyHash);
        ArgumentException.ThrowIfNullOrWhiteSpace(latticeVersion);

        var filtered = _manifests.Values
            .Where(m => m.Tenant == tenant
                && m.PolicyHash == policyHash
                && m.LatticeVersion == latticeVersion);

        return Task.FromResult(BuildPage(filtered, limit, pageToken));
    }

    /// <summary>List manifests recorded for a specific asset digest.</summary>
    public Task<VerdictManifestPage> ListByAssetAsync(
        string tenant,
        string assetDigest,
        int limit = 100,
        string? pageToken = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest);

        var filtered = _manifests.Values
            .Where(m => m.Tenant == tenant && m.AssetDigest == assetDigest);

        return Task.FromResult(BuildPage(filtered, limit, pageToken));
    }

    /// <summary>Delete a manifest; returns false when it did not exist.</summary>
    public Task<bool> DeleteAsync(string tenant, string manifestId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestId);

        return Task.FromResult(_manifests.TryRemove((tenant, manifestId), out _));
    }

    /// <summary>
    /// Clear all stored manifests (for testing).
    /// </summary>
    public void Clear() => _manifests.Clear();

    /// <summary>
    /// Get count of stored manifests (for testing).
    /// </summary>
    public int Count => _manifests.Count;

    // Shared offset-token pagination (previously duplicated verbatim in both
    // list methods). Ordering is newest-first with manifest ID as a
    // deterministic tiebreaker for equal timestamps.
    private static VerdictManifestPage BuildPage(IEnumerable<VerdictManifest> filtered, int limit, string? pageToken)
    {
        // Fix: only accept strictly positive offsets so a negative or garbage
        // token cannot skew the computed NextPageToken (Skip(-n) acts like
        // Skip(0), but "offset + limit" would have under-counted).
        var offset = 0;
        if (!string.IsNullOrWhiteSpace(pageToken) && int.TryParse(pageToken, out var parsed) && parsed > 0)
        {
            offset = parsed;
        }

        // Fetch one extra row to detect whether another page exists.
        var window = filtered
            .OrderByDescending(m => m.EvaluatedAt)
            .ThenBy(m => m.ManifestId, StringComparer.Ordinal)
            .Skip(offset)
            .Take(limit + 1)
            .ToList();

        var hasMore = window.Count > limit;
        return new VerdictManifestPage
        {
            Manifests = window.Take(limit).ToImmutableArray(),
            NextPageToken = hasMore ? (offset + limit).ToString() : null,
        };
    }
}
|
||||
@@ -0,0 +1,199 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
/// VEX verdict status enumeration per OpenVEX specification.
/// </summary>
/// <remarks>
/// Wire names (e.g. "not_affected") are produced by the
/// <c>JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower)</c> registered in
/// the serializer options. The previous <c>[JsonPropertyName]</c> attributes were
/// removed because System.Text.Json ignores that attribute on enum members, so
/// they had no effect (use <c>[JsonStringEnumMemberName]</c> on .NET 9+ if a
/// name ever needs to diverge from the naming policy).
/// </remarks>
public enum VexStatus
{
    /// <summary>The product is affected by the vulnerability ("affected").</summary>
    Affected,

    /// <summary>The product is not affected ("not_affected").</summary>
    NotAffected,

    /// <summary>The vulnerability has been remediated ("fixed").</summary>
    Fixed,

    /// <summary>Impact analysis is still in progress ("under_investigation").</summary>
    UnderInvestigation,
}
|
||||
|
||||
/// <summary>
/// Immutable record of a VEX verdict together with every input required to
/// replay it deterministically.
/// </summary>
public sealed record VerdictManifest
{
    /// <summary>Identifier that uniquely names this manifest.</summary>
    public required string ManifestId { get; init; }

    /// <summary>Owning tenant of the verdict.</summary>
    public required string Tenant { get; init; }

    /// <summary>SHA256 digest identifying the evaluated asset.</summary>
    public required string AssetDigest { get; init; }

    /// <summary>Vulnerability identifier (e.g. a CVE) this verdict concerns.</summary>
    public required string VulnerabilityId { get; init; }

    /// <summary>Every input pinned at evaluation time, for replay.</summary>
    public required VerdictInputs Inputs { get; init; }

    /// <summary>Outcome of the verdict computation.</summary>
    public required VerdictResult Result { get; init; }

    /// <summary>SHA256 hash of the policy document in force during evaluation.</summary>
    public required string PolicyHash { get; init; }

    /// <summary>Trust-lattice configuration version used during evaluation.</summary>
    public required string LatticeVersion { get; init; }

    /// <summary>UTC moment at which the evaluation ran.</summary>
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>SHA256 digest over the canonical manifest payload.</summary>
    public required string ManifestDigest { get; init; }

    /// <summary>DSSE signature bytes, base64-encoded, when the manifest is signed.</summary>
    public string? SignatureBase64 { get; init; }

    /// <summary>Rekor transparency-log entry ID, when one was recorded.</summary>
    public string? RekorLogId { get; init; }
}
|
||||
|
||||
/// <summary>
/// The complete, pinned set of inputs needed to replay a verdict deterministically.
/// </summary>
public sealed record VerdictInputs
{
    /// <summary>Digests of the SBOMs consulted during evaluation.</summary>
    public required ImmutableArray<string> SbomDigests { get; init; }

    /// <summary>Identifiers of the vulnerability-feed snapshots consulted.</summary>
    public required ImmutableArray<string> VulnFeedSnapshotIds { get; init; }

    /// <summary>Digests of the VEX documents taken into account.</summary>
    public required ImmutableArray<string> VexDocumentDigests { get; init; }

    /// <summary>Reachability graph IDs, when reachability analysis contributed.</summary>
    public required ImmutableArray<string> ReachabilityGraphIds { get; init; }

    /// <summary>Pinned clock value so time-dependent logic replays identically.</summary>
    public required DateTimeOffset ClockCutoff { get; init; }
}
|
||||
|
||||
/// <summary>
/// Outcome of a verdict computation: the decided status, its confidence, and
/// the per-source reasoning behind it.
/// </summary>
public sealed record VerdictResult
{
    /// <summary>The VEX status the evaluation settled on.</summary>
    public required VexStatus Status { get; init; }

    /// <summary>Confidence in the decision, in the closed interval [0, 1].</summary>
    public required double Confidence { get; init; }

    /// <summary>Per-source explanations of how each VEX claim contributed.</summary>
    public required ImmutableArray<VerdictExplanation> Explanations { get; init; }

    /// <summary>Pointers to the evidence backing the decision.</summary>
    public required ImmutableArray<string> EvidenceRefs { get; init; }

    /// <summary>Set when sources made contradictory claims.</summary>
    public bool HasConflicts { get; init; }

    /// <summary>Set when a reachability/replay proof was required and supplied.</summary>
    public bool RequiresReplayProof { get; init; }
}
|
||||
|
||||
/// <summary>
/// Describes how one VEX source's claim was scored and whether it won.
/// </summary>
public sealed record VerdictExplanation
{
    /// <summary>Identifier of the contributing VEX source.</summary>
    public required string SourceId { get; init; }

    /// <summary>Human-readable rationale for this source's contribution.</summary>
    public required string Reason { get; init; }

    /// <summary>Provenance component of the score, in [0, 1].</summary>
    public required double ProvenanceScore { get; init; }

    /// <summary>Coverage component of the score, in [0, 1].</summary>
    public required double CoverageScore { get; init; }

    /// <summary>Replayability component of the score, in [0, 1].</summary>
    public required double ReplayabilityScore { get; init; }

    /// <summary>Multiplier reflecting the strength of the claim.</summary>
    public required double StrengthMultiplier { get; init; }

    /// <summary>Multiplier reflecting freshness decay of the claim.</summary>
    public required double FreshnessMultiplier { get; init; }

    /// <summary>The claim's final computed score.</summary>
    public required double ClaimScore { get; init; }

    /// <summary>The VEX status this particular source asserted.</summary>
    public required VexStatus AssertedStatus { get; init; }

    /// <summary>Set when this source's claim was accepted as the winner.</summary>
    public bool Accepted { get; init; }
}
|
||||
|
||||
/// <summary>
/// Serialization helper producing the canonical JSON form of a manifest, which
/// is also the byte stream that <see cref="ComputeDigest"/> hashes.
/// </summary>
public static class VerdictManifestSerializer
{
    // NOTE: these options define the canonical wire/digest form. Changing any
    // setting (or the declaration order of VerdictManifest's properties) changes
    // every computed manifest digest. snake_case names, compact output, nulls
    // omitted, enums emitted as snake_case strings.
    private static readonly JsonSerializerOptions s_options = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) },
    };

    /// <summary>
    /// Serialize the manifest to compact canonical JSON.
    /// NOTE(review): keys are emitted in property declaration order — System.Text.Json
    /// does not sort keys — so "canonical" here relies on stable declaration order,
    /// not alphabetical sorting.
    /// </summary>
    public static string Serialize(VerdictManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        return JsonSerializer.Serialize(manifest, s_options);
    }

    /// <summary>
    /// Deserialize a manifest from JSON; returns null for null/blank input
    /// instead of throwing.
    /// </summary>
    public static VerdictManifest? Deserialize(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            return null;
        }

        return JsonSerializer.Deserialize<VerdictManifest>(json, s_options);
    }

    /// <summary>
    /// Compute the "sha256:&lt;hex&gt;" digest of the canonical JSON representation.
    /// The digest, signature, and Rekor fields are blanked first so the digest
    /// covers only the verdict content (and so verification can recompute it).
    /// </summary>
    public static string ComputeDigest(VerdictManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        // Create a copy without the digest field for hashing
        var forHashing = manifest with { ManifestDigest = string.Empty, SignatureBase64 = null, RekorLogId = null };
        var json = Serialize(forHashing);
        var bytes = Encoding.UTF8.GetBytes(json);
        var hash = SHA256.HashData(bytes);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
@@ -0,0 +1,219 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
/// Fluent builder for constructing <see cref="VerdictManifest"/> instances with
/// deterministic ordering: input collections are trimmed, de-duplicated, and
/// ordinally sorted so the same logical inputs always yield the same canonical
/// manifest (and therefore the same digest).
/// </summary>
public sealed class VerdictManifestBuilder
{
    private string? _tenant;
    private string? _assetDigest;
    private string? _vulnerabilityId;
    private VerdictInputs? _inputs;
    private VerdictResult? _result;
    private string? _policyHash;
    private string? _latticeVersion;
    private DateTimeOffset _evaluatedAt = DateTimeOffset.UtcNow;
    // Pluggable ID factory so tests and replay can pin manifest IDs.
    private readonly Func<string> _idGenerator;

    /// <summary>Create a builder that generates random 32-hex-char GUID manifest IDs.</summary>
    public VerdictManifestBuilder()
        : this(() => Guid.NewGuid().ToString("n"))
    {
    }

    /// <summary>Create a builder with a caller-supplied manifest ID factory.</summary>
    /// <param name="idGenerator">Factory invoked once per <see cref="Build"/> call.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="idGenerator"/> is null.</exception>
    public VerdictManifestBuilder(Func<string> idGenerator)
    {
        _idGenerator = idGenerator ?? throw new ArgumentNullException(nameof(idGenerator));
    }

    /// <summary>Set the owning tenant (trimmed).</summary>
    public VerdictManifestBuilder WithTenant(string tenant)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            throw new ArgumentException("Tenant must be provided.", nameof(tenant));
        }

        _tenant = tenant.Trim();
        return this;
    }

    /// <summary>
    /// Set the asset digest and vulnerability ID. The vulnerability ID is
    /// normalized to upper-case invariant (e.g. "cve-…" becomes "CVE-…").
    /// </summary>
    public VerdictManifestBuilder WithAsset(string assetDigest, string vulnerabilityId)
    {
        if (string.IsNullOrWhiteSpace(assetDigest))
        {
            throw new ArgumentException("Asset digest must be provided.", nameof(assetDigest));
        }

        if (string.IsNullOrWhiteSpace(vulnerabilityId))
        {
            throw new ArgumentException("Vulnerability ID must be provided.", nameof(vulnerabilityId));
        }

        _assetDigest = assetDigest.Trim();
        _vulnerabilityId = vulnerabilityId.Trim().ToUpperInvariant();
        return this;
    }

    /// <summary>Use an already-constructed inputs record as-is.</summary>
    public VerdictManifestBuilder WithInputs(VerdictInputs inputs)
    {
        _inputs = inputs ?? throw new ArgumentNullException(nameof(inputs));
        return this;
    }

    /// <summary>
    /// Build the inputs record from raw collections; each collection is trimmed,
    /// de-duplicated, and ordinally sorted for determinism.
    /// </summary>
    /// <exception cref="ArgumentNullException">When any required collection is null.</exception>
    public VerdictManifestBuilder WithInputs(
        IEnumerable<string> sbomDigests,
        IEnumerable<string> vulnFeedSnapshotIds,
        IEnumerable<string> vexDocumentDigests,
        IEnumerable<string>? reachabilityGraphIds = null,
        DateTimeOffset? clockCutoff = null)
    {
        // Fix: fail fast with a named argument instead of an NRE inside LINQ.
        ArgumentNullException.ThrowIfNull(sbomDigests);
        ArgumentNullException.ThrowIfNull(vulnFeedSnapshotIds);
        ArgumentNullException.ThrowIfNull(vexDocumentDigests);

        _inputs = new VerdictInputs
        {
            SbomDigests = SortedImmutable(sbomDigests),
            VulnFeedSnapshotIds = SortedImmutable(vulnFeedSnapshotIds),
            VexDocumentDigests = SortedImmutable(vexDocumentDigests),
            ReachabilityGraphIds = SortedImmutable(reachabilityGraphIds ?? Enumerable.Empty<string>()),
            ClockCutoff = clockCutoff ?? DateTimeOffset.UtcNow,
        };
        return this;
    }

    /// <summary>Use an already-constructed result record as-is.</summary>
    public VerdictManifestBuilder WithResult(VerdictResult result)
    {
        _result = result ?? throw new ArgumentNullException(nameof(result));
        return this;
    }

    /// <summary>
    /// Build the result record from its parts, ordering explanations
    /// deterministically (claim score desc, then provenance desc, then source ID).
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">When confidence is NaN or outside [0, 1].</exception>
    /// <exception cref="ArgumentNullException">When <paramref name="explanations"/> is null.</exception>
    public VerdictManifestBuilder WithResult(
        VexStatus status,
        double confidence,
        IEnumerable<VerdictExplanation> explanations,
        IEnumerable<string>? evidenceRefs = null,
        bool hasConflicts = false,
        bool requiresReplayProof = false)
    {
        // Fix: NaN fails both '< 0' and '> 1' comparisons, so the old check let
        // NaN confidence through; reject it explicitly.
        if (double.IsNaN(confidence) || confidence < 0 || confidence > 1)
        {
            throw new ArgumentOutOfRangeException(nameof(confidence), "Confidence must be between 0 and 1.");
        }

        ArgumentNullException.ThrowIfNull(explanations);

        // Deterministic explanation order: strongest claim first (claim score,
        // then provenance), with source ID as the final stable tiebreaker.
        // (The previous comment claimed "by source ID" — score is the primary key.)
        var sortedExplanations = explanations
            .OrderByDescending(e => e.ClaimScore)
            .ThenByDescending(e => e.ProvenanceScore)
            .ThenBy(e => e.SourceId, StringComparer.Ordinal)
            .ToImmutableArray();

        _result = new VerdictResult
        {
            Status = status,
            Confidence = confidence,
            Explanations = sortedExplanations,
            EvidenceRefs = SortedImmutable(evidenceRefs ?? Enumerable.Empty<string>()),
            HasConflicts = hasConflicts,
            RequiresReplayProof = requiresReplayProof,
        };
        return this;
    }

    /// <summary>Set the policy hash and trust-lattice version (both trimmed).</summary>
    public VerdictManifestBuilder WithPolicy(string policyHash, string latticeVersion)
    {
        if (string.IsNullOrWhiteSpace(policyHash))
        {
            throw new ArgumentException("Policy hash must be provided.", nameof(policyHash));
        }

        if (string.IsNullOrWhiteSpace(latticeVersion))
        {
            throw new ArgumentException("Lattice version must be provided.", nameof(latticeVersion));
        }

        _policyHash = policyHash.Trim();
        _latticeVersion = latticeVersion.Trim();
        return this;
    }

    /// <summary>Pin the evaluation timestamp (normalized to UTC) instead of using "now".</summary>
    public VerdictManifestBuilder WithClock(DateTimeOffset evaluatedAt)
    {
        _evaluatedAt = evaluatedAt.ToUniversalTime();
        return this;
    }

    /// <summary>
    /// Validate all required fields, assemble the manifest, and stamp it with the
    /// digest of its canonical payload.
    /// </summary>
    /// <exception cref="InvalidOperationException">When required fields are missing.</exception>
    public VerdictManifest Build()
    {
        Validate();

        var manifestId = _idGenerator();
        var manifest = new VerdictManifest
        {
            ManifestId = manifestId,
            Tenant = _tenant!,
            AssetDigest = _assetDigest!,
            VulnerabilityId = _vulnerabilityId!,
            Inputs = _inputs!,
            Result = _result!,
            PolicyHash = _policyHash!,
            LatticeVersion = _latticeVersion!,
            EvaluatedAt = _evaluatedAt,
            ManifestDigest = string.Empty, // Will be computed
        };

        // Compute digest over the complete manifest
        var digest = VerdictManifestSerializer.ComputeDigest(manifest);
        return manifest with { ManifestDigest = digest };
    }

    // Collect every missing required field so the caller sees all problems at
    // once instead of fixing them one exception at a time.
    private void Validate()
    {
        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(_tenant))
        {
            errors.Add("Tenant is required.");
        }

        if (string.IsNullOrWhiteSpace(_assetDigest))
        {
            errors.Add("Asset digest is required.");
        }

        if (string.IsNullOrWhiteSpace(_vulnerabilityId))
        {
            errors.Add("Vulnerability ID is required.");
        }

        if (_inputs is null)
        {
            errors.Add("Inputs are required.");
        }

        if (_result is null)
        {
            errors.Add("Result is required.");
        }

        if (string.IsNullOrWhiteSpace(_policyHash))
        {
            errors.Add("Policy hash is required.");
        }

        if (string.IsNullOrWhiteSpace(_latticeVersion))
        {
            errors.Add("Lattice version is required.");
        }

        if (errors.Count > 0)
        {
            throw new InvalidOperationException($"VerdictManifest validation failed: {string.Join("; ", errors)}");
        }
    }

    // Canonicalize a string collection: drop blanks, trim, ordinal sort, dedupe.
    // (Distinct preserves encounter order, so sorting first keeps the output sorted.)
    private static ImmutableArray<string> SortedImmutable(IEnumerable<string> items)
        => items
            .Where(s => !string.IsNullOrWhiteSpace(s))
            .Select(s => s.Trim())
            .OrderBy(s => s, StringComparer.Ordinal)
            .Distinct(StringComparer.Ordinal)
            .ToImmutableArray();
}
|
||||
@@ -0,0 +1,240 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
/// <summary>
/// Outcome of replaying a verdict and comparing it against the stored manifest.
/// </summary>
public sealed record ReplayVerificationResult
{
    /// <summary>Whether the replay reproduced the original results exactly.</summary>
    public required bool Success { get; init; }

    /// <summary>The stored manifest that was being verified.</summary>
    public required VerdictManifest OriginalManifest { get; init; }

    /// <summary>Manifest produced by the replay, when one was computed.</summary>
    public VerdictManifest? ReplayedManifest { get; init; }

    /// <summary>Human-readable differences between the two manifests, when any were found.</summary>
    public ImmutableArray<string>? Differences { get; init; }

    /// <summary>Whether the manifest's signature (if present) verified successfully.</summary>
    public bool SignatureValid { get; init; }

    /// <summary>Failure description when the replay could not complete.</summary>
    public string? Error { get; init; }

    /// <summary>Wall-clock time the replay took.</summary>
    public TimeSpan? ReplayDuration { get; init; }
}
|
||||
|
||||
/// <summary>
/// Verifies that stored verdicts can be reproduced deterministically from their
/// pinned inputs.
/// </summary>
public interface IVerdictReplayVerifier
{
    /// <summary>
    /// Replay the verdict identified by <paramref name="manifestId"/> and check
    /// the outcome against the stored one.
    /// </summary>
    /// <param name="manifestId">Identifier of the manifest to replay.</param>
    /// <param name="ct">Token used to cancel the operation.</param>
    /// <returns>The verification outcome, including any detected differences.</returns>
    Task<ReplayVerificationResult> VerifyAsync(string manifestId, CancellationToken ct = default);

    /// <summary>
    /// Replay the supplied manifest and check the outcome against its recorded result.
    /// </summary>
    /// <param name="manifest">Manifest whose verdict should be replayed.</param>
    /// <param name="ct">Token used to cancel the operation.</param>
    /// <returns>The verification outcome, including any detected differences.</returns>
    Task<ReplayVerificationResult> VerifyAsync(VerdictManifest manifest, CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Supplies the verdict evaluation step that replay verification re-runs.
/// </summary>
public interface IVerdictEvaluator
{
    /// <summary>
    /// Compute a verdict from the given pinned inputs under a specific policy
    /// and lattice configuration.
    /// </summary>
    /// <param name="tenant">Tenant identifier.</param>
    /// <param name="assetDigest">Digest of the asset under evaluation.</param>
    /// <param name="vulnerabilityId">Vulnerability under evaluation.</param>
    /// <param name="inputs">Pinned inputs to evaluate against.</param>
    /// <param name="policyHash">Hash of the policy document to apply.</param>
    /// <param name="latticeVersion">Trust-lattice configuration version to apply.</param>
    /// <param name="ct">Token used to cancel the operation.</param>
    /// <returns>The computed verdict result.</returns>
    Task<VerdictResult> EvaluateAsync(
        string tenant,
        string assetDigest,
        string vulnerabilityId,
        VerdictInputs inputs,
        string policyHash,
        string latticeVersion,
        CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="IVerdictReplayVerifier"/>: re-runs the
/// evaluation with the manifest's pinned inputs and diffs the outcome against
/// the originally recorded result.
/// </summary>
public sealed class VerdictReplayVerifier : IVerdictReplayVerifier
{
    // NOTE(review): _store is injected but never read by any code path visible
    // here; kept so the constructor signature stays stable — confirm whether the
    // dependency can be dropped.
    private readonly IVerdictManifestStore _store;
    private readonly IVerdictManifestSigner _signer;
    private readonly IVerdictEvaluator _evaluator;

    public VerdictReplayVerifier(
        IVerdictManifestStore store,
        IVerdictManifestSigner signer,
        IVerdictEvaluator evaluator)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _evaluator = evaluator ?? throw new ArgumentNullException(nameof(evaluator));
    }

    /// <summary>
    /// Verification by ID alone is not supported: store lookups require a tenant,
    /// which this overload does not receive. Always returns a failed result
    /// directing callers to the manifest-based overload.
    /// </summary>
    public Task<ReplayVerificationResult> VerifyAsync(string manifestId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestId);

        // Fix: this was declared 'async' with no awaits (compiler warning CS1998);
        // return an already-completed task instead.
        return Task.FromResult(new ReplayVerificationResult
        {
            Success = false,
            OriginalManifest = null!, // deliberately unset on this error-only result
            Error = "Use VerifyAsync(VerdictManifest) overload with the full manifest.",
        });
    }

    /// <summary>
    /// Verify a manifest by (1) checking its DSSE signature when one is present,
    /// then (2) re-evaluating with the pinned inputs and diffing both results.
    /// </summary>
    public async Task<ReplayVerificationResult> VerifyAsync(VerdictManifest manifest, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        var stopwatch = System.Diagnostics.Stopwatch.StartNew();

        try
        {
            // Signature check first: a bad signature short-circuits the
            // comparatively expensive re-evaluation.
            var signatureValid = true;
            if (!string.IsNullOrWhiteSpace(manifest.SignatureBase64))
            {
                var sigResult = await _signer.VerifyAsync(manifest, ct).ConfigureAwait(false);
                signatureValid = sigResult.Valid;
                if (!signatureValid)
                {
                    return new ReplayVerificationResult
                    {
                        Success = false,
                        OriginalManifest = manifest,
                        SignatureValid = false,
                        Error = $"Signature verification failed: {sigResult.Error}",
                        ReplayDuration = stopwatch.Elapsed,
                    };
                }
            }

            // Re-evaluate using the pinned inputs and the original policy context.
            var replayedResult = await _evaluator.EvaluateAsync(
                manifest.Tenant,
                manifest.AssetDigest,
                manifest.VulnerabilityId,
                manifest.Inputs,
                manifest.PolicyHash,
                manifest.LatticeVersion,
                ct).ConfigureAwait(false);

            // Rebuild a manifest with the same ID and pinned clock so the
            // resulting digests are directly comparable.
            var replayedManifest = new VerdictManifestBuilder(() => manifest.ManifestId)
                .WithTenant(manifest.Tenant)
                .WithAsset(manifest.AssetDigest, manifest.VulnerabilityId)
                .WithInputs(manifest.Inputs)
                .WithResult(replayedResult)
                .WithPolicy(manifest.PolicyHash, manifest.LatticeVersion)
                .WithClock(manifest.Inputs.ClockCutoff)
                .Build();

            var differences = CompareManifests(manifest, replayedManifest);
            var success = differences.Length == 0;

            stopwatch.Stop();

            return new ReplayVerificationResult
            {
                Success = success,
                OriginalManifest = manifest,
                ReplayedManifest = replayedManifest,
                Differences = differences,
                SignatureValid = signatureValid,
                Error = success ? null : "Replay produced different results",
                ReplayDuration = stopwatch.Elapsed,
            };
        }
        catch (Exception ex)
        {
            // Broad catch is intentional: replay is diagnostic, so any failure is
            // reported as an unsuccessful result rather than propagated.
            stopwatch.Stop();
            return new ReplayVerificationResult
            {
                Success = false,
                OriginalManifest = manifest,
                Error = $"Replay failed: {ex.Message}",
                ReplayDuration = stopwatch.Elapsed,
            };
        }
    }

    // Produce human-readable diffs between the stored and replayed manifests.
    // Floating-point scores are compared with a 1e-4 tolerance.
    private static ImmutableArray<string> CompareManifests(VerdictManifest original, VerdictManifest replayed)
    {
        var diffs = new List<string>();

        if (original.Result.Status != replayed.Result.Status)
        {
            diffs.Add($"Status: {original.Result.Status} vs {replayed.Result.Status}");
        }

        if (Math.Abs(original.Result.Confidence - replayed.Result.Confidence) > 0.0001)
        {
            diffs.Add($"Confidence: {original.Result.Confidence:F4} vs {replayed.Result.Confidence:F4}");
        }

        if (original.Result.HasConflicts != replayed.Result.HasConflicts)
        {
            diffs.Add($"HasConflicts: {original.Result.HasConflicts} vs {replayed.Result.HasConflicts}");
        }

        if (original.Result.Explanations.Length != replayed.Result.Explanations.Length)
        {
            diffs.Add($"Explanations count: {original.Result.Explanations.Length} vs {replayed.Result.Explanations.Length}");
        }
        else
        {
            // Explanations are built in deterministic order by the builder, so a
            // positional comparison is valid.
            for (var i = 0; i < original.Result.Explanations.Length; i++)
            {
                var origExp = original.Result.Explanations[i];
                var repExp = replayed.Result.Explanations[i];

                if (origExp.SourceId != repExp.SourceId)
                {
                    diffs.Add($"Explanation[{i}].SourceId: {origExp.SourceId} vs {repExp.SourceId}");
                }

                if (Math.Abs(origExp.ClaimScore - repExp.ClaimScore) > 0.0001)
                {
                    diffs.Add($"Explanation[{i}].ClaimScore: {origExp.ClaimScore:F4} vs {repExp.ClaimScore:F4}");
                }
            }
        }

        // The digest covers the full canonical payload, so it catches anything
        // the field-level checks above missed.
        if (original.ManifestDigest != replayed.ManifestDigest)
        {
            diffs.Add($"ManifestDigest: {original.ManifestDigest} vs {replayed.ManifestDigest}");
        }

        return diffs.ToImmutableArray();
    }
}
|
||||
@@ -0,0 +1,84 @@
|
||||
-- Verdict Manifest Schema for VEX Trust Lattice
-- Sprint: 7100.0001.0002
--
-- Stores one row per VEX verdict manifest; JSONB columns carry the full
-- VerdictInputs/VerdictResult payloads so the schema can evolve without DDL.

-- Create schema if not exists
CREATE SCHEMA IF NOT EXISTS authority;

-- Verdict manifests table
CREATE TABLE IF NOT EXISTS authority.verdict_manifests (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    manifest_id TEXT NOT NULL,
    tenant TEXT NOT NULL,

    -- Scope
    asset_digest TEXT NOT NULL,
    vulnerability_id TEXT NOT NULL,

    -- Inputs (JSONB for flexibility and schema evolution)
    inputs_json JSONB NOT NULL,

    -- Result
    -- status values mirror the VexStatus enum's snake_case wire names.
    status TEXT NOT NULL CHECK (status IN ('affected', 'not_affected', 'fixed', 'under_investigation')),
    confidence DOUBLE PRECISION NOT NULL CHECK (confidence >= 0 AND confidence <= 1),
    result_json JSONB NOT NULL,

    -- Policy context
    policy_hash TEXT NOT NULL,
    lattice_version TEXT NOT NULL,

    -- Metadata
    evaluated_at TIMESTAMPTZ NOT NULL,
    manifest_digest TEXT NOT NULL,

    -- Signature
    signature_base64 TEXT,
    rekor_log_id TEXT,

    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    -- Uniqueness constraints
    CONSTRAINT uq_verdict_manifest_id UNIQUE (tenant, manifest_id)
);

-- Primary lookup: asset + CVE
CREATE INDEX IF NOT EXISTS idx_verdict_asset_vuln
    ON authority.verdict_manifests(tenant, asset_digest, vulnerability_id);

-- Replay queries: same policy + lattice
CREATE INDEX IF NOT EXISTS idx_verdict_policy
    ON authority.verdict_manifests(tenant, policy_hash, lattice_version);

-- Time-based queries (BRIN for append-mostly workload)
CREATE INDEX IF NOT EXISTS idx_verdict_time
    ON authority.verdict_manifests USING BRIN (evaluated_at);

-- Composite for deterministic replay lookup
-- NOTE(review): this UNIQUE index allows at most ONE manifest per
-- (tenant, asset, vulnerability, policy, lattice) scope, i.e. re-evaluations
-- must upsert rather than accumulate history. Confirm this is intended, since
-- readers elsewhere select the "latest by evaluated_at" for a scope, which
-- implies multiple rows were expected.
CREATE UNIQUE INDEX IF NOT EXISTS idx_verdict_replay
    ON authority.verdict_manifests(
        tenant, asset_digest, vulnerability_id, policy_hash, lattice_version
    );

-- Index for digest lookups (verification)
CREATE INDEX IF NOT EXISTS idx_verdict_digest
    ON authority.verdict_manifests(manifest_digest);

-- Row-level security
ALTER TABLE authority.verdict_manifests ENABLE ROW LEVEL SECURITY;

-- RLS policy for tenant isolation.
-- current_setting(..., true) returns NULL when app.current_tenant is unset,
-- so sessions without a tenant see (and can write) no rows. Note that the
-- table owner and superusers bypass RLS unless FORCE ROW LEVEL SECURITY is set.
CREATE POLICY verdict_tenant_isolation ON authority.verdict_manifests
    USING (tenant = current_setting('app.current_tenant', true))
    WITH CHECK (tenant = current_setting('app.current_tenant', true));

-- Grant permissions
-- NOTE(review): assumes the stellaops_app role already exists in this database.
GRANT SELECT, INSERT, UPDATE, DELETE ON authority.verdict_manifests TO stellaops_app;
GRANT USAGE ON SCHEMA authority TO stellaops_app;

COMMENT ON TABLE authority.verdict_manifests IS 'VEX verdict manifests for deterministic replay verification';
COMMENT ON COLUMN authority.verdict_manifests.manifest_id IS 'Unique manifest identifier';
COMMENT ON COLUMN authority.verdict_manifests.inputs_json IS 'JSONB containing VerdictInputs (SBOM digests, VEX docs, etc.)';
COMMENT ON COLUMN authority.verdict_manifests.result_json IS 'JSONB containing VerdictResult with explanations';
COMMENT ON COLUMN authority.verdict_manifests.policy_hash IS 'SHA256 hash of the policy document used';
COMMENT ON COLUMN authority.verdict_manifests.lattice_version IS 'Version of trust lattice configuration';
COMMENT ON COLUMN authority.verdict_manifests.manifest_digest IS 'SHA256 digest of canonical manifest for integrity';
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Authority.Core\StellaOps.Authority.Core.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,335 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using Npgsql;
|
||||
using StellaOps.Authority.Core.Verdicts;
|
||||
|
||||
namespace StellaOps.Authority.Storage.Postgres;
|
||||
|
||||
/// <summary>
/// PostgreSQL implementation of <see cref="IVerdictManifestStore"/> backed by the
/// <c>authority.verdict_manifests</c> table. Verdict inputs and results are persisted
/// as JSONB using snake_case property names so rows remain queryable from SQL.
/// </summary>
public sealed class PostgresVerdictManifestStore : IVerdictManifestStore
{
    private readonly NpgsqlDataSource _dataSource;

    // snake_case to match the JSON shape stored in inputs_json / result_json.
    private static readonly JsonSerializerOptions s_jsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        WriteIndented = false,
    };

    // Shared column projection for every read query. Ordinals MUST stay in sync
    // with MapFromReader below.
    private const string SelectColumns = """
        SELECT manifest_id, tenant, asset_digest, vulnerability_id,
               inputs_json, status, confidence, result_json,
               policy_hash, lattice_version, evaluated_at, manifest_digest,
               signature_base64, rekor_log_id
        """;

    public PostgresVerdictManifestStore(NpgsqlDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Inserts a manifest, or replaces the existing row for the same
    /// (tenant, asset, vulnerability, policy, lattice) replay scope.
    /// </summary>
    /// <param name="manifest">Manifest to persist; must not be null.</param>
    /// <returns>The stored manifest (same instance).</returns>
    public async Task<VerdictManifest> StoreAsync(VerdictManifest manifest, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        const string sql = """
            INSERT INTO authority.verdict_manifests (
                manifest_id, tenant, asset_digest, vulnerability_id,
                inputs_json, status, confidence, result_json,
                policy_hash, lattice_version, evaluated_at, manifest_digest,
                signature_base64, rekor_log_id
            ) VALUES (
                @manifestId, @tenant, @assetDigest, @vulnerabilityId,
                @inputsJson::jsonb, @status, @confidence, @resultJson::jsonb,
                @policyHash, @latticeVersion, @evaluatedAt, @manifestDigest,
                @signatureBase64, @rekorLogId
            )
            ON CONFLICT (tenant, asset_digest, vulnerability_id, policy_hash, lattice_version)
            DO UPDATE SET
                manifest_id = EXCLUDED.manifest_id,
                inputs_json = EXCLUDED.inputs_json,
                status = EXCLUDED.status,
                confidence = EXCLUDED.confidence,
                result_json = EXCLUDED.result_json,
                evaluated_at = EXCLUDED.evaluated_at,
                manifest_digest = EXCLUDED.manifest_digest,
                signature_base64 = EXCLUDED.signature_base64,
                rekor_log_id = EXCLUDED.rekor_log_id
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);

        cmd.Parameters.AddWithValue("manifestId", manifest.ManifestId);
        cmd.Parameters.AddWithValue("tenant", manifest.Tenant);
        cmd.Parameters.AddWithValue("assetDigest", manifest.AssetDigest);
        cmd.Parameters.AddWithValue("vulnerabilityId", manifest.VulnerabilityId);
        cmd.Parameters.AddWithValue("inputsJson", JsonSerializer.Serialize(manifest.Inputs, s_jsonOptions));
        cmd.Parameters.AddWithValue("status", StatusToString(manifest.Result.Status));
        cmd.Parameters.AddWithValue("confidence", manifest.Result.Confidence);
        cmd.Parameters.AddWithValue("resultJson", JsonSerializer.Serialize(manifest.Result, s_jsonOptions));
        cmd.Parameters.AddWithValue("policyHash", manifest.PolicyHash);
        cmd.Parameters.AddWithValue("latticeVersion", manifest.LatticeVersion);
        // Npgsql maps DateTimeOffset to timestamptz only when the offset is zero;
        // normalize so non-UTC callers do not fail at write time.
        cmd.Parameters.AddWithValue("evaluatedAt", manifest.EvaluatedAt.ToUniversalTime());
        cmd.Parameters.AddWithValue("manifestDigest", manifest.ManifestDigest);
        cmd.Parameters.AddWithValue("signatureBase64", (object?)manifest.SignatureBase64 ?? DBNull.Value);
        cmd.Parameters.AddWithValue("rekorLogId", (object?)manifest.RekorLogId ?? DBNull.Value);

        await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
        return manifest;
    }

    /// <summary>Looks up a manifest by tenant-scoped identifier; null when absent.</summary>
    public async Task<VerdictManifest?> GetByIdAsync(string tenant, string manifestId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestId);

        var sql = SelectColumns + """

            FROM authority.verdict_manifests
            WHERE tenant = @tenant AND manifest_id = @manifestId
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant", tenant);
        cmd.Parameters.AddWithValue("manifestId", manifestId);

        return await ReadSingleAsync(cmd, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Returns the most recent manifest for an (asset, vulnerability) pair, optionally
    /// narrowed to a specific policy hash and/or lattice version; null when absent.
    /// </summary>
    public async Task<VerdictManifest?> GetByScopeAsync(
        string tenant,
        string assetDigest,
        string vulnerabilityId,
        string? policyHash = null,
        string? latticeVersion = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);

        var sql = SelectColumns + """

            FROM authority.verdict_manifests
            WHERE tenant = @tenant
              AND asset_digest = @assetDigest
              AND vulnerability_id = @vulnerabilityId
            """;

        // Optional filters are appended (and bound) only when supplied.
        if (!string.IsNullOrWhiteSpace(policyHash))
        {
            sql += " AND policy_hash = @policyHash";
        }

        if (!string.IsNullOrWhiteSpace(latticeVersion))
        {
            sql += " AND lattice_version = @latticeVersion";
        }

        sql += " ORDER BY evaluated_at DESC LIMIT 1";

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant", tenant);
        cmd.Parameters.AddWithValue("assetDigest", assetDigest);
        cmd.Parameters.AddWithValue("vulnerabilityId", vulnerabilityId);

        if (!string.IsNullOrWhiteSpace(policyHash))
        {
            cmd.Parameters.AddWithValue("policyHash", policyHash);
        }

        if (!string.IsNullOrWhiteSpace(latticeVersion))
        {
            cmd.Parameters.AddWithValue("latticeVersion", latticeVersion);
        }

        return await ReadSingleAsync(cmd, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Lists manifests evaluated under a specific policy/lattice pair, newest first.
    /// Uses offset-based paging; <paramref name="pageToken"/> is an opaque offset.
    /// </summary>
    public async Task<VerdictManifestPage> ListByPolicyAsync(
        string tenant,
        string policyHash,
        string latticeVersion,
        int limit = 100,
        string? pageToken = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(policyHash);
        ArgumentException.ThrowIfNullOrWhiteSpace(latticeVersion);

        var offset = ParsePageToken(pageToken);
        limit = Math.Clamp(limit, 1, 1000);

        var sql = SelectColumns + """

            FROM authority.verdict_manifests
            WHERE tenant = @tenant
              AND policy_hash = @policyHash
              AND lattice_version = @latticeVersion
            ORDER BY evaluated_at DESC, manifest_id
            LIMIT @limit OFFSET @offset
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant", tenant);
        cmd.Parameters.AddWithValue("policyHash", policyHash);
        cmd.Parameters.AddWithValue("latticeVersion", latticeVersion);
        // Fetch one extra row to detect whether another page exists.
        cmd.Parameters.AddWithValue("limit", limit + 1);
        cmd.Parameters.AddWithValue("offset", offset);

        return await ReadPageAsync(cmd, offset, limit, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Lists manifests for a given asset digest, newest first, with offset paging.
    /// </summary>
    public async Task<VerdictManifestPage> ListByAssetAsync(
        string tenant,
        string assetDigest,
        int limit = 100,
        string? pageToken = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(assetDigest);

        var offset = ParsePageToken(pageToken);
        limit = Math.Clamp(limit, 1, 1000);

        var sql = SelectColumns + """

            FROM authority.verdict_manifests
            WHERE tenant = @tenant AND asset_digest = @assetDigest
            ORDER BY evaluated_at DESC, manifest_id
            LIMIT @limit OFFSET @offset
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant", tenant);
        cmd.Parameters.AddWithValue("assetDigest", assetDigest);
        // Fetch one extra row to detect whether another page exists.
        cmd.Parameters.AddWithValue("limit", limit + 1);
        cmd.Parameters.AddWithValue("offset", offset);

        return await ReadPageAsync(cmd, offset, limit, ct).ConfigureAwait(false);
    }

    /// <summary>Deletes a manifest; returns true when a row was removed.</summary>
    public async Task<bool> DeleteAsync(string tenant, string manifestId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenant);
        ArgumentException.ThrowIfNullOrWhiteSpace(manifestId);

        const string sql = """
            DELETE FROM authority.verdict_manifests
            WHERE tenant = @tenant AND manifest_id = @manifestId
            """;

        await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant", tenant);
        cmd.Parameters.AddWithValue("manifestId", manifestId);

        var rows = await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
        return rows > 0;
    }

    // Executes the command and maps at most one row; null when the result set is empty.
    private static async Task<VerdictManifest?> ReadSingleAsync(NpgsqlCommand cmd, CancellationToken ct)
    {
        await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
        return await reader.ReadAsync(ct).ConfigureAwait(false) ? MapFromReader(reader) : null;
    }

    // Materializes up to limit + 1 rows and converts the overflow row into a next-page token.
    private static async Task<VerdictManifestPage> ReadPageAsync(
        NpgsqlCommand cmd, int offset, int limit, CancellationToken ct)
    {
        var manifests = new List<VerdictManifest>(limit);
        await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
        while (await reader.ReadAsync(ct).ConfigureAwait(false))
        {
            manifests.Add(MapFromReader(reader));
        }

        var hasMore = manifests.Count > limit;
        if (hasMore)
        {
            manifests.RemoveAt(manifests.Count - 1);
        }

        return new VerdictManifestPage
        {
            Manifests = manifests.ToImmutableArray(),
            NextPageToken = hasMore ? (offset + limit).ToString() : null,
        };
    }

    // Ordinals correspond to SelectColumns above.
    private static VerdictManifest MapFromReader(NpgsqlDataReader reader)
    {
        var inputsJson = reader.GetString(4);
        var resultJson = reader.GetString(7);

        var inputs = JsonSerializer.Deserialize<VerdictInputs>(inputsJson, s_jsonOptions)
            ?? throw new InvalidOperationException("Failed to deserialize inputs");
        var result = JsonSerializer.Deserialize<VerdictResult>(resultJson, s_jsonOptions)
            ?? throw new InvalidOperationException("Failed to deserialize result");

        return new VerdictManifest
        {
            ManifestId = reader.GetString(0),
            Tenant = reader.GetString(1),
            AssetDigest = reader.GetString(2),
            VulnerabilityId = reader.GetString(3),
            Inputs = inputs,
            Result = result,
            PolicyHash = reader.GetString(8),
            LatticeVersion = reader.GetString(9),
            // Read timestamptz as DateTimeOffset directly instead of via an implicit
            // DateTime conversion (which depends on DateTimeKind).
            EvaluatedAt = reader.GetFieldValue<DateTimeOffset>(10),
            ManifestDigest = reader.GetString(11),
            SignatureBase64 = reader.IsDBNull(12) ? null : reader.GetString(12),
            RekorLogId = reader.IsDBNull(13) ? null : reader.GetString(13),
        };
    }

    // Canonical lowercase status strings persisted in the status column.
    private static string StatusToString(VexStatus status) => status switch
    {
        VexStatus.Affected => "affected",
        VexStatus.NotAffected => "not_affected",
        VexStatus.Fixed => "fixed",
        VexStatus.UnderInvestigation => "under_investigation",
        _ => "affected", // fail closed: unknown values are treated as affected
    };

    // Page tokens are plain integer offsets; malformed or negative tokens restart at 0.
    private static int ParsePageToken(string? pageToken)
    {
        if (string.IsNullOrWhiteSpace(pageToken))
        {
            return 0;
        }

        return int.TryParse(pageToken, out var offset) ? Math.Max(0, offset) : 0;
    }
}
|
||||
@@ -0,0 +1,25 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" Version="8.2.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="Moq" Version="4.20.72" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Authority.Core/StellaOps.Authority.Core.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,155 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Authority.Core.Verdicts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Core.Tests.Verdicts;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="InMemoryVerdictManifestStore"/>: CRUD round-trips,
/// scope resolution, pagination, and tenant isolation.
/// </summary>
public sealed class InMemoryVerdictManifestStoreTests
{
    private readonly InMemoryVerdictManifestStore _sut = new();

    [Fact]
    public async Task StoreAndRetrieve_ByManifestId()
    {
        var stored = NewManifest("manifest-1", "tenant-1");
        await _sut.StoreAsync(stored);

        var loaded = await _sut.GetByIdAsync("tenant-1", "manifest-1");

        loaded.Should().NotBeNull();
        loaded!.ManifestId.Should().Be("manifest-1");
        loaded.Tenant.Should().Be("tenant-1");
    }

    [Fact]
    public async Task GetByScope_ReturnsLatest()
    {
        // Two manifests for the same scope, a day apart — the newer one must win.
        var stale = NewManifest("m1", "t", evaluatedAt: DateTimeOffset.Parse("2025-01-01T00:00:00Z"));
        var fresh = NewManifest("m2", "t", evaluatedAt: DateTimeOffset.Parse("2025-01-02T00:00:00Z"));
        await _sut.StoreAsync(stale);
        await _sut.StoreAsync(fresh);

        var resolved = await _sut.GetByScopeAsync("t", "sha256:asset", "CVE-2024-1234");

        resolved.Should().NotBeNull();
        resolved!.ManifestId.Should().Be("m2");
    }

    [Fact]
    public async Task GetByScope_FiltersOnPolicyAndLattice()
    {
        // Same scope, two policies — filtering on policyHash selects the matching one.
        var underPolicy1 = NewManifest("m1", "t", policyHash: "p1", latticeVersion: "v1");
        var underPolicy2 = NewManifest("m2", "t", policyHash: "p2", latticeVersion: "v1");
        await _sut.StoreAsync(underPolicy1);
        await _sut.StoreAsync(underPolicy2);

        var resolved = await _sut.GetByScopeAsync("t", "sha256:asset", "CVE-2024-1234", policyHash: "p1");

        resolved.Should().NotBeNull();
        resolved!.ManifestId.Should().Be("m1");
    }

    [Fact]
    public async Task ListByPolicy_Paginates()
    {
        // Five manifests → pages of 2, 2, 1 with tokens on all but the last.
        for (var i = 0; i < 5; i++)
        {
            await _sut.StoreAsync(NewManifest($"m{i}", "t", policyHash: "p1", latticeVersion: "v1",
                evaluatedAt: DateTimeOffset.UtcNow.AddMinutes(-i)));
        }

        var page1 = await _sut.ListByPolicyAsync("t", "p1", "v1", limit: 2);
        page1.Manifests.Should().HaveCount(2);
        page1.NextPageToken.Should().NotBeNull();

        var page2 = await _sut.ListByPolicyAsync("t", "p1", "v1", limit: 2, pageToken: page1.NextPageToken);
        page2.Manifests.Should().HaveCount(2);
        page2.NextPageToken.Should().NotBeNull();

        var page3 = await _sut.ListByPolicyAsync("t", "p1", "v1", limit: 2, pageToken: page2.NextPageToken);
        page3.Manifests.Should().HaveCount(1);
        page3.NextPageToken.Should().BeNull();
    }

    [Fact]
    public async Task Delete_RemovesManifest()
    {
        await _sut.StoreAsync(NewManifest("m1", "t"));

        var deleted = await _sut.DeleteAsync("t", "m1");
        deleted.Should().BeTrue();

        var loaded = await _sut.GetByIdAsync("t", "m1");
        loaded.Should().BeNull();
    }

    [Fact]
    public async Task Delete_ReturnsFalseWhenNotFound()
    {
        var deleted = await _sut.DeleteAsync("t", "nonexistent");

        deleted.Should().BeFalse();
    }

    [Fact]
    public async Task TenantIsolation_Works()
    {
        // Identical manifest id under two tenants must produce two independent rows.
        await _sut.StoreAsync(NewManifest("shared-id", "tenant-a"));
        await _sut.StoreAsync(NewManifest("shared-id", "tenant-b"));

        var fromA = await _sut.GetByIdAsync("tenant-a", "shared-id");
        var fromB = await _sut.GetByIdAsync("tenant-b", "shared-id");

        fromA.Should().NotBeNull();
        fromB.Should().NotBeNull();
        fromA!.Tenant.Should().Be("tenant-a");
        fromB!.Tenant.Should().Be("tenant-b");

        _sut.Count.Should().Be(2);
    }

    // Builds a minimal valid manifest; defaults give every test the same replay scope
    // unless explicitly overridden.
    private static VerdictManifest NewManifest(
        string manifestId,
        string tenant,
        string assetDigest = "sha256:asset",
        string vulnerabilityId = "CVE-2024-1234",
        string policyHash = "sha256:policy",
        string latticeVersion = "1.0.0",
        DateTimeOffset? evaluatedAt = null) => new()
    {
        ManifestId = manifestId,
        Tenant = tenant,
        AssetDigest = assetDigest,
        VulnerabilityId = vulnerabilityId,
        Inputs = new VerdictInputs
        {
            SbomDigests = ImmutableArray.Create("sha256:sbom"),
            VulnFeedSnapshotIds = ImmutableArray.Create("feed-1"),
            VexDocumentDigests = ImmutableArray.Create("sha256:vex"),
            ReachabilityGraphIds = ImmutableArray<string>.Empty,
            ClockCutoff = DateTimeOffset.UtcNow,
        },
        Result = new VerdictResult
        {
            Status = VexStatus.NotAffected,
            Confidence = 0.85,
            Explanations = ImmutableArray<VerdictExplanation>.Empty,
            EvidenceRefs = ImmutableArray<string>.Empty,
        },
        PolicyHash = policyHash,
        LatticeVersion = latticeVersion,
        EvaluatedAt = evaluatedAt ?? DateTimeOffset.UtcNow,
        ManifestDigest = $"sha256:{manifestId}",
    };
}
|
||||
@@ -0,0 +1,165 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Authority.Core.Verdicts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Core.Tests.Verdicts;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="VerdictManifestBuilder"/>: construction, determinism of
/// the manifest digest, canonical input ordering, validation, and id normalization.
/// </summary>
public sealed class VerdictManifestBuilderTests
{
    [Fact]
    public void Build_CreatesValidManifest()
    {
        // A fully-populated builder with a fixed id factory and fixed clock.
        var builder = new VerdictManifestBuilder(() => "test-manifest-id")
            .WithTenant("tenant-1")
            .WithAsset("sha256:abc123", "CVE-2024-1234")
            .WithInputs(
                sbomDigests: new[] { "sha256:sbom1" },
                vulnFeedSnapshotIds: new[] { "feed-snapshot-1" },
                vexDocumentDigests: new[] { "sha256:vex1" },
                clockCutoff: DateTimeOffset.Parse("2025-01-01T00:00:00Z"))
            .WithResult(
                status: VexStatus.NotAffected,
                confidence: 0.85,
                explanations: new[]
                {
                    new VerdictExplanation
                    {
                        SourceId = "vendor-a",
                        Reason = "Official vendor VEX",
                        ProvenanceScore = 0.9,
                        CoverageScore = 0.8,
                        ReplayabilityScore = 0.7,
                        StrengthMultiplier = 1.0,
                        FreshnessMultiplier = 0.95,
                        ClaimScore = 0.85,
                        AssertedStatus = VexStatus.NotAffected,
                        Accepted = true,
                    },
                })
            .WithPolicy("sha256:policy123", "1.0.0")
            .WithClock(DateTimeOffset.Parse("2025-01-01T12:00:00Z"));

        var manifest = builder.Build();

        manifest.ManifestId.Should().Be("test-manifest-id");
        manifest.Tenant.Should().Be("tenant-1");
        manifest.AssetDigest.Should().Be("sha256:abc123");
        manifest.VulnerabilityId.Should().Be("CVE-2024-1234");
        manifest.Result.Status.Should().Be(VexStatus.NotAffected);
        manifest.Result.Confidence.Should().Be(0.85);
        manifest.ManifestDigest.Should().StartWith("sha256:");
    }

    [Fact]
    public void Build_IsDeterministic()
    {
        var clock = DateTimeOffset.Parse("2025-01-01T12:00:00Z");
        var inputClock = DateTimeOffset.Parse("2025-01-01T00:00:00Z");

        // Local factory with no parameters: identical inputs must always yield an
        // identical digest. (The original version took an unused "seed" parameter,
        // which wrongly implied the digest was exercised across varying inputs.)
        VerdictManifest BuildOnce()
        {
            return new VerdictManifestBuilder(() => "fixed-id")
                .WithTenant("tenant")
                .WithAsset("sha256:asset", "CVE-2024-0001")
                .WithInputs(
                    sbomDigests: new[] { "sha256:sbom" },
                    vulnFeedSnapshotIds: new[] { "feed-1" },
                    vexDocumentDigests: new[] { "sha256:vex" },
                    clockCutoff: inputClock)
                .WithResult(
                    status: VexStatus.Fixed,
                    confidence: 0.9,
                    explanations: new[]
                    {
                        new VerdictExplanation
                        {
                            SourceId = "source",
                            Reason = "Fixed",
                            ProvenanceScore = 0.9,
                            CoverageScore = 0.9,
                            ReplayabilityScore = 0.9,
                            StrengthMultiplier = 1.0,
                            FreshnessMultiplier = 1.0,
                            ClaimScore = 0.9,
                            AssertedStatus = VexStatus.Fixed,
                            Accepted = true,
                        },
                    })
                .WithPolicy("sha256:policy", "1.0")
                .WithClock(clock)
                .Build();
        }

        var first = BuildOnce();
        for (var i = 0; i < 100; i++)
        {
            var next = BuildOnce();
            next.ManifestDigest.Should().Be(first.ManifestDigest, "manifests should be deterministic");
        }
    }

    [Fact]
    public void Build_SortsInputsDeterministically()
    {
        var clock = DateTimeOffset.Parse("2025-01-01T00:00:00Z");

        // Same input sets in different orders must canonicalize to the same digest.
        var manifestA = new VerdictManifestBuilder(() => "id")
            .WithTenant("t")
            .WithAsset("sha256:a", "CVE-1")
            .WithInputs(
                sbomDigests: new[] { "c", "a", "b" },
                vulnFeedSnapshotIds: new[] { "z", "y" },
                vexDocumentDigests: new[] { "3", "1", "2" },
                clockCutoff: clock)
            .WithResult(VexStatus.Affected, 0.5, Enumerable.Empty<VerdictExplanation>())
            .WithPolicy("p", "v")
            .WithClock(clock)
            .Build();

        var manifestB = new VerdictManifestBuilder(() => "id")
            .WithTenant("t")
            .WithAsset("sha256:a", "CVE-1")
            .WithInputs(
                sbomDigests: new[] { "b", "c", "a" },
                vulnFeedSnapshotIds: new[] { "y", "z" },
                vexDocumentDigests: new[] { "2", "3", "1" },
                clockCutoff: clock)
            .WithResult(VexStatus.Affected, 0.5, Enumerable.Empty<VerdictExplanation>())
            .WithPolicy("p", "v")
            .WithClock(clock)
            .Build();

        manifestA.ManifestDigest.Should().Be(manifestB.ManifestDigest);
        manifestA.Inputs.SbomDigests.Should().Equal("a", "b", "c");
    }

    [Fact]
    public void Build_ThrowsOnMissingRequiredFields()
    {
        var builder = new VerdictManifestBuilder();

        var act = () => builder.Build();

        act.Should().Throw<InvalidOperationException>()
            .WithMessage("*validation failed*");
    }

    [Fact]
    public void Build_NormalizesVulnerabilityIdToUpperCase()
    {
        // Lowercase "cve-..." input must come out canonicalized as "CVE-...".
        var manifest = new VerdictManifestBuilder(() => "id")
            .WithTenant("t")
            .WithAsset("sha256:a", "cve-2024-1234")
            .WithInputs(
                sbomDigests: new[] { "sha256:s" },
                vulnFeedSnapshotIds: new[] { "f" },
                vexDocumentDigests: new[] { "v" },
                clockCutoff: DateTimeOffset.UtcNow)
            .WithResult(VexStatus.Affected, 0.5, Enumerable.Empty<VerdictExplanation>())
            .WithPolicy("p", "v")
            .Build();

        manifest.VulnerabilityId.Should().Be("CVE-2024-1234");
    }
}
|
||||
@@ -0,0 +1,122 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Authority.Core.Verdicts;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Core.Tests.Verdicts;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="VerdictManifestSerializer"/>: snake_case JSON output,
/// round-tripping, and digest determinism / content sensitivity / signature exclusion.
/// </summary>
public sealed class VerdictManifestSerializerTests
{
    [Fact]
    public void Serialize_ProducesValidJson()
    {
        var json = VerdictManifestSerializer.Serialize(BuildFixture());

        json.Should().Contain("\"manifest_id\"");
        json.Should().Contain("\"tenant\"");
        json.Should().Contain("\"not_affected\"");
        json.Should().NotContain("\"ManifestId\""); // Should use snake_case
    }

    [Fact]
    public void SerializeDeserialize_RoundTrips()
    {
        var original = BuildFixture();

        var roundTripped = VerdictManifestSerializer.Deserialize(
            VerdictManifestSerializer.Serialize(original));

        roundTripped.Should().NotBeNull();
        roundTripped!.ManifestId.Should().Be(original.ManifestId);
        roundTripped.Result.Status.Should().Be(original.Result.Status);
        roundTripped.Result.Confidence.Should().Be(original.Result.Confidence);
    }

    [Fact]
    public void ComputeDigest_IsDeterministic()
    {
        var fixture = BuildFixture();

        // Two digest computations over the same manifest must agree exactly.
        var first = VerdictManifestSerializer.ComputeDigest(fixture);
        var second = VerdictManifestSerializer.ComputeDigest(fixture);

        first.Should().Be(second);
        first.Should().StartWith("sha256:");
    }

    [Fact]
    public void ComputeDigest_ChangesWithContent()
    {
        var baseline = BuildFixture();
        // Change a single digested field (confidence) and nothing else.
        var mutated = baseline with
        {
            Result = baseline.Result with { Confidence = 0.5 }
        };

        VerdictManifestSerializer.ComputeDigest(baseline)
            .Should().NotBe(VerdictManifestSerializer.ComputeDigest(mutated));
    }

    [Fact]
    public void ComputeDigest_IgnoresSignatureFields()
    {
        var unsigned = BuildFixture();
        // Signature material is applied over the digest, so it must not feed into it.
        var signed = unsigned with
        {
            SignatureBase64 = "some-signature",
            RekorLogId = "some-log-id"
        };

        VerdictManifestSerializer.ComputeDigest(unsigned)
            .Should().Be(VerdictManifestSerializer.ComputeDigest(signed));
    }

    // Fixed, fully-populated manifest used by every test; no randomness or wall clock.
    private static VerdictManifest BuildFixture() => new()
    {
        ManifestId = "test-id",
        Tenant = "test-tenant",
        AssetDigest = "sha256:asset123",
        VulnerabilityId = "CVE-2024-1234",
        Inputs = new VerdictInputs
        {
            SbomDigests = ImmutableArray.Create("sha256:sbom1"),
            VulnFeedSnapshotIds = ImmutableArray.Create("feed-1"),
            VexDocumentDigests = ImmutableArray.Create("sha256:vex1"),
            ReachabilityGraphIds = ImmutableArray<string>.Empty,
            ClockCutoff = DateTimeOffset.Parse("2025-01-01T00:00:00Z"),
        },
        Result = new VerdictResult
        {
            Status = VexStatus.NotAffected,
            Confidence = 0.85,
            Explanations = ImmutableArray.Create(
                new VerdictExplanation
                {
                    SourceId = "vendor-a",
                    Reason = "Official vendor statement",
                    ProvenanceScore = 0.9,
                    CoverageScore = 0.8,
                    ReplayabilityScore = 0.7,
                    StrengthMultiplier = 1.0,
                    FreshnessMultiplier = 0.95,
                    ClaimScore = 0.85,
                    AssertedStatus = VexStatus.NotAffected,
                    Accepted = true,
                }),
            EvidenceRefs = ImmutableArray.Create("evidence-1"),
        },
        PolicyHash = "sha256:policy123",
        LatticeVersion = "1.0.0",
        EvaluatedAt = DateTimeOffset.Parse("2025-01-01T12:00:00Z"),
        ManifestDigest = "sha256:placeholder",
    };
}
|
||||
303
src/Cli/StellaOps.Cli/Commands/AirGapCommandGroup.cs
Normal file
303
src/Cli/StellaOps.Cli/Commands/AirGapCommandGroup.cs
Normal file
@@ -0,0 +1,303 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AirGapCommandGroup.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-005, SEAL-011, SEAL-018 - CLI commands for airgap operations.
|
||||
// Description: CLI commands for knowledge snapshot export, import, and diff.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.CommandLine;
|
||||
using StellaOps.Cli.Extensions;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
internal static class AirGapCommandGroup
|
||||
{
|
||||
/// <summary>
/// Builds the root "airgap" command and attaches its sub-commands
/// (export, import, diff, status) in that order.
/// </summary>
internal static Command BuildAirGapCommand(
    IServiceProvider services,
    Option<bool> verboseOption,
    CancellationToken cancellationToken)
{
    var root = new Command("airgap", "Air-gap commands for sealed knowledge management.");

    // Each sub-command builder wires its own options and action handler.
    foreach (var subCommand in new[]
             {
                 BuildExportCommand(services, verboseOption, cancellationToken),
                 BuildImportCommand(services, verboseOption, cancellationToken),
                 BuildDiffCommand(services, verboseOption, cancellationToken),
                 BuildStatusCommand(services, verboseOption, cancellationToken),
             })
    {
        root.Add(subCommand);
    }

    return root;
}
|
||||
|
||||
private static Command BuildExportCommand(
|
||||
IServiceProvider services,
|
||||
Option<bool> verboseOption,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var outputOption = new Option<string>("--output", "-o")
|
||||
{
|
||||
Description = "Output path for the knowledge snapshot (default: knowledge-<date>.tar.gz)"
|
||||
};
|
||||
|
||||
var includeAdvisoriesOption = new Option<bool>("--include-advisories")
|
||||
{
|
||||
Description = "Include advisory feeds in the snapshot."
|
||||
};
|
||||
includeAdvisoriesOption.SetDefaultValue(true);
|
||||
|
||||
var includeVexOption = new Option<bool>("--include-vex")
|
||||
{
|
||||
Description = "Include VEX statements in the snapshot."
|
||||
};
|
||||
includeVexOption.SetDefaultValue(true);
|
||||
|
||||
var includePoliciesOption = new Option<bool>("--include-policies")
|
||||
{
|
||||
Description = "Include policy bundles in the snapshot."
|
||||
};
|
||||
includePoliciesOption.SetDefaultValue(true);
|
||||
|
||||
var includeTrustRootsOption = new Option<bool>("--include-trust-roots")
|
||||
{
|
||||
Description = "Include trust roots in the snapshot."
|
||||
};
|
||||
includeTrustRootsOption.SetDefaultValue(true);
|
||||
|
||||
var signOption = new Option<bool>("--sign")
|
||||
{
|
||||
Description = "Sign the snapshot manifest."
|
||||
};
|
||||
signOption.SetDefaultValue(true);
|
||||
|
||||
var signingKeyOption = new Option<string?>("--signing-key")
|
||||
{
|
||||
Description = "Path to signing key file or key ID."
|
||||
};
|
||||
|
||||
var timeAnchorOption = new Option<string?>("--time-anchor")
|
||||
{
|
||||
Description = "Time anchor source: 'local', 'roughtime:<server>', or path to token file."
|
||||
};
|
||||
|
||||
var feedsOption = new Option<string[]>("--feeds")
|
||||
{
|
||||
Description = "Specific advisory feeds to include (e.g., nvd, ghsa, osv). Empty = all."
|
||||
};
|
||||
|
||||
var ecosystemsOption = new Option<string[]>("--ecosystems")
|
||||
{
|
||||
Description = "Specific ecosystems to include (e.g., npm, pypi, maven). Empty = all."
|
||||
};
|
||||
|
||||
var command = new Command("export", "Export a sealed knowledge snapshot for air-gapped transfer.")
|
||||
{
|
||||
outputOption,
|
||||
includeAdvisoriesOption,
|
||||
includeVexOption,
|
||||
includePoliciesOption,
|
||||
includeTrustRootsOption,
|
||||
signOption,
|
||||
signingKeyOption,
|
||||
timeAnchorOption,
|
||||
feedsOption,
|
||||
ecosystemsOption,
|
||||
verboseOption
|
||||
};
|
||||
|
||||
command.SetAction(parseResult =>
|
||||
{
|
||||
var output = parseResult.GetValue(outputOption);
|
||||
var includeAdvisories = parseResult.GetValue(includeAdvisoriesOption);
|
||||
var includeVex = parseResult.GetValue(includeVexOption);
|
||||
var includePolicies = parseResult.GetValue(includePoliciesOption);
|
||||
var includeTrustRoots = parseResult.GetValue(includeTrustRootsOption);
|
||||
var sign = parseResult.GetValue(signOption);
|
||||
var signingKey = parseResult.GetValue(signingKeyOption);
|
||||
var timeAnchor = parseResult.GetValue(timeAnchorOption);
|
||||
var feeds = parseResult.GetValue(feedsOption) ?? Array.Empty<string>();
|
||||
var ecosystems = parseResult.GetValue(ecosystemsOption) ?? Array.Empty<string>();
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleAirGapExportAsync(
|
||||
services,
|
||||
output,
|
||||
includeAdvisories,
|
||||
includeVex,
|
||||
includePolicies,
|
||||
includeTrustRoots,
|
||||
sign,
|
||||
signingKey,
|
||||
timeAnchor,
|
||||
feeds,
|
||||
ecosystems,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
private static Command BuildImportCommand(
|
||||
IServiceProvider services,
|
||||
Option<bool> verboseOption,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var bundleArg = new Argument<string>("bundle")
|
||||
{
|
||||
Description = "Path to the knowledge snapshot bundle (knowledge-*.tar.gz)"
|
||||
};
|
||||
|
||||
var verifyOnlyOption = new Option<bool>("--verify-only")
|
||||
{
|
||||
Description = "Verify the bundle without applying changes."
|
||||
};
|
||||
|
||||
var forceOption = new Option<bool>("--force")
|
||||
{
|
||||
Description = "Force import even if staleness policy would reject it."
|
||||
};
|
||||
|
||||
var trustPolicyOption = new Option<string?>("--trust-policy")
|
||||
{
|
||||
Description = "Path to trust policy file for signature verification."
|
||||
};
|
||||
|
||||
var maxAgeHoursOption = new Option<int?>("--max-age-hours")
|
||||
{
|
||||
Description = "Maximum age for the snapshot (overrides staleness policy)."
|
||||
};
|
||||
|
||||
var quarantineOption = new Option<bool>("--quarantine-on-failure")
|
||||
{
|
||||
Description = "Quarantine the bundle if validation fails."
|
||||
};
|
||||
quarantineOption.SetDefaultValue(true);
|
||||
|
||||
var outputOption = new Option<string>("--output", "-o")
|
||||
{
|
||||
Description = "Output format: text, json"
|
||||
}.SetDefaultValue("text").FromAmong("text", "json");
|
||||
|
||||
var command = new Command("import", "Import a sealed knowledge snapshot.")
|
||||
{
|
||||
bundleArg,
|
||||
verifyOnlyOption,
|
||||
forceOption,
|
||||
trustPolicyOption,
|
||||
maxAgeHoursOption,
|
||||
quarantineOption,
|
||||
outputOption,
|
||||
verboseOption
|
||||
};
|
||||
|
||||
command.SetAction(parseResult =>
|
||||
{
|
||||
var bundle = parseResult.GetValue(bundleArg) ?? string.Empty;
|
||||
var verifyOnly = parseResult.GetValue(verifyOnlyOption);
|
||||
var force = parseResult.GetValue(forceOption);
|
||||
var trustPolicy = parseResult.GetValue(trustPolicyOption);
|
||||
var maxAgeHours = parseResult.GetValue(maxAgeHoursOption);
|
||||
var quarantine = parseResult.GetValue(quarantineOption);
|
||||
var output = parseResult.GetValue(outputOption) ?? "text";
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleAirGapImportAsync(
|
||||
services,
|
||||
bundle,
|
||||
verifyOnly,
|
||||
force,
|
||||
trustPolicy,
|
||||
maxAgeHours,
|
||||
quarantine,
|
||||
output,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
private static Command BuildDiffCommand(
|
||||
IServiceProvider services,
|
||||
Option<bool> verboseOption,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var baseArg = new Argument<string>("base")
|
||||
{
|
||||
Description = "Path to the base snapshot bundle (older)"
|
||||
};
|
||||
|
||||
var targetArg = new Argument<string>("target")
|
||||
{
|
||||
Description = "Path to the target snapshot bundle (newer)"
|
||||
};
|
||||
|
||||
var componentOption = new Option<string?>("--component")
|
||||
{
|
||||
Description = "Filter diff to specific component: advisories, vex, policies"
|
||||
}.FromAmong("advisories", "vex", "policies", "all");
|
||||
|
||||
var outputOption = new Option<string>("--output", "-o")
|
||||
{
|
||||
Description = "Output format: text, json"
|
||||
}.SetDefaultValue("text").FromAmong("text", "json");
|
||||
|
||||
var command = new Command("diff", "Compare two knowledge snapshots.")
|
||||
{
|
||||
baseArg,
|
||||
targetArg,
|
||||
componentOption,
|
||||
outputOption,
|
||||
verboseOption
|
||||
};
|
||||
|
||||
command.SetAction(parseResult =>
|
||||
{
|
||||
var baseBundle = parseResult.GetValue(baseArg) ?? string.Empty;
|
||||
var targetBundle = parseResult.GetValue(targetArg) ?? string.Empty;
|
||||
var component = parseResult.GetValue(componentOption);
|
||||
var output = parseResult.GetValue(outputOption) ?? "text";
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleAirGapDiffAsync(
|
||||
services,
|
||||
baseBundle,
|
||||
targetBundle,
|
||||
component,
|
||||
output,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
private static Command BuildStatusCommand(
|
||||
IServiceProvider services,
|
||||
Option<bool> verboseOption,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var outputOption = new Option<string>("--output", "-o")
|
||||
{
|
||||
Description = "Output format: text, json"
|
||||
}.SetDefaultValue("text").FromAmong("text", "json");
|
||||
|
||||
var command = new Command("status", "Show current air-gap state and staleness status.")
|
||||
{
|
||||
outputOption,
|
||||
verboseOption
|
||||
};
|
||||
|
||||
command.SetAction(parseResult =>
|
||||
{
|
||||
var output = parseResult.GetValue(outputOption) ?? "text";
|
||||
var verbose = parseResult.GetValue(verboseOption);
|
||||
|
||||
return CommandHandlers.HandleAirGapStatusAsync(
|
||||
services,
|
||||
output,
|
||||
verbose,
|
||||
cancellationToken);
|
||||
});
|
||||
|
||||
return command;
|
||||
}
|
||||
}
|
||||
236
src/Cli/StellaOps.Cli/Commands/AuditCommandGroup.cs
Normal file
236
src/Cli/StellaOps.Cli/Commands/AuditCommandGroup.cs
Normal file
@@ -0,0 +1,236 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AuditCommandGroup.cs
|
||||
// Sprint: SPRINT_4300_0001_0002_one_command_audit_replay
|
||||
// Description: CLI commands for audit pack export and replay.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.CommandLine;
|
||||
using StellaOps.Cli.Extensions;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
/// <summary>
/// Builds the <c>audit</c> command group: <c>export</c>, <c>replay</c>, and
/// <c>verify</c> sub-commands for audit pack export and offline replay.
/// </summary>
internal static class AuditCommandGroup
{
    /// <summary>
    /// Assembles the root <c>audit</c> command and attaches all sub-commands.
    /// </summary>
    /// <param name="services">Service provider forwarded to command handlers.</param>
    /// <param name="verboseOption">Shared global <c>--verbose</c> option.</param>
    /// <param name="cancellationToken">Token forwarded to async handlers.</param>
    internal static Command BuildAuditCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var audit = new Command("audit", "Audit pack commands for export and offline replay.");

        audit.Add(BuildExportCommand(services, verboseOption, cancellationToken));
        audit.Add(BuildReplayCommand(services, verboseOption, cancellationToken));
        audit.Add(BuildVerifyCommand(services, verboseOption, cancellationToken));

        return audit;
    }

    /// <summary>
    /// Builds <c>audit export</c>: exports a (by default signed) audit pack for
    /// a given scan ID.
    /// </summary>
    private static Command BuildExportCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        // Required = true: parsing fails before the handler runs if --scan-id
        // is omitted. The handler re-validates the value defensively anyway.
        var scanIdOption = new Option<string>("--scan-id", "-s")
        {
            Description = "Scan ID to export audit pack for.",
            Required = true
        };

        var outputOption = new Option<string>("--output", "-o")
        {
            Description = "Output path for the audit pack (default: audit-<scan-id>.tar.gz)"
        };

        var nameOption = new Option<string?>("--name")
        {
            Description = "Human-readable name for the audit pack."
        };

        // Signing is opt-out, not opt-in.
        var signOption = new Option<bool>("--sign")
        {
            Description = "Sign the audit pack manifest."
        };
        signOption.SetDefaultValue(true);

        var signingKeyOption = new Option<string?>("--signing-key")
        {
            Description = "Path to signing key file."
        };

        var includeFeedsOption = new Option<bool>("--include-feeds")
        {
            Description = "Include feed snapshot in the bundle."
        };
        includeFeedsOption.SetDefaultValue(true);

        var includePolicyOption = new Option<bool>("--include-policy")
        {
            Description = "Include policy snapshot in the bundle."
        };
        includePolicyOption.SetDefaultValue(true);

        var minimalOption = new Option<bool>("--minimal")
        {
            Description = "Create minimal bundle (only required evidence)."
        };

        var command = new Command("export", "Export an audit pack for offline verification.")
        {
            scanIdOption,
            outputOption,
            nameOption,
            signOption,
            signingKeyOption,
            includeFeedsOption,
            includePolicyOption,
            minimalOption,
            verboseOption
        };

        command.SetAction(parseResult =>
        {
            var scanId = parseResult.GetValue(scanIdOption) ?? string.Empty;
            // May be null; the handler derives audit-<scan-id>.tar.gz when unset.
            var output = parseResult.GetValue(outputOption);
            var name = parseResult.GetValue(nameOption);
            var sign = parseResult.GetValue(signOption);
            var signingKey = parseResult.GetValue(signingKeyOption);
            var includeFeeds = parseResult.GetValue(includeFeedsOption);
            var includePolicy = parseResult.GetValue(includePolicyOption);
            var minimal = parseResult.GetValue(minimalOption);
            var verbose = parseResult.GetValue(verboseOption);

            return CommandHandlers.HandleAuditExportAsync(
                services,
                scanId,
                output,
                name,
                sign,
                signingKey,
                includeFeeds,
                includePolicy,
                minimal,
                verbose,
                cancellationToken);
        });

        return command;
    }

    /// <summary>
    /// Builds <c>audit replay</c>: replays an audit pack offline and compares
    /// results with the original scan.
    /// </summary>
    private static Command BuildReplayCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var bundleArg = new Argument<string>("bundle")
        {
            Description = "Path to audit pack bundle (audit-*.tar.gz)"
        };

        var outputDirOption = new Option<string?>("--output-dir")
        {
            Description = "Directory for replay output and intermediate files."
        };

        var formatOption = new Option<string>("--format", "-f")
        {
            Description = "Output format: text, json"
        }.SetDefaultValue("text").FromAmong("text", "json");

        var strictOption = new Option<bool>("--strict")
        {
            Description = "Fail if any input differs from original scan."
        };

        var offlineOption = new Option<bool>("--offline")
        {
            Description = "Enforce offline mode (no network calls)."
        };

        var trustStoreOption = new Option<string?>("--trust-store")
        {
            Description = "Path to offline trust store directory."
        };

        var timeAnchorOption = new Option<string?>("--time-anchor")
        {
            Description = "Override evaluation time (ISO-8601 format)."
        };

        var command = new Command("replay", "Replay and verify an audit pack offline.")
        {
            bundleArg,
            outputDirOption,
            formatOption,
            strictOption,
            offlineOption,
            trustStoreOption,
            timeAnchorOption,
            verboseOption
        };

        command.SetAction(parseResult =>
        {
            var bundle = parseResult.GetValue(bundleArg) ?? string.Empty;
            var outputDir = parseResult.GetValue(outputDirOption);
            // "text" fallback is belt-and-braces; the option declares that default.
            var format = parseResult.GetValue(formatOption) ?? "text";
            var strict = parseResult.GetValue(strictOption);
            var offline = parseResult.GetValue(offlineOption);
            var trustStore = parseResult.GetValue(trustStoreOption);
            var timeAnchor = parseResult.GetValue(timeAnchorOption);
            var verbose = parseResult.GetValue(verboseOption);

            return CommandHandlers.HandleAuditReplayAsync(
                services,
                bundle,
                outputDir,
                format,
                strict,
                offline,
                trustStore,
                timeAnchor,
                verbose,
                cancellationToken);
        });

        return command;
    }

    /// <summary>
    /// Builds <c>audit verify</c>: checks audit pack integrity (manifest,
    /// digest, signature) without replaying it.
    /// </summary>
    private static Command BuildVerifyCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var bundleArg = new Argument<string>("bundle")
        {
            Description = "Path to audit pack bundle (audit-*.tar.gz)"
        };

        var formatOption = new Option<string>("--format", "-f")
        {
            Description = "Output format: text, json"
        }.SetDefaultValue("text").FromAmong("text", "json");

        var command = new Command("verify", "Verify audit pack integrity without replay.")
        {
            bundleArg,
            formatOption,
            verboseOption
        };

        command.SetAction(parseResult =>
        {
            var bundle = parseResult.GetValue(bundleArg) ?? string.Empty;
            var format = parseResult.GetValue(formatOption) ?? "text";
            var verbose = parseResult.GetValue(verboseOption);

            return CommandHandlers.HandleAuditVerifyAsync(
                services,
                bundle,
                format,
                verbose,
                cancellationToken);
        });

        return command;
    }
}
|
||||
@@ -220,13 +220,13 @@ internal static class BinaryCommandGroup
|
||||
var graphOption = new Option<string>("--graph", new[] { "-g" })
|
||||
{
|
||||
Description = "Path to graph file.",
|
||||
IsRequired = true
|
||||
Required = true
|
||||
};
|
||||
|
||||
var dsseOption = new Option<string>("--dsse", new[] { "-d" })
|
||||
{
|
||||
Description = "Path to DSSE envelope.",
|
||||
IsRequired = true
|
||||
Required = true
|
||||
};
|
||||
|
||||
var publicKeyOption = new Option<string?>("--public-key", new[] { "-k" })
|
||||
|
||||
@@ -35,7 +35,8 @@ internal static class BinaryCommandHandlers
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var logger = services.GetRequiredService<ILogger<Program>>();
|
||||
var loggerFactory = services.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger("binary-submit");
|
||||
|
||||
if (string.IsNullOrWhiteSpace(graphPath) && string.IsNullOrWhiteSpace(binaryPath))
|
||||
{
|
||||
@@ -129,7 +130,8 @@ internal static class BinaryCommandHandlers
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var logger = services.GetRequiredService<ILogger<Program>>();
|
||||
var loggerFactory = services.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger("binary-info");
|
||||
|
||||
try
|
||||
{
|
||||
@@ -193,7 +195,8 @@ internal static class BinaryCommandHandlers
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var logger = services.GetRequiredService<ILogger<Program>>();
|
||||
var loggerFactory = services.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger("binary-symbols");
|
||||
|
||||
try
|
||||
{
|
||||
@@ -280,7 +283,8 @@ internal static class BinaryCommandHandlers
|
||||
bool verbose,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var logger = services.GetRequiredService<ILogger<Program>>();
|
||||
var loggerFactory = services.GetRequiredService<ILoggerFactory>();
|
||||
var logger = loggerFactory.CreateLogger("binary-verify");
|
||||
|
||||
try
|
||||
{
|
||||
|
||||
@@ -93,8 +93,9 @@ internal static class CommandFactory
|
||||
root.Add(ScoreReplayCommandGroup.BuildScoreCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(UnknownsCommandGroup.BuildUnknownsCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(ProofCommandGroup.BuildProofCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(ReplayCommandGroup.BuildReplayCommand(verboseOption, cancellationToken));
|
||||
root.Add(ReplayCommandGroup.BuildReplayCommand(services, verboseOption, cancellationToken));
|
||||
root.Add(DeltaCommandGroup.BuildDeltaCommand(verboseOption, cancellationToken));
|
||||
root.Add(ReachabilityCommandGroup.BuildReachabilityCommand(services, verboseOption, cancellationToken));
|
||||
|
||||
// Add scan graph subcommand to existing scan command
|
||||
var scanCommand = root.Children.OfType<Command>().FirstOrDefault(c => c.Name == "scan");
|
||||
@@ -2690,6 +2691,9 @@ internal static class CommandFactory
|
||||
|
||||
policy.Add(verifySignature);
|
||||
|
||||
// Add policy pack commands (validate, install, list-packs)
|
||||
PolicyCommandGroup.AddPolicyPackCommands(policy, verboseOption, cancellationToken);
|
||||
|
||||
return policy;
|
||||
}
|
||||
|
||||
|
||||
107
src/Cli/StellaOps.Cli/Commands/CommandHandlers.AirGap.cs
Normal file
107
src/Cli/StellaOps.Cli/Commands/CommandHandlers.AirGap.cs
Normal file
@@ -0,0 +1,107 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CommandHandlers.AirGap.cs
|
||||
// Sprint: SPRINT_4300_0001_0002_one_command_audit_replay
|
||||
// Description: Command handlers for airgap operations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Spectre.Console;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
/// <summary>
/// Airgap command handlers (export, import, diff, status).
/// NOTE(review): all four handlers are currently stubs — they print their
/// arguments, await a 100 ms delay, and return 0. Real implementations are
/// expected to replace the stub bodies.
/// </summary>
internal static partial class CommandHandlers
{
    /// <summary>
    /// Handles <c>airgap export</c>. Stub: echoes the requested configuration.
    /// </summary>
    /// <returns>Process exit code (0 = success).</returns>
    internal static async Task<int> HandleAirGapExportAsync(
        IServiceProvider services,
        string output,
        bool includeAdvisories,
        bool includeVex,
        bool includePolicies,
        bool includeTrustRoots,
        bool sign,
        string? signingKey,
        string? timeAnchor,
        string[] feeds,
        string[] ecosystems,
        bool verbose,
        CancellationToken cancellationToken)
    {
        // Defensive default: the CLI option behind `output` declares no default
        // value, so despite the non-nullable annotation the caller can pass
        // null when --output is omitted; Markup.Escape(null) would throw.
        // Mirrors the documented default name "knowledge-<date>.tar.gz".
        var resolvedOutput = string.IsNullOrWhiteSpace(output)
            ? $"knowledge-{DateTime.UtcNow:yyyyMMdd}.tar.gz"
            : output;

        AnsiConsole.MarkupLine("[blue]Exporting airgap bundle...[/]");
        AnsiConsole.MarkupLine($"  Output: [bold]{Markup.Escape(resolvedOutput)}[/]");
        AnsiConsole.MarkupLine($"  Advisories: {includeAdvisories}");
        AnsiConsole.MarkupLine($"  VEX: {includeVex}");
        AnsiConsole.MarkupLine($"  Policies: {includePolicies}");
        AnsiConsole.MarkupLine($"  Trust Roots: {includeTrustRoots}");

        // Stub implementation
        await Task.Delay(100, cancellationToken);

        AnsiConsole.MarkupLine("[green]Airgap bundle exported successfully.[/]");
        return 0;
    }

    /// <summary>
    /// Handles <c>airgap import</c>. Stub: echoes the requested configuration.
    /// </summary>
    /// <returns>Process exit code (0 = success).</returns>
    internal static async Task<int> HandleAirGapImportAsync(
        IServiceProvider services,
        string bundle,
        bool verifyOnly,
        bool force,
        string? trustPolicy,
        int? maxAgeHours,
        bool quarantine,
        string output,
        bool verbose,
        CancellationToken cancellationToken)
    {
        AnsiConsole.MarkupLine("[blue]Importing airgap bundle...[/]");
        AnsiConsole.MarkupLine($"  Bundle: [bold]{Markup.Escape(bundle)}[/]");
        AnsiConsole.MarkupLine($"  Verify Only: {verifyOnly}");
        AnsiConsole.MarkupLine($"  Force: {force}");
        AnsiConsole.MarkupLine($"  Quarantine: {quarantine}");

        // Stub implementation
        await Task.Delay(100, cancellationToken);

        AnsiConsole.MarkupLine("[green]Airgap bundle imported successfully.[/]");
        return 0;
    }

    /// <summary>
    /// Handles <c>airgap diff</c>. Stub: echoes the bundle paths and filter.
    /// </summary>
    /// <returns>Process exit code (0 = success).</returns>
    internal static async Task<int> HandleAirGapDiffAsync(
        IServiceProvider services,
        string baseBundle,
        string targetBundle,
        string? component,
        string output,
        bool verbose,
        CancellationToken cancellationToken)
    {
        AnsiConsole.MarkupLine("[blue]Computing airgap bundle diff...[/]");
        AnsiConsole.MarkupLine($"  Base: [bold]{Markup.Escape(baseBundle)}[/]");
        AnsiConsole.MarkupLine($"  Target: [bold]{Markup.Escape(targetBundle)}[/]");
        if (component != null)
        {
            AnsiConsole.MarkupLine($"  Component: [bold]{Markup.Escape(component)}[/]");
        }

        // Stub implementation
        await Task.Delay(100, cancellationToken);

        AnsiConsole.MarkupLine("[green]Diff computed.[/]");
        return 0;
    }

    /// <summary>
    /// Handles <c>airgap status</c>. Stub: always reports airgap mode enabled.
    /// </summary>
    /// <returns>Process exit code (0 = success).</returns>
    internal static async Task<int> HandleAirGapStatusAsync(
        IServiceProvider services,
        string output,
        bool verbose,
        CancellationToken cancellationToken)
    {
        AnsiConsole.MarkupLine("[blue]Checking airgap status...[/]");

        // Stub implementation
        await Task.Delay(100, cancellationToken);

        AnsiConsole.MarkupLine("[green]Airgap mode: Enabled[/]");
        return 0;
    }
}
|
||||
475
src/Cli/StellaOps.Cli/Commands/CommandHandlers.Audit.cs
Normal file
475
src/Cli/StellaOps.Cli/Commands/CommandHandlers.Audit.cs
Normal file
@@ -0,0 +1,475 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CommandHandlers.Audit.cs
|
||||
// Sprint: SPRINT_4300_0001_0002_one_command_audit_replay
|
||||
// Description: Command handlers for audit pack export, replay, and verification.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.AuditPack.Models;
|
||||
using StellaOps.AuditPack.Services;
|
||||
using StellaOps.Cli.Configuration;
|
||||
using StellaOps.Cli.Services;
|
||||
using StellaOps.Cli.Telemetry;
|
||||
using Spectre.Console;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
internal static partial class CommandHandlers
|
||||
{
|
||||
private static readonly JsonSerializerOptions AuditJsonOptions = new()
|
||||
{
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
|
||||
};
|
||||
|
||||
    /// <summary>
    /// Handles <c>audit export</c>: builds an audit pack for <paramref name="scanId"/>
    /// via <see cref="IAuditPackBuilder"/> and exports it as a (by default signed,
    /// compressed) archive.
    /// </summary>
    /// <param name="output">Target archive path; defaults to <c>audit-&lt;scanId&gt;.tar.gz</c> when null.</param>
    /// <returns>Exit code: 0 on success, 2 on any validation or export failure.</returns>
    internal static async Task<int> HandleAuditExportAsync(
        IServiceProvider services,
        string scanId,
        string? output,
        string? name,
        bool sign,
        string? signingKey,
        bool includeFeeds,
        bool includePolicy,
        bool minimal,
        bool verbose,
        CancellationToken cancellationToken)
    {
        await using var scope = services.CreateAsyncScope();
        var loggerFactory = scope.ServiceProvider.GetRequiredService<ILoggerFactory>();
        var logger = loggerFactory.CreateLogger("audit-export");
        // NOTE(review): `options` is resolved but never used below; its only
        // effect is to throw if StellaOpsCliOptions is not registered.
        var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();

        using var activity = CliActivitySource.Instance.StartActivity("cli.audit.export", ActivityKind.Client);
        using var duration = CliMetrics.MeasureCommandDuration("audit export");

        // Defensive re-check: the CLI option is declared Required, but the
        // handler does not rely on that.
        if (string.IsNullOrWhiteSpace(scanId))
        {
            AnsiConsole.MarkupLine("[red]Error:[/] --scan-id is required.");
            Environment.ExitCode = 2;
            return 2;
        }

        var outputPath = output ?? $"audit-{scanId}.tar.gz";

        try
        {
            AnsiConsole.MarkupLine($"Exporting audit pack for scan [bold]{Markup.Escape(scanId)}[/]...");

            // GetService (not GetRequiredService): absence is reported as a CLI
            // error rather than an exception.
            var builder = scope.ServiceProvider.GetService<IAuditPackBuilder>();
            if (builder is null)
            {
                AnsiConsole.MarkupLine("[red]Error:[/] Audit pack builder not available.");
                Environment.ExitCode = 2;
                return 2;
            }

            // Build the audit pack
            var packOptions = new AuditPackOptions
            {
                Name = name,
                IncludeFeeds = includeFeeds,
                IncludePolicies = includePolicy,
                MinimizeSize = minimal
            };

            // NOTE(review): constructs a ScanResult from the ID alone —
            // presumably the builder hydrates the remaining scan data itself;
            // confirm against IAuditPackBuilder's contract.
            var scanResult = new ScanResult(scanId);
            var pack = await builder.BuildAsync(scanResult, packOptions, cancellationToken).ConfigureAwait(false);

            // Export to archive
            var exportOptions = new ExportOptions
            {
                Sign = sign,
                SigningKey = signingKey,
                Compress = true
            };

            await builder.ExportAsync(pack, outputPath, exportOptions, cancellationToken).ConfigureAwait(false);

            AnsiConsole.WriteLine();
            AnsiConsole.MarkupLine($"[green]Success![/] Audit pack exported to: [bold]{Markup.Escape(outputPath)}[/]");
            AnsiConsole.MarkupLine($"Pack ID: {Markup.Escape(pack.PackId)}");
            AnsiConsole.MarkupLine($"Pack digest: {Markup.Escape(pack.PackDigest ?? "unsigned")}");

            if (verbose)
            {
                AnsiConsole.WriteLine();
                AnsiConsole.MarkupLine("Contents:");
                AnsiConsole.MarkupLine($"  Files: {pack.Contents.FileCount}");
                AnsiConsole.MarkupLine($"  Size: {FormatBytes(pack.Contents.TotalSizeBytes)}");
                AnsiConsole.MarkupLine($"  Attestations: {pack.Attestations.Length}");
                AnsiConsole.MarkupLine($"  SBOMs: {pack.Sboms.Length}");
                AnsiConsole.MarkupLine($"  VEX documents: {pack.VexDocuments.Length}");
            }

            Environment.ExitCode = 0;
            return 0;
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Audit export failed for scan {ScanId}", scanId);
            AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
            Environment.ExitCode = 2;
            return 2;
        }
    }
|
||||
|
||||
    /// <summary>
    /// Handles <c>audit replay</c>: imports an audit pack bundle and replays it,
    /// comparing the replayed result with the original scan.
    /// </summary>
    /// <param name="format">"text" or "json"; json mode suppresses progress markup.</param>
    /// <returns>Exit code: 0 = match, 1 = drift detected, 2 = error/other status.</returns>
    internal static async Task<int> HandleAuditReplayAsync(
        IServiceProvider services,
        string bundlePath,
        string? outputDir,
        string format,
        bool strict,
        bool offline,
        string? trustStore,
        string? timeAnchor,
        bool verbose,
        CancellationToken cancellationToken)
    {
        await using var scope = services.CreateAsyncScope();
        var loggerFactory = scope.ServiceProvider.GetRequiredService<ILoggerFactory>();
        var logger = loggerFactory.CreateLogger("audit-replay");
        // NOTE(review): `options` is resolved but never used below; its only
        // effect is to throw if StellaOpsCliOptions is not registered.
        var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();

        using var activity = CliActivitySource.Instance.StartActivity("cli.audit.replay", ActivityKind.Client);
        using var duration = CliMetrics.MeasureCommandDuration("audit replay");

        if (string.IsNullOrWhiteSpace(bundlePath))
        {
            WriteAuditError("Bundle path is required.", format);
            Environment.ExitCode = 2;
            return 2;
        }

        if (!File.Exists(bundlePath))
        {
            WriteAuditError($"Bundle not found: {bundlePath}", format);
            Environment.ExitCode = 2;
            return 2;
        }

        // Enforce offline mode if requested
        if (offline)
        {
            // NOTE(review): OfflineModeGuard.IsOffline looks like a
            // process-wide static flag and is never reset afterwards — confirm
            // that is intentional for a single-command CLI process.
            OfflineModeGuard.IsOffline = true;
            logger.LogDebug("Running in offline mode as requested.");
        }

        try
        {
            var importer = scope.ServiceProvider.GetService<IAuditPackImporter>();
            var replayer = scope.ServiceProvider.GetService<IAuditPackReplayer>();

            if (importer is null || replayer is null)
            {
                WriteAuditError("Audit pack services not available.", format);
                Environment.ExitCode = 2;
                return 2;
            }

            // Parse time anchor if provided
            DateTimeOffset? timeAnchorParsed = null;
            if (!string.IsNullOrWhiteSpace(timeAnchor))
            {
                if (DateTimeOffset.TryParse(timeAnchor, out var parsed))
                {
                    timeAnchorParsed = parsed;
                }
                else
                {
                    WriteAuditError($"Invalid time anchor format: {timeAnchor}", format);
                    Environment.ExitCode = 2;
                    return 2;
                }
            }

            // Import the audit pack; progress markup only in text mode so json
            // output stays machine-parseable.
            if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
            {
                AnsiConsole.MarkupLine($"Loading audit pack: [bold]{Markup.Escape(bundlePath)}[/]...");
            }

            var importOptions = new ImportOptions
            {
                TrustStorePath = trustStore,
                OutputDirectory = outputDir
            };
            var pack = await importer.ImportAsync(bundlePath, importOptions, cancellationToken).ConfigureAwait(false);

            // Execute replay
            if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
            {
                AnsiConsole.MarkupLine("Executing replay...");
            }

            var replayOptions = new ReplayOptions
            {
                Strict = strict,
                Offline = offline,
                TimeAnchor = timeAnchorParsed,
                OutputDirectory = outputDir
            };
            var result = await replayer.ReplayAsync(pack, replayOptions, cancellationToken).ConfigureAwait(false);

            // Output results
            WriteAuditReplayResult(result, format, verbose);

            // Exit code based on result: 0 = replay matched original,
            // 1 = drift detected, 2 = any other status.
            var exitCode = result.Status switch
            {
                AuditReplayStatus.Match => 0,
                AuditReplayStatus.Drift => 1,
                _ => 2
            };

            Environment.ExitCode = exitCode;
            return exitCode;
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Audit replay failed for bundle {BundlePath}", bundlePath);
            WriteAuditError($"Replay failed: {ex.Message}", format);
            Environment.ExitCode = 2;
            return 2;
        }
    }
|
||||
|
||||
/// <summary>
/// Handler for `audit verify`: imports the bundle in verify-only mode and reports
/// pack metadata in the requested format.
/// Exit codes: 0 = bundle verified, 2 = invalid input or verification failure.
/// </summary>
internal static async Task<int> HandleAuditVerifyAsync(
    IServiceProvider services,
    string bundlePath,
    string format,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var logger = scope.ServiceProvider
        .GetRequiredService<ILoggerFactory>()
        .CreateLogger("audit-verify");

    using var activity = CliActivitySource.Instance.StartActivity("cli.audit.verify", ActivityKind.Client);
    using var duration = CliMetrics.MeasureCommandDuration("audit verify");

    // Shared failure path: emit the error in the requested format and exit with code 2.
    int Fail(string message)
    {
        WriteAuditError(message, format);
        Environment.ExitCode = 2;
        return 2;
    }

    if (string.IsNullOrWhiteSpace(bundlePath))
    {
        return Fail("Bundle path is required.");
    }

    if (!File.Exists(bundlePath))
    {
        return Fail($"Bundle not found: {bundlePath}");
    }

    try
    {
        var importer = scope.ServiceProvider.GetService<IAuditPackImporter>();
        if (importer is null)
        {
            return Fail("Audit pack importer not available.");
        }

        // VerifyOnly: validate the bundle without extracting it to an output directory.
        var pack = await importer
            .ImportAsync(bundlePath, new ImportOptions { VerifyOnly = true }, cancellationToken)
            .ConfigureAwait(false);

        // NOTE(review): this reflects only the *presence* of a signature on the pack;
        // cryptographic validation presumably happens inside the importer — confirm.
        var hasSignature = !string.IsNullOrWhiteSpace(pack.Signature);

        if (string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
        {
            var payload = new
            {
                status = "valid",
                packId = pack.PackId,
                packDigest = pack.PackDigest,
                createdAt = pack.CreatedAt,
                fileCount = pack.Contents.FileCount,
                signatureValid = hasSignature
            };
            AnsiConsole.WriteLine(JsonSerializer.Serialize(payload, AuditJsonOptions));
        }
        else
        {
            AnsiConsole.MarkupLine("[green]Bundle verification passed![/]");
            AnsiConsole.WriteLine();
            AnsiConsole.MarkupLine($"Pack ID: {Markup.Escape(pack.PackId)}");
            AnsiConsole.MarkupLine($"Pack digest: {Markup.Escape(pack.PackDigest ?? "N/A")}");
            AnsiConsole.MarkupLine($"Created: {pack.CreatedAt:u}");
            AnsiConsole.MarkupLine($"Files: {pack.Contents.FileCount}");
            AnsiConsole.MarkupLine($"Signed: {(hasSignature ? "[green]Yes[/]" : "[yellow]No[/]")}");

            if (verbose)
            {
                AnsiConsole.WriteLine();
                AnsiConsole.MarkupLine("Contents:");
                AnsiConsole.MarkupLine($" Attestations: {pack.Attestations.Length}");
                AnsiConsole.MarkupLine($" SBOMs: {pack.Sboms.Length}");
                AnsiConsole.MarkupLine($" VEX documents: {pack.VexDocuments.Length}");
                AnsiConsole.MarkupLine($" Trust roots: {pack.TrustRoots.Length}");
            }
        }

        Environment.ExitCode = 0;
        return 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Bundle verification failed for {BundlePath}", bundlePath);
        return Fail($"Verification failed: {ex.Message}");
    }
}
|
||||
|
||||
/// <summary>
/// Renders an <see cref="AuditReplayResult"/> either as raw JSON (format "json")
/// or as a human-readable Spectre.Console report with a digest-comparison table,
/// verdict summary, and optional drift/error listings.
/// </summary>
private static void WriteAuditReplayResult(AuditReplayResult result, string format, bool verbose)
{
    if (string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
    {
        // Machine-readable mode: dump the full result object and nothing else.
        AnsiConsole.WriteLine(JsonSerializer.Serialize(result, AuditJsonOptions));
        return;
    }

    // Prints a blank line, a markup heading, then one " - item" line per entry.
    static void PrintIssueList(string heading, IReadOnlyList<string> items)
    {
        AnsiConsole.WriteLine();
        AnsiConsole.MarkupLine(heading);
        foreach (var item in items)
        {
            AnsiConsole.MarkupLine($" - {Markup.Escape(item)}");
        }
    }

    AnsiConsole.WriteLine();
    var color = result.Status switch
    {
        AuditReplayStatus.Match => "green",
        AuditReplayStatus.Drift => "yellow",
        _ => "red",
    };

    AnsiConsole.MarkupLine($"Replay Status: [{color}]{result.Status}[/]");
    AnsiConsole.WriteLine();

    // One row per replay input, comparing recorded vs. recomputed digests.
    var table = new Table().AddColumns("Input", "Expected", "Actual", "Match");
    foreach (var (label, expected, actual, match) in new (string, string?, string?, bool?)[]
    {
        ("SBOM Digest", result.ExpectedSbomDigest, result.ActualSbomDigest, result.SbomMatches),
        ("Feeds Digest", result.ExpectedFeedsDigest, result.ActualFeedsDigest, result.FeedsMatches),
        ("Policy Digest", result.ExpectedPolicyDigest, result.ActualPolicyDigest, result.PolicyMatches),
    })
    {
        table.AddRow(label, TruncateDigest(expected), TruncateDigest(actual), FormatMatch(match));
    }

    AnsiConsole.Write(table);
    AnsiConsole.WriteLine();

    // Verdict comparison between the original run and the replay.
    AnsiConsole.MarkupLine($"Original Verdict: [bold]{Markup.Escape(result.OriginalVerdictDigest ?? "-")}[/]");
    AnsiConsole.MarkupLine($"Replayed Verdict: [bold]{Markup.Escape(result.ReplayedVerdictDigest ?? "-")}[/]");
    AnsiConsole.MarkupLine($"Verdict Match: {FormatMatch(result.VerdictMatches)}");

    // Drift details are opt-in (verbose); errors are always shown.
    if (verbose && result.Drifts.Count > 0)
    {
        PrintIssueList("[yellow]Detected Drifts:[/]", result.Drifts);
    }

    if (result.Errors.Count > 0)
    {
        PrintIssueList("[red]Errors:[/]", result.Errors);
    }
}
|
||||
|
||||
/// <summary>
/// Emits an error message for the audit commands: a structured
/// <c>{ status: "error", message }</c> payload when the output format is "json",
/// otherwise a red-prefixed console line with the message markup-escaped.
/// </summary>
private static void WriteAuditError(string message, string format)
{
    var asJson = string.Equals(format, "json", StringComparison.OrdinalIgnoreCase);
    if (asJson)
    {
        var payload = new { status = "error", message };
        AnsiConsole.WriteLine(JsonSerializer.Serialize(payload, AuditJsonOptions));
    }
    else
    {
        AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(message)}");
    }
}
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
/// Result of an audit pack replay operation. Compares the digests recorded in the
/// pack (expected) against those recomputed during replay (actual), plus the
/// original vs. replayed verdict digests.
/// </summary>
public sealed record AuditReplayResult
{
    /// <summary>Identifier of the audit pack that was replayed.</summary>
    public required string PackId { get; init; }
    /// <summary>Overall outcome: <see cref="AuditReplayStatus.Match"/>, <see cref="AuditReplayStatus.Drift"/>, or <see cref="AuditReplayStatus.Error"/>.</summary>
    public required AuditReplayStatus Status { get; init; }
    /// <summary>SBOM digest recorded in the pack.</summary>
    public string? ExpectedSbomDigest { get; init; }
    /// <summary>SBOM digest recomputed during replay.</summary>
    public string? ActualSbomDigest { get; init; }
    /// <summary>Whether expected/actual SBOM digests agree; null presumably means the comparison was not performed — TODO confirm.</summary>
    public bool? SbomMatches { get; init; }
    /// <summary>Feeds digest recorded in the pack.</summary>
    public string? ExpectedFeedsDigest { get; init; }
    /// <summary>Feeds digest recomputed during replay.</summary>
    public string? ActualFeedsDigest { get; init; }
    /// <summary>Whether expected/actual feeds digests agree; null presumably means not compared — TODO confirm.</summary>
    public bool? FeedsMatches { get; init; }
    /// <summary>Policy digest recorded in the pack.</summary>
    public string? ExpectedPolicyDigest { get; init; }
    /// <summary>Policy digest recomputed during replay.</summary>
    public string? ActualPolicyDigest { get; init; }
    /// <summary>Whether expected/actual policy digests agree; null presumably means not compared — TODO confirm.</summary>
    public bool? PolicyMatches { get; init; }
    /// <summary>Verdict digest produced by the original evaluation.</summary>
    public string? OriginalVerdictDigest { get; init; }
    /// <summary>Verdict digest produced by the replay.</summary>
    public string? ReplayedVerdictDigest { get; init; }
    /// <summary>Whether original and replayed verdict digests agree.</summary>
    public bool? VerdictMatches { get; init; }
    /// <summary>Human-readable descriptions of detected drifts; empty when none.</summary>
    public IReadOnlyList<string> Drifts { get; init; } = Array.Empty<string>();
    /// <summary>Errors encountered during replay; empty when none.</summary>
    public IReadOnlyList<string> Errors { get; init; } = Array.Empty<string>();
    /// <summary>Timestamp of when the replay was performed.</summary>
    public DateTimeOffset ReplayedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Outcome classification for an audit pack replay.
/// Mapped to CLI exit codes by the replay handler: Match = 0, Drift = 1, otherwise 2.
/// </summary>
public enum AuditReplayStatus
{
    /// <summary>Replayed outputs match the originals exactly.</summary>
    Match,
    /// <summary>Replay completed but one or more outputs diverged from the originals.</summary>
    Drift,
    /// <summary>Replay could not be completed.</summary>
    Error
}
|
||||
|
||||
/// <summary>
/// Options for replay operation.
/// </summary>
public sealed record ReplayOptions
{
    /// <summary>Enables strict replay mode; exact semantics live in the replayer — TODO confirm.</summary>
    public bool Strict { get; init; }
    /// <summary>Runs the replay without network access; presumably skips online lookups — TODO confirm.</summary>
    public bool Offline { get; init; }
    /// <summary>Optional fixed point in time to anchor the replay to; null uses the replayer's default.</summary>
    public DateTimeOffset? TimeAnchor { get; init; }
    /// <summary>Directory where replay artifacts are written; null for the replayer's default.</summary>
    public string? OutputDirectory { get; init; }
}
|
||||
|
||||
/// <summary>
/// Options for import operation.
/// </summary>
public sealed record ImportOptions
{
    /// <summary>Path to a trust store used during import; null for the importer's default — TODO confirm semantics.</summary>
    public string? TrustStorePath { get; init; }
    /// <summary>Directory where imported contents are extracted; null for the importer's default.</summary>
    public string? OutputDirectory { get; init; }
    /// <summary>When true, the bundle is validated without being extracted (used by `audit verify`).</summary>
    public bool VerifyOnly { get; init; }
}
|
||||
|
||||
/// <summary>
/// Interface for audit pack import.
/// </summary>
public interface IAuditPackImporter
{
    /// <summary>
    /// Imports (and validates) the audit pack bundle at <paramref name="bundlePath"/>.
    /// With <see cref="ImportOptions.VerifyOnly"/> set, the bundle is verified without extraction.
    /// </summary>
    /// <param name="bundlePath">Filesystem path to the bundle file.</param>
    /// <param name="options">Import behavior (trust store, output directory, verify-only).</param>
    /// <param name="ct">Token to cancel the import.</param>
    /// <returns>The parsed audit pack on success; failures surface as exceptions to the caller.</returns>
    Task<StellaOps.AuditPack.Models.AuditPack> ImportAsync(string bundlePath, ImportOptions options, CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Interface for audit pack replay.
/// </summary>
public interface IAuditPackReplayer
{
    /// <summary>
    /// Re-executes the evaluation captured in <paramref name="pack"/> and compares
    /// the recomputed digests and verdict against the originals.
    /// </summary>
    /// <param name="pack">The previously imported audit pack to replay.</param>
    /// <param name="options">Replay behavior (strict, offline, time anchor, output directory).</param>
    /// <param name="ct">Token to cancel the replay.</param>
    /// <returns>The replay result, including match/drift status and any errors.</returns>
    Task<AuditReplayResult> ReplayAsync(StellaOps.AuditPack.Models.AuditPack pack, ReplayOptions options, CancellationToken ct = default);
}
|
||||
@@ -20,8 +20,9 @@ internal static partial class CommandHandlers
|
||||
|
||||
/// <summary>
|
||||
/// Handler for `drift compare` command.
|
||||
/// SPRINT_3600_0005_0001 GATE-006: Returns exit codes for CI/CD integration.
|
||||
/// </summary>
|
||||
internal static async Task HandleDriftCompareAsync(
|
||||
internal static async Task<int> HandleDriftCompareAsync(
|
||||
IServiceProvider services,
|
||||
string baseId,
|
||||
string? headId,
|
||||
@@ -74,12 +75,16 @@ internal static partial class CommandHandlers
|
||||
WriteTableOutput(console, driftResult, onlyIncreases, minSeverity);
|
||||
break;
|
||||
}
|
||||
|
||||
// GATE-006: Return appropriate exit code based on drift analysis
|
||||
return ComputeDriftExitCode(driftResult);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Handler for `drift show` command.
|
||||
/// SPRINT_3600_0005_0001 GATE-006: Returns exit codes for CI/CD integration.
|
||||
/// </summary>
|
||||
internal static async Task HandleDriftShowAsync(
|
||||
internal static async Task<int> HandleDriftShowAsync(
|
||||
IServiceProvider services,
|
||||
string id,
|
||||
string output,
|
||||
@@ -127,6 +132,46 @@ internal static partial class CommandHandlers
|
||||
WriteTableOutput(console, driftResult, false, "info");
|
||||
break;
|
||||
}
|
||||
|
||||
// GATE-006: Return appropriate exit code based on drift analysis
|
||||
return ComputeDriftExitCode(driftResult);
|
||||
}
|
||||
|
||||
/// <summary>
/// SPRINT_3600_0005_0001 GATE-006: Compute exit code based on drift result.
/// Exit codes follow DriftExitCodes conventions for CI/CD integration.
/// Priority order: KEV regressions, then reachable affected critical/high sinks,
/// then aggregate reachability movement, then plain success.
/// </summary>
private static int ComputeDriftExitCode(DriftResultDto driftResult)
{
    var sinks = driftResult.DriftedSinks;

    // Highest priority: a risk-increasing sink on the CISA KEV list.
    if (sinks.Any(static s => s.IsKev && s.IsRiskIncrease))
    {
        return DriftExitCodes.KevReachable;
    }

    // Next: newly reachable critical/high sinks whose VEX status is not cleared.
    // NOTE(review): severity/VEX matching is case-sensitive ("critical", "affected", ...) —
    // confirm producers always emit lowercase values.
    if (sinks.Any(static s =>
            s.IsRiskIncrease &&
            s.Severity is "critical" or "high" &&
            s.VexStatus is "affected" or "under_investigation"))
    {
        return DriftExitCodes.AffectedReachable;
    }

    // Remaining outcomes depend only on the aggregate reachability counters:
    // pure hardening (only decreases), informational drift (any increase), or no change.
    return driftResult.Summary switch
    {
        { DecreasedReachability: > 0, IncreasedReachability: 0 } => DriftExitCodes.SuccessHardening,
        { IncreasedReachability: > 0 } => DriftExitCodes.SuccessWithInfoDrift,
        _ => DriftExitCodes.Success,
    };
}
|
||||
|
||||
// Task: UI-020 - Table output using Spectre.Console
|
||||
@@ -316,5 +361,16 @@ internal static partial class CommandHandlers
|
||||
public string CurrentBucket { get; init; } = string.Empty;
|
||||
public bool IsRiskIncrease { get; init; }
|
||||
public int RiskDelta { get; init; }
|
||||
|
||||
// SPRINT_3600_0005_0001 GATE-006: Additional fields for exit code computation
|
||||
/// <summary>
|
||||
/// Whether this sink is a Known Exploited Vulnerability (CISA KEV list).
|
||||
/// </summary>
|
||||
public bool IsKev { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX status for this vulnerability: affected, not_affected, under_investigation, fixed.
|
||||
/// </summary>
|
||||
public string? VexStatus { get; init; }
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user