feat: add security sink detection patterns for JavaScript/TypeScript
- Introduced `sink-detect.js` with various security sink detection patterns categorized by type (e.g., command injection, SQL injection, file operations). - Implemented functions to build a lookup map for fast sink detection and to match sink calls against known patterns. - Added `package-lock.json` for dependency management.
This commit is contained in:
@@ -7,18 +7,11 @@
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Test packages inherited from Directory.Build.props -->
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.Workspaces.Common" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -7,16 +7,7 @@
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
<!-- Test packages inherited from Directory.Build.props -->
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy.csproj" />
|
||||
|
||||
@@ -0,0 +1,255 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AdvisorySnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-006 - Implement advisory snapshot extractor
|
||||
// Description: Extracts advisory data from Concelier for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts advisory data from Concelier database for inclusion in knowledge snapshot bundles.
/// Each feed is serialized as NDJSON (one camelCase JSON object per line).
/// </summary>
public sealed class AdvisorySnapshotExtractor : IAdvisorySnapshotExtractor
{
    // Compact camelCase JSON so NDJSON lines stay one-per-record.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IAdvisoryDataSource _dataSource;

    /// <summary>
    /// Creates the extractor.
    /// </summary>
    /// <param name="dataSource">Source of feeds and advisory records (implemented by Concelier).</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public AdvisorySnapshotExtractor(IAdvisoryDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts advisories from all configured feeds.
    /// </summary>
    /// <param name="request">Filter options: feed ids, since-timestamp, per-feed record cap.</param>
    /// <param name="cancellationToken">Token used to cancel the extraction.</param>
    /// <returns>
    /// Aggregate result. Success is true only when every selected feed extracted without
    /// error; per-feed failures are collected in Errors rather than aborting the run.
    /// </returns>
    public async Task<AdvisoryExtractionResult> ExtractAllAsync(
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<AdvisoryContent>();
        var errors = new List<string>();
        var totalRecords = 0;

        try
        {
            var feeds = await _dataSource.GetAvailableFeedsAsync(cancellationToken);

            foreach (var feed in feeds)
            {
                // Skip if specific feeds are requested and this isn't one of them.
                if (request.FeedIds is { Count: > 0 } && !request.FeedIds.Contains(feed.FeedId))
                {
                    continue;
                }

                try
                {
                    var feedResult = await ExtractFeedAsync(feed.FeedId, request, cancellationToken);
                    if (feedResult.Success && feedResult.Content is not null)
                    {
                        contents.Add(feedResult.Content);
                        totalRecords += feedResult.RecordCount;
                    }
                    else if (!string.IsNullOrEmpty(feedResult.Error))
                    {
                        errors.Add($"{feed.FeedId}: {feedResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // One failing feed must not prevent the remaining feeds from being extracted.
                    errors.Add($"{feed.FeedId}: {ex.Message}");
                }
            }

            return new AdvisoryExtractionResult
            {
                Success = errors.Count == 0,
                Advisories = contents,
                TotalRecordCount = totalRecords,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new AdvisoryExtractionResult
            {
                Success = false,
                Advisories = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts advisories from a specific feed and serializes them as NDJSON.
    /// </summary>
    /// <param name="feedId">Identifier of the feed to extract.</param>
    /// <param name="request">Filter options (since-timestamp, record cap).</param>
    /// <param name="cancellationToken">Token used to cancel the extraction.</param>
    /// <returns>
    /// Success with RecordCount = 0 and a null Content for an empty feed; otherwise the
    /// serialized feed payload. Data-source exceptions are captured into Error.
    /// </returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="feedId"/> is null or whitespace.</exception>
    public async Task<FeedExtractionResult> ExtractFeedAsync(
        string feedId,
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(feedId);

        try
        {
            var advisories = await _dataSource.GetAdvisoriesAsync(
                feedId,
                request.Since,
                request.MaxRecords,
                cancellationToken);

            if (advisories.Count == 0)
            {
                return new FeedExtractionResult
                {
                    Success = true,
                    RecordCount = 0
                };
            }

            // Serialize advisories to NDJSON, ordered by id (ordinal) so the payload
            // bytes are reproducible for a given advisory set.
            var contentBuilder = new StringBuilder();
            foreach (var advisory in advisories.OrderBy(a => a.Id, StringComparer.Ordinal))
            {
                var json = JsonSerializer.Serialize(advisory, JsonOptions);
                contentBuilder.AppendLine(json);
            }

            var contentBytes = Encoding.UTF8.GetBytes(contentBuilder.ToString());

            // Fix: capture the clock once so the file name and SnapshotAt metadata agree
            // (previously two separate UtcNow reads could land on different seconds).
            // Invariant formatting keeps the stamp stable under non-Gregorian default
            // cultures, which would otherwise change the yyyyMMdd digits.
            var snapshotAt = DateTimeOffset.UtcNow;
            var stamp = FormattableString.Invariant($"{snapshotAt:yyyyMMddHHmmss}");
            var fileName = $"{feedId}-{stamp}.ndjson";

            return new FeedExtractionResult
            {
                Success = true,
                RecordCount = advisories.Count,
                Content = new AdvisoryContent
                {
                    FeedId = feedId,
                    FileName = fileName,
                    Content = contentBytes,
                    SnapshotAt = snapshotAt,
                    RecordCount = advisories.Count
                }
            };
        }
        catch (Exception ex)
        {
            return new FeedExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for advisory snapshot extraction.
/// </summary>
public interface IAdvisorySnapshotExtractor
{
    /// <summary>
    /// Extracts advisories from every feed selected by <paramref name="request"/>.
    /// </summary>
    Task<AdvisoryExtractionResult> ExtractAllAsync(
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Extracts advisories from the single feed identified by <paramref name="feedId"/>.
    /// </summary>
    Task<FeedExtractionResult> ExtractFeedAsync(
        string feedId,
        AdvisoryExtractionRequest request,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for advisory data access.
/// This should be implemented by Concelier to provide advisory data.
/// </summary>
public interface IAdvisoryDataSource
{
    /// <summary>Lists the feeds advisories can be extracted from.</summary>
    Task<IReadOnlyList<FeedInfo>> GetAvailableFeedsAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Returns advisories for <paramref name="feedId"/>, optionally limited to records
    /// modified since <paramref name="since"/> and capped at <paramref name="maxRecords"/>.
    /// </summary>
    Task<IReadOnlyList<AdvisoryRecord>> GetAdvisoriesAsync(
        string feedId,
        DateTimeOffset? since = null,
        int? maxRecords = null,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models

/// <summary>
/// Information about an available feed.
/// </summary>
/// <param name="FeedId">Stable feed identifier used for filtering and file naming.</param>
/// <param name="Name">Human-readable feed name.</param>
/// <param name="Ecosystem">Package ecosystem the feed covers, when applicable.</param>
public sealed record FeedInfo(string FeedId, string Name, string? Ecosystem);

/// <summary>
/// A single advisory record.
/// </summary>
public sealed record AdvisoryRecord
{
    /// <summary>Advisory identifier; records are sorted by this (ordinal) when exported.</summary>
    public required string Id { get; init; }

    /// <summary>Identifier of the feed this record came from.</summary>
    public required string FeedId { get; init; }

    public string? CveId { get; init; }
    public string? Summary { get; init; }
    public string? Severity { get; init; }
    public double? CvssScore { get; init; }
    public DateTimeOffset? PublishedAt { get; init; }
    public DateTimeOffset? ModifiedAt { get; init; }
    public IReadOnlyList<string>? AffectedPackages { get; init; }

    /// <summary>NOTE(review): appears to carry the raw upstream payload; the schema
    /// depends on the source feed — confirm with the Concelier implementation.</summary>
    public IReadOnlyDictionary<string, object>? RawData { get; init; }
}

/// <summary>
/// Request for extracting advisories.
/// </summary>
public sealed record AdvisoryExtractionRequest
{
    /// <summary>
    /// Specific feed IDs to extract. Empty means all feeds.
    /// </summary>
    public IReadOnlyList<string>? FeedIds { get; init; }

    /// <summary>
    /// Only extract advisories modified since this time.
    /// </summary>
    public DateTimeOffset? Since { get; init; }

    /// <summary>
    /// Maximum records per feed.
    /// </summary>
    public int? MaxRecords { get; init; }
}

/// <summary>
/// Result of extracting advisories from all feeds.
/// </summary>
public sealed record AdvisoryExtractionResult
{
    /// <summary>True only when every selected feed extracted without error.</summary>
    public bool Success { get; init; }

    /// <summary>One NDJSON payload per successfully extracted, non-empty feed.</summary>
    public IReadOnlyList<AdvisoryContent> Advisories { get; init; } = [];

    /// <summary>Total advisory records across all extracted feeds.</summary>
    public int TotalRecordCount { get; init; }

    /// <summary>Per-feed error messages, prefixed with the feed id.</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}

/// <summary>
/// Result of extracting a single feed.
/// </summary>
public sealed record FeedExtractionResult
{
    /// <summary>True when extraction succeeded (including the empty-feed case).</summary>
    public bool Success { get; init; }

    /// <summary>Number of records extracted; 0 for an empty feed.</summary>
    public int RecordCount { get; init; }

    /// <summary>Serialized feed payload; null when the feed had no records.</summary>
    public AdvisoryContent? Content { get; init; }

    /// <summary>Failure description when Success is false.</summary>
    public string? Error { get; init; }
}

#endregion
|
||||
@@ -0,0 +1,360 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicySnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-008 - Implement policy bundle extractor
|
||||
// Description: Extracts policy bundle data for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.IO.Compression;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts policy bundles from the Policy registry for inclusion in knowledge snapshot bundles.
/// OpaRego policies are packaged as gzipped tar OPA bundles; JSON-based policy types are
/// shipped verbatim.
/// </summary>
public sealed class PolicySnapshotExtractor : IPolicySnapshotExtractor
{
    // Compact camelCase JSON used for the OPA bundle manifest.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IPolicyDataSource _dataSource;

    /// <summary>
    /// Creates the extractor.
    /// </summary>
    /// <param name="dataSource">Source of policy metadata and content (implemented by the Policy module).</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public PolicySnapshotExtractor(IPolicyDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts all registered policies, optionally filtered by type.
    /// </summary>
    /// <returns>
    /// Aggregate result. Success is true only when every selected policy extracted
    /// without error; per-policy failures are collected in Errors rather than aborting.
    /// </returns>
    public async Task<PolicyExtractionResult> ExtractAllAsync(
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<PolicyContent>();
        var errors = new List<string>();

        try
        {
            var policies = await _dataSource.GetAvailablePoliciesAsync(cancellationToken);

            foreach (var policy in policies)
            {
                // Skip if specific types are requested and this isn't one of them.
                if (request.Types is { Count: > 0 } && !request.Types.Contains(policy.Type))
                {
                    continue;
                }

                try
                {
                    var policyResult = await ExtractPolicyAsync(policy.PolicyId, request, cancellationToken);
                    if (policyResult.Success && policyResult.Content is not null)
                    {
                        contents.Add(policyResult.Content);
                    }
                    else if (!string.IsNullOrEmpty(policyResult.Error))
                    {
                        errors.Add($"{policy.PolicyId}: {policyResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // One failing policy must not prevent the rest from being extracted.
                    errors.Add($"{policy.PolicyId}: {ex.Message}");
                }
            }

            return new PolicyExtractionResult
            {
                Success = errors.Count == 0,
                Policies = contents,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new PolicyExtractionResult
            {
                Success = false,
                Policies = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts a specific policy and packages it according to its type.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="policyId"/> is null or whitespace.</exception>
    public async Task<PolicySingleExtractionResult> ExtractPolicyAsync(
        string policyId,
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(policyId);

        try
        {
            var policyInfo = await _dataSource.GetPolicyInfoAsync(policyId, cancellationToken);
            if (policyInfo is null)
            {
                return new PolicySingleExtractionResult
                {
                    Success = false,
                    Error = "Policy not found"
                };
            }

            var policyContent = await _dataSource.GetPolicyContentAsync(policyId, cancellationToken);
            if (policyContent is null || policyContent.Length == 0)
            {
                return new PolicySingleExtractionResult
                {
                    Success = false,
                    Error = "Policy content is empty"
                };
            }

            // Package policy based on type.
            byte[] contentBytes;
            string fileName;

            switch (policyInfo.Type)
            {
                case "OpaRego":
                    // Package Rego files as a tar.gz OPA bundle.
                    contentBytes = await PackageRegoBundle(policyInfo, policyContent, cancellationToken);
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.tar.gz";
                    break;

                case "LatticeRules" or "UnknownBudgets" or "ScoringWeights":
                    // These policy types are stored as JSON and shipped verbatim.
                    contentBytes = policyContent;
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.json";
                    break;

                default:
                    // Unknown types are passed through as-is.
                    contentBytes = policyContent;
                    fileName = $"{policyInfo.PolicyId}-{policyInfo.Version}.bin";
                    break;
            }

            return new PolicySingleExtractionResult
            {
                Success = true,
                Content = new PolicyContent
                {
                    PolicyId = policyInfo.PolicyId,
                    Name = policyInfo.Name,
                    Version = policyInfo.Version,
                    FileName = fileName,
                    Content = contentBytes,
                    Type = policyInfo.Type
                }
            };
        }
        catch (Exception ex)
        {
            return new PolicySingleExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Packages a Rego policy plus an OPA bundle manifest into a gzipped USTAR archive.
    /// Note: this is a minimal hand-rolled tar writer; a full implementation would use
    /// System.Formats.Tar.
    /// </summary>
    private static async Task<byte[]> PackageRegoBundle(
        PolicyInfo policyInfo,
        byte[] policyContent,
        CancellationToken cancellationToken)
    {
        await Task.CompletedTask; // Operations below are synchronous.

        using var outputStream = new MemoryStream();
        using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);

        // The Rego source itself.
        WriteTarEntry(gzipStream, $"{policyInfo.PolicyId}/policy.rego", policyContent);

        // OPA bundle manifest (.manifest) describing revision and roots.
        var manifest = new OpaBundleManifest
        {
            Revision = policyInfo.Version,
            Roots = [policyInfo.PolicyId]
        };
        WriteTarEntry(gzipStream, ".manifest", JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions));

        // Write tar end-of-archive marker (two 512-byte zero blocks).
        gzipStream.Write(new byte[1024]);

        // Flush/close the compressor before reading the buffer back.
        gzipStream.Close();
        return outputStream.ToArray();
    }

    /// <summary>
    /// Writes one tar entry: 512-byte header, content, then zero padding up to the
    /// next 512-byte boundary (no padding when the content is already aligned).
    /// </summary>
    private static void WriteTarEntry(GZipStream gzipStream, string entryName, byte[] content)
    {
        gzipStream.Write(CreateTarHeader(entryName, content.Length));
        gzipStream.Write(content);

        var padding = 512 - (content.Length % 512);
        if (padding < 512)
        {
            gzipStream.Write(new byte[padding]);
        }
    }

    /// <summary>
    /// Builds a 512-byte USTAR header for a regular file.
    /// Caveat: names longer than 100 bytes are silently truncated by the name field.
    /// </summary>
    private static byte[] CreateTarHeader(string fileName, long fileSize)
    {
        var header = new byte[512];
        var nameBytes = Encoding.ASCII.GetBytes(fileName);
        Array.Copy(nameBytes, header, Math.Min(nameBytes.Length, 100));

        // Mode (100-107) - 0644.
        Encoding.ASCII.GetBytes("0000644").CopyTo(header, 100);

        // Owner/group UID/GID (108-123) - zeros.
        Encoding.ASCII.GetBytes("0000000").CopyTo(header, 108);
        Encoding.ASCII.GetBytes("0000000").CopyTo(header, 116);

        // File size in octal (124-135).
        Encoding.ASCII.GetBytes(Convert.ToString(fileSize, 8).PadLeft(11, '0')).CopyTo(header, 124);

        // Modification time (136-147).
        var mtime = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
        Encoding.ASCII.GetBytes(Convert.ToString(mtime, 8).PadLeft(11, '0')).CopyTo(header, 136);

        // Checksum placeholder (148-155) - the tar spec computes the checksum with
        // this field treated as eight spaces.
        for (var i = 148; i < 156; i++)
        {
            header[i] = 0x20;
        }

        // Type flag (156) - regular file.
        header[156] = (byte)'0';

        // USTAR magic (257-262) and version (263-264).
        Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
        Encoding.ASCII.GetBytes("00").CopyTo(header, 263);

        // Calculate and set checksum (unsigned byte sum over the whole header),
        // written as 6 octal digits + NUL + space.
        var checksum = 0;
        foreach (var b in header)
        {
            checksum += b;
        }
        Encoding.ASCII.GetBytes(Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ").CopyTo(header, 148);

        return header;
    }

    // Shape of OPA's bundle .manifest file (serialized camelCase: "revision"/"roots").
    private sealed record OpaBundleManifest
    {
        public required string Revision { get; init; }
        public required string[] Roots { get; init; }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for policy snapshot extraction.
/// </summary>
public interface IPolicySnapshotExtractor
{
    /// <summary>
    /// Extracts every policy selected by <paramref name="request"/>.
    /// </summary>
    Task<PolicyExtractionResult> ExtractAllAsync(
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Extracts the single policy identified by <paramref name="policyId"/>.
    /// </summary>
    Task<PolicySingleExtractionResult> ExtractPolicyAsync(
        string policyId,
        PolicyExtractionRequest request,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for policy data access.
/// This should be implemented by the Policy module to provide policy data.
/// </summary>
public interface IPolicyDataSource
{
    /// <summary>Lists all registered policies.</summary>
    Task<IReadOnlyList<PolicyInfo>> GetAvailablePoliciesAsync(CancellationToken cancellationToken = default);

    /// <summary>Returns metadata for one policy; null is treated as "policy not found".</summary>
    Task<PolicyInfo?> GetPolicyInfoAsync(string policyId, CancellationToken cancellationToken = default);

    /// <summary>Returns the raw policy payload bytes; null/empty is treated as an error.</summary>
    Task<byte[]?> GetPolicyContentAsync(string policyId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models

/// <summary>
/// Information about a policy.
/// </summary>
public sealed record PolicyInfo
{
    /// <summary>Unique policy identifier; also used as the OPA bundle root.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Human-readable policy name.</summary>
    public required string Name { get; init; }

    /// <summary>Policy version; embedded in the exported file name.</summary>
    public required string Version { get; init; }

    /// <summary>
    /// Policy kind. Values handled specially by the extractor: "OpaRego" (tar.gz bundle),
    /// "LatticeRules"/"UnknownBudgets"/"ScoringWeights" (JSON); anything else is exported
    /// as raw .bin bytes.
    /// </summary>
    public required string Type { get; init; }

    public string? Description { get; init; }
    public DateTimeOffset? CreatedAt { get; init; }
    public DateTimeOffset? ModifiedAt { get; init; }
}

/// <summary>
/// Request for extracting policies.
/// </summary>
public sealed record PolicyExtractionRequest
{
    /// <summary>
    /// Specific policy types to extract. Empty means all types.
    /// </summary>
    public IReadOnlyList<string>? Types { get; init; }
}

/// <summary>
/// Result of extracting policies.
/// </summary>
public sealed record PolicyExtractionResult
{
    /// <summary>True only when every selected policy extracted without error.</summary>
    public bool Success { get; init; }

    /// <summary>One packaged payload per successfully extracted policy.</summary>
    public IReadOnlyList<PolicyContent> Policies { get; init; } = [];

    /// <summary>Per-policy error messages, prefixed with the policy id.</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}

/// <summary>
/// Result of extracting a single policy.
/// </summary>
public sealed record PolicySingleExtractionResult
{
    /// <summary>True when the policy was found, non-empty, and packaged.</summary>
    public bool Success { get; init; }

    /// <summary>Packaged policy payload; null on failure.</summary>
    public PolicyContent? Content { get; init; }

    /// <summary>Failure description when Success is false.</summary>
    public string? Error { get; init; }
}

#endregion
|
||||
@@ -0,0 +1,281 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VexSnapshotExtractor.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-007 - Implement VEX snapshot extractor
|
||||
// Description: Extracts VEX statement data from Excititor for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Extractors;
|
||||
|
||||
/// <summary>
/// Extracts VEX (Vulnerability Exploitability eXchange) statements from Excititor
/// database for inclusion in knowledge snapshot bundles. Each source is serialized
/// as a single OpenVEX document.
/// </summary>
public sealed class VexSnapshotExtractor : IVexSnapshotExtractor
{
    // Compact camelCase JSON for the serialized OpenVEX documents.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    private readonly IVexDataSource _dataSource;

    /// <summary>
    /// Creates the extractor.
    /// </summary>
    /// <param name="dataSource">Source of VEX statements (implemented by Excititor).</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="dataSource"/> is null.</exception>
    public VexSnapshotExtractor(IVexDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Extracts VEX statements from all configured sources.
    /// </summary>
    /// <returns>
    /// Aggregate result. Success is true only when every selected source extracted
    /// without error; per-source failures are collected in Errors rather than aborting.
    /// </returns>
    public async Task<VexExtractionResult> ExtractAllAsync(
        VexExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var contents = new List<VexContent>();
        var errors = new List<string>();
        var totalStatements = 0;

        try
        {
            var sources = await _dataSource.GetAvailableSourcesAsync(cancellationToken);

            foreach (var source in sources)
            {
                // Skip if specific sources are requested and this isn't one of them.
                if (request.SourceIds is { Count: > 0 } && !request.SourceIds.Contains(source.SourceId))
                {
                    continue;
                }

                try
                {
                    var sourceResult = await ExtractSourceAsync(source.SourceId, request, cancellationToken);
                    if (sourceResult.Success && sourceResult.Content is not null)
                    {
                        contents.Add(sourceResult.Content);
                        totalStatements += sourceResult.StatementCount;
                    }
                    else if (!string.IsNullOrEmpty(sourceResult.Error))
                    {
                        errors.Add($"{source.SourceId}: {sourceResult.Error}");
                    }
                }
                catch (Exception ex)
                {
                    // One failing source must not prevent the remaining sources from being extracted.
                    errors.Add($"{source.SourceId}: {ex.Message}");
                }
            }

            return new VexExtractionResult
            {
                Success = errors.Count == 0,
                VexStatements = contents,
                TotalStatementCount = totalStatements,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            return new VexExtractionResult
            {
                Success = false,
                VexStatements = [],
                Errors = [$"Extraction failed: {ex.Message}"]
            };
        }
    }

    /// <summary>
    /// Extracts VEX statements from a specific source and serializes them as one
    /// OpenVEX document, statements ordered by vulnerability id (ordinal) so the
    /// payload bytes are reproducible for a given statement set.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="sourceId"/> is null or whitespace.</exception>
    public async Task<VexSourceExtractionResult> ExtractSourceAsync(
        string sourceId,
        VexExtractionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);

        try
        {
            var statements = await _dataSource.GetStatementsAsync(
                sourceId,
                request.Since,
                request.MaxStatements,
                cancellationToken);

            if (statements.Count == 0)
            {
                return new VexSourceExtractionResult
                {
                    Success = true,
                    StatementCount = 0
                };
            }

            // Fix: capture the clock once so the document id, Timestamp, file name and
            // SnapshotAt metadata all agree (previously several separate UtcNow reads
            // could land on different seconds). Invariant formatting keeps the stamp
            // stable under non-Gregorian default cultures.
            var snapshotAt = DateTimeOffset.UtcNow;
            var stamp = FormattableString.Invariant($"{snapshotAt:yyyyMMddHHmmss}");

            // Serialize statements to OpenVEX format.
            var document = new OpenVexDocument
            {
                Context = "https://openvex.dev/ns",
                Id = $"urn:stellaops:vex:{sourceId}:{stamp}",
                Author = sourceId,
                Timestamp = snapshotAt,
                Version = 1,
                Statements = statements.OrderBy(s => s.VulnerabilityId, StringComparer.Ordinal).ToList()
            };

            var contentBytes = JsonSerializer.SerializeToUtf8Bytes(document, JsonOptions);
            var fileName = $"{sourceId}-{stamp}.json";

            return new VexSourceExtractionResult
            {
                Success = true,
                StatementCount = statements.Count,
                Content = new VexContent
                {
                    SourceId = sourceId,
                    FileName = fileName,
                    Content = contentBytes,
                    SnapshotAt = snapshotAt,
                    StatementCount = statements.Count
                }
            };
        }
        catch (Exception ex)
        {
            return new VexSourceExtractionResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }
}
|
||||
|
||||
/// <summary>
/// Interface for VEX snapshot extraction.
/// </summary>
public interface IVexSnapshotExtractor
{
    /// <summary>
    /// Extracts VEX statements from every source selected by <paramref name="request"/>.
    /// </summary>
    Task<VexExtractionResult> ExtractAllAsync(
        VexExtractionRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Extracts VEX statements from the single source identified by <paramref name="sourceId"/>.
    /// </summary>
    Task<VexSourceExtractionResult> ExtractSourceAsync(
        string sourceId,
        VexExtractionRequest request,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for VEX data access.
/// This should be implemented by Excititor to provide VEX data.
/// </summary>
public interface IVexDataSource
{
    /// <summary>Lists the sources VEX statements can be extracted from.</summary>
    Task<IReadOnlyList<VexSourceInfo>> GetAvailableSourcesAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Returns statements for <paramref name="sourceId"/>, optionally limited to those
    /// modified since <paramref name="since"/> and capped at <paramref name="maxStatements"/>.
    /// </summary>
    Task<IReadOnlyList<VexStatement>> GetStatementsAsync(
        string sourceId,
        DateTimeOffset? since = null,
        int? maxStatements = null,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Data Models

/// <summary>
/// Information about an available VEX source.
/// </summary>
/// <param name="SourceId">Stable source identifier used for filtering and file naming.</param>
/// <param name="Name">Human-readable source name.</param>
/// <param name="Publisher">Publisher of the source, when known.</param>
public sealed record VexSourceInfo(string SourceId, string Name, string? Publisher);

/// <summary>
/// A VEX statement following OpenVEX format.
/// </summary>
public sealed record VexStatement
{
    /// <summary>Vulnerability identifier; statements are sorted by this (ordinal) when exported.</summary>
    public required string VulnerabilityId { get; init; }

    /// <summary>Exploitability status. NOTE(review): presumably one of the OpenVEX status
    /// values (not_affected / affected / fixed / under_investigation) — confirm with Excititor.</summary>
    public required string Status { get; init; }

    public string? Justification { get; init; }
    public string? ImpactStatement { get; init; }
    public string? ActionStatement { get; init; }
    public DateTimeOffset? Timestamp { get; init; }
    public IReadOnlyList<VexProduct>? Products { get; init; }
}

/// <summary>
/// A product reference in a VEX statement.
/// </summary>
public sealed record VexProduct
{
    /// <summary>Product identifier.</summary>
    public required string Id { get; init; }

    public string? Name { get; init; }
    public string? Version { get; init; }

    /// <summary>Package URL (purl) for the product, when known.</summary>
    public string? Purl { get; init; }

    public IReadOnlyList<string>? Hashes { get; init; }
}
|
||||
|
||||
/// <summary>
/// OpenVEX document format.
/// </summary>
public sealed record OpenVexDocument
{
    // Fix: the OpenVEX spec uses JSON-LD style "@context"/"@id" member names; the
    // camelCase naming policy alone would emit "context"/"id", producing a
    // non-conformant document, so the serialized names are pinned explicitly.
    [JsonPropertyName("@context")]
    public required string Context { get; init; }

    [JsonPropertyName("@id")]
    public required string Id { get; init; }

    /// <summary>Document author (the source id, as emitted by the extractor).</summary>
    public required string Author { get; init; }

    /// <summary>Document creation timestamp.</summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>Document version counter.</summary>
    public required int Version { get; init; }

    /// <summary>Statements in the document, pre-sorted by vulnerability id.</summary>
    public required IReadOnlyList<VexStatement> Statements { get; init; }
}
|
||||
|
||||
/// <summary>
/// Request for extracting VEX statements.
/// </summary>
public sealed record VexExtractionRequest
{
    /// <summary>
    /// Specific source IDs to extract. Empty means all sources.
    /// </summary>
    public IReadOnlyList<string>? SourceIds { get; init; }

    /// <summary>
    /// Only extract statements modified since this time.
    /// </summary>
    public DateTimeOffset? Since { get; init; }

    /// <summary>
    /// Maximum statements per source.
    /// </summary>
    public int? MaxStatements { get; init; }
}

/// <summary>
/// Result of extracting VEX statements from all sources.
/// </summary>
public sealed record VexExtractionResult
{
    /// <summary>True only when every selected source extracted without error.</summary>
    public bool Success { get; init; }

    /// <summary>One serialized OpenVEX document per successfully extracted, non-empty source.</summary>
    public IReadOnlyList<VexContent> VexStatements { get; init; } = [];

    /// <summary>Total statements across all extracted sources.</summary>
    public int TotalStatementCount { get; init; }

    /// <summary>Per-source error messages, prefixed with the source id.</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}

/// <summary>
/// Result of extracting a single VEX source.
/// </summary>
public sealed record VexSourceExtractionResult
{
    /// <summary>True when extraction succeeded (including the empty-source case).</summary>
    public bool Success { get; init; }

    /// <summary>Number of statements extracted; 0 for an empty source.</summary>
    public int StatementCount { get; init; }

    /// <summary>Serialized document payload; null when the source had no statements.</summary>
    public VexContent? Content { get; init; }

    /// <summary>Failure description when Success is false.</summary>
    public string? Error { get; init; }
}

#endregion
|
||||
@@ -0,0 +1,92 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// KnowledgeSnapshotManifest.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-001 - Define KnowledgeSnapshotManifest schema
|
||||
// Description: Manifest model for sealed knowledge snapshots.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Manifest for a sealed knowledge snapshot bundle.
|
||||
/// Contains metadata and integrity information for all bundled content.
|
||||
/// </summary>
|
||||
/// <summary>
/// Manifest for a sealed knowledge snapshot bundle.
/// Contains metadata and integrity information for all bundled content.
/// </summary>
public sealed class KnowledgeSnapshotManifest
{
    /// <summary>Unique identifier for the bundle.</summary>
    public required string BundleId { get; init; }

    /// <summary>Human-readable bundle name.</summary>
    public required string Name { get; init; }

    /// <summary>Bundle content version.</summary>
    public required string Version { get; init; }

    /// <summary>Creation timestamp of the bundle.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Manifest schema version; bump on breaking schema changes.</summary>
    public string SchemaVersion { get; init; } = "1.0.0";

    // The following are settable (not init-only) because the writer fills them
    // in after all entries have been collected and hashed.

    /// <summary>Merkle root over all bundled entries ("path:digest" leaves).</summary>
    public string? MerkleRoot { get; set; }

    /// <summary>Sum of all entry sizes in bytes.</summary>
    public long TotalSizeBytes { get; set; }

    /// <summary>Number of content entries in the bundle.</summary>
    public int EntryCount { get; set; }

    public List<AdvisorySnapshotEntry> Advisories { get; init; } = [];
    public List<VexSnapshotEntry> VexStatements { get; init; } = [];
    public List<PolicySnapshotEntry> Policies { get; init; } = [];
    public List<TrustRootSnapshotEntry> TrustRoots { get; init; } = [];

    /// <summary>Optional time anchor used for freshness validation on import.</summary>
    public TimeAnchorEntry? TimeAnchor { get; set; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Entry for an advisory feed in the snapshot.
|
||||
/// </summary>
|
||||
/// <summary>
/// Entry for an advisory feed in the snapshot.
/// </summary>
public sealed class AdvisorySnapshotEntry
{
    /// <summary>Identifier of the advisory feed this entry came from.</summary>
    public required string FeedId { get; init; }

    /// <summary>Path of the file inside the bundle, using '/' separators.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }

    /// <summary>Size of the file content in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the feed content was snapshotted.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of advisory records contained in the file.</summary>
    public int RecordCount { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Entry for VEX statements in the snapshot.
|
||||
/// </summary>
|
||||
/// <summary>
/// Entry for VEX statements in the snapshot.
/// </summary>
public sealed class VexSnapshotEntry
{
    /// <summary>Identifier of the VEX source this entry came from.</summary>
    public required string SourceId { get; init; }

    /// <summary>Path of the file inside the bundle, using '/' separators.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }

    /// <summary>Size of the file content in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>When the VEX content was snapshotted.</summary>
    public DateTimeOffset SnapshotAt { get; init; }

    /// <summary>Number of VEX statements contained in the file.</summary>
    public int StatementCount { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Entry for a policy in the snapshot.
|
||||
/// </summary>
|
||||
/// <summary>
/// Entry for a policy in the snapshot.
/// </summary>
public sealed class PolicySnapshotEntry
{
    /// <summary>Unique policy identifier.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Human-readable policy name.</summary>
    public required string Name { get; init; }

    /// <summary>Policy version string.</summary>
    public required string Version { get; init; }

    /// <summary>Path of the file inside the bundle, using '/' separators.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }

    /// <summary>Size of the file content in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>Policy format; defaults to OPA Rego.</summary>
    public string Type { get; init; } = "OpaRego";
}
|
||||
|
||||
/// <summary>
|
||||
/// Entry for a trust root in the snapshot.
|
||||
/// </summary>
|
||||
/// <summary>
/// Entry for a trust root in the snapshot.
/// </summary>
public sealed class TrustRootSnapshotEntry
{
    /// <summary>Key identifier for the trust root.</summary>
    public required string KeyId { get; init; }

    /// <summary>Path of the file inside the bundle, using '/' separators.</summary>
    public required string RelativePath { get; init; }

    /// <summary>Content digest in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }

    /// <summary>Size of the file content in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>Signature algorithm associated with the key; defaults to ES256.</summary>
    public string Algorithm { get; init; } = "ES256";

    /// <summary>Optional expiry of the trust root key.</summary>
    public DateTimeOffset? ExpiresAt { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Time anchor entry in the manifest.
|
||||
/// </summary>
|
||||
/// <summary>
/// Time anchor entry in the manifest.
/// </summary>
public sealed class TimeAnchorEntry
{
    /// <summary>The anchored point in time.</summary>
    public required DateTimeOffset AnchorTime { get; init; }

    /// <summary>Where the anchor came from (e.g. a timestamping authority name).</summary>
    public required string Source { get; init; }

    /// <summary>Digest of the serialized time-anchor file inside the bundle, if present.</summary>
    public string? Digest { get; init; }
}
|
||||
@@ -0,0 +1,548 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotBundleReader.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Tasks: SEAL-012, SEAL-013 - Implement signature verification and merkle root validation
|
||||
// Description: Reads and verifies sealed knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Reads and verifies sealed knowledge snapshot bundles.
|
||||
/// </summary>
|
||||
/// <summary>
/// Reads and verifies sealed knowledge snapshot bundles.
/// </summary>
public sealed class SnapshotBundleReader : ISnapshotBundleReader
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Reads and verifies a snapshot bundle: extracts the tar.gz to a temp
    /// directory, parses the manifest, then (per request flags) verifies the
    /// manifest signature, the merkle root over all content entries, and the
    /// time anchor freshness. The temp directory is always cleaned up.
    /// </summary>
    public async Task<SnapshotBundleReadResult> ReadAsync(
        SnapshotBundleReadRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);

        if (!File.Exists(request.BundlePath))
        {
            return SnapshotBundleReadResult.Failed("Bundle file not found");
        }

        var tempDir = Path.Combine(Path.GetTempPath(), $"bundle-read-{Guid.NewGuid():N}");
        Directory.CreateDirectory(tempDir);

        try
        {
            // Extract the bundle
            await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);

            // Read manifest
            var manifestPath = Path.Combine(tempDir, "manifest.json");
            if (!File.Exists(manifestPath))
            {
                return SnapshotBundleReadResult.Failed("Manifest not found in bundle");
            }

            var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
            var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestBytes, JsonOptions);
            if (manifest is null)
            {
                return SnapshotBundleReadResult.Failed("Failed to parse manifest");
            }

            var result = new SnapshotBundleReadResult
            {
                Success = true,
                Manifest = manifest,
                BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken)
            };

            // Verify signature if requested
            if (request.VerifySignature)
            {
                var signaturePath = Path.Combine(tempDir, "manifest.sig");
                if (File.Exists(signaturePath))
                {
                    var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken);
                    var signatureResult = await VerifySignatureAsync(
                        manifestBytes, signatureBytes, request.PublicKey, cancellationToken);

                    result = result with
                    {
                        SignatureVerified = signatureResult.Verified,
                        SignatureKeyId = signatureResult.KeyId,
                        SignatureError = signatureResult.Error
                    };

                    if (!signatureResult.Verified && request.RequireValidSignature)
                    {
                        return result with
                        {
                            Success = false,
                            Error = $"Signature verification failed: {signatureResult.Error}"
                        };
                    }
                }
                else if (request.RequireValidSignature)
                {
                    return SnapshotBundleReadResult.Failed("Signature file not found but signature is required");
                }
            }

            // Verify merkle root if requested
            if (request.VerifyMerkleRoot)
            {
                var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken);
                result = result with
                {
                    MerkleRootVerified = merkleResult.Verified,
                    MerkleRootError = merkleResult.Error
                };

                if (!merkleResult.Verified && request.RequireValidMerkleRoot)
                {
                    return result with
                    {
                        Success = false,
                        Error = $"Merkle root verification failed: {merkleResult.Error}"
                    };
                }
            }

            // Verify time anchor if present
            if (request.VerifyTimeAnchor && manifest.TimeAnchor is not null)
            {
                var timeAnchorService = new TimeAnchorService();
                var timeAnchorContent = new TimeAnchorContent
                {
                    AnchorTime = manifest.TimeAnchor.AnchorTime,
                    Source = manifest.TimeAnchor.Source,
                    TokenDigest = manifest.TimeAnchor.Digest
                };

                var timeAnchorResult = await timeAnchorService.ValidateAnchorAsync(
                    timeAnchorContent,
                    new TimeAnchorValidationRequest
                    {
                        MaxAgeHours = request.MaxAgeHours,
                        MaxClockDriftSeconds = request.MaxClockDriftSeconds
                    },
                    cancellationToken);

                result = result with
                {
                    TimeAnchorValid = timeAnchorResult.IsValid,
                    TimeAnchorAgeHours = timeAnchorResult.AgeHours,
                    TimeAnchorError = timeAnchorResult.Error
                };

                if (!timeAnchorResult.IsValid && request.RequireValidTimeAnchor)
                {
                    return result with
                    {
                        Success = false,
                        Error = $"Time anchor validation failed: {timeAnchorResult.Error}"
                    };
                }
            }

            return result;
        }
        catch (Exception ex)
        {
            return SnapshotBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
        }
        finally
        {
            // Clean up temp directory; cleanup failures are intentionally ignored.
            try
            {
                if (Directory.Exists(tempDir))
                {
                    Directory.Delete(tempDir, recursive: true);
                }
            }
            catch
            {
                // Ignore cleanup errors
            }
        }
    }

    /// <summary>Extracts a gzipped tar bundle into <paramref name="targetDir"/>.</summary>
    private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
    {
        await using var fileStream = File.OpenRead(bundlePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
    }

    /// <summary>Streams a file through SHA-256 and returns "sha256:&lt;hex&gt;".</summary>
    private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, ct);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Verifies the DSSE-style signature envelope over the manifest bytes and
    /// checks the signed payload digest matches the manifest on disk.
    /// When no public key is supplied, envelope integrity alone is accepted.
    /// </summary>
    private static async Task<SignatureVerificationResult> VerifySignatureAsync(
        byte[] manifestBytes,
        byte[] signatureEnvelopeBytes,
        AsymmetricAlgorithm? publicKey,
        CancellationToken cancellationToken)
    {
        try
        {
            var signer = new SnapshotManifestSigner();
            var result = await signer.VerifyAsync(
                new ManifestVerificationRequest
                {
                    EnvelopeBytes = signatureEnvelopeBytes,
                    PublicKey = publicKey
                },
                cancellationToken);

            if (!result.Success)
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = result.Error
                };
            }

            // Verify the payload digest matches the manifest
            var manifestDigest = ComputeSha256(manifestBytes);
            if (result.PayloadDigest != manifestDigest)
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = "Manifest digest does not match signed payload"
                };
            }

            var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId;

            return new SignatureVerificationResult
            {
                Verified = publicKey is null || (result.VerifiedSignatures?.Any(s => s.Verified == true) ?? false),
                KeyId = keyId
            };
        }
        catch (Exception ex)
        {
            return new SignatureVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Re-hashes every content file referenced by the manifest, checks each
    /// digest, recomputes the merkle root over the collected entries, and
    /// compares it to the manifest's recorded root.
    /// </summary>
    private static async Task<MerkleVerificationResult> VerifyMerkleRootAsync(
        string bundleDir,
        KnowledgeSnapshotManifest manifest,
        CancellationToken cancellationToken)
    {
        try
        {
            var entries = new List<BundleEntry>();

            // All content files referenced by the manifest, as (path, expected digest) pairs.
            var expectedEntries = manifest.Advisories.Select(a => (a.RelativePath, a.Digest))
                .Concat(manifest.VexStatements.Select(v => (v.RelativePath, v.Digest)))
                .Concat(manifest.Policies.Select(p => (p.RelativePath, p.Digest)))
                .Concat(manifest.TrustRoots.Select(t => (t.RelativePath, t.Digest)));

            foreach (var (relativePath, expectedDigest) in expectedEntries)
            {
                var error = await CollectVerifiedEntryAsync(
                    bundleDir, relativePath, expectedDigest, entries, cancellationToken);
                if (error is not null)
                {
                    return new MerkleVerificationResult { Verified = false, Error = error };
                }
            }

            // BUG FIX: SnapshotBundleWriter includes "time-anchor.json" in the
            // merkle computation, so the reader must include it as well;
            // otherwise every bundle containing a time anchor fails verification.
            if (manifest.TimeAnchor is { Digest: not null } anchor)
            {
                var error = await CollectVerifiedEntryAsync(
                    bundleDir, "time-anchor.json", anchor.Digest, entries, cancellationToken);
                if (error is not null)
                {
                    return new MerkleVerificationResult { Verified = false, Error = error };
                }
            }

            // Compute merkle root
            var computedRoot = ComputeMerkleRoot(entries);

            if (computedRoot != manifest.MerkleRoot)
            {
                return new MerkleVerificationResult
                {
                    Verified = false,
                    Error = $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {computedRoot}"
                };
            }

            return new MerkleVerificationResult { Verified = true };
        }
        catch (Exception ex)
        {
            return new MerkleVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Verifies a single manifest entry: the file must exist and its SHA-256
    /// digest must match. On success the entry is appended to
    /// <paramref name="entries"/> and null is returned; otherwise an error
    /// message is returned. (Replaces four duplicated loops in the original.)
    /// </summary>
    private static async Task<string?> CollectVerifiedEntryAsync(
        string bundleDir,
        string relativePath,
        string expectedDigest,
        List<BundleEntry> entries,
        CancellationToken cancellationToken)
    {
        var filePath = Path.Combine(bundleDir, relativePath.Replace('/', Path.DirectorySeparatorChar));
        if (!File.Exists(filePath))
        {
            return $"Missing file: {relativePath}";
        }

        var content = await File.ReadAllBytesAsync(filePath, cancellationToken);
        var digest = ComputeSha256(content);

        if (digest != expectedDigest)
        {
            return $"Digest mismatch for {relativePath}";
        }

        entries.Add(new BundleEntry(relativePath, digest, content.Length));
        return null;
    }

    /// <summary>Hashes a byte array with SHA-256 and returns "sha256:&lt;hex&gt;".</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Computes a merkle root over entries sorted by path (ordinal).
    /// Leaves are SHA256("path:digest"); pairs are hashed upward until one
    /// node remains. Must stay in sync with SnapshotBundleWriter.
    /// </summary>
    private static string ComputeMerkleRoot(List<BundleEntry> entries)
    {
        if (entries.Count == 0)
        {
            return string.Empty;
        }

        var leaves = entries
            .OrderBy(e => e.Path, StringComparer.Ordinal)
            .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
            .ToArray();

        while (leaves.Length > 1)
        {
            leaves = PairwiseHash(leaves).ToArray();
        }

        return Convert.ToHexString(leaves[0]).ToLowerInvariant();
    }

    /// <summary>
    /// Hashes adjacent node pairs; an odd trailing node is re-hashed alone so
    /// the tree stays balanced deterministically.
    /// </summary>
    private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
    {
        for (var i = 0; i < nodes.Length; i += 2)
        {
            if (i + 1 >= nodes.Length)
            {
                yield return SHA256.HashData(nodes[i]);
                continue;
            }

            var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
            Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
            Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
            yield return SHA256.HashData(combined);
        }
    }

    /// <summary>One verified content file: bundle path, digest, and size.</summary>
    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);

    /// <summary>Outcome of manifest signature verification.</summary>
    private sealed record SignatureVerificationResult
    {
        public bool Verified { get; init; }
        public string? KeyId { get; init; }
        public string? Error { get; init; }
    }

    /// <summary>Outcome of merkle root verification.</summary>
    private sealed record MerkleVerificationResult
    {
        public bool Verified { get; init; }
        public string? Error { get; init; }
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for snapshot bundle reading.
|
||||
/// </summary>
|
||||
/// <summary>
/// Interface for snapshot bundle reading.
/// </summary>
public interface ISnapshotBundleReader
{
    /// <summary>
    /// Reads a bundle from disk and performs the verifications selected in
    /// <paramref name="request"/>. Failures are reported via the result's
    /// Success/Error members rather than thrown.
    /// </summary>
    Task<SnapshotBundleReadResult> ReadAsync(
        SnapshotBundleReadRequest request,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
#region Request and Result Models
|
||||
|
||||
/// <summary>
|
||||
/// Request for reading a snapshot bundle.
|
||||
/// </summary>
|
||||
/// <summary>
/// Request for reading a snapshot bundle. The Verify* flags control which
/// checks run; the RequireValid* flags control whether a failed check makes
/// the whole read fail (otherwise the failure is only reported in the result).
/// </summary>
public sealed record SnapshotBundleReadRequest
{
    /// <summary>Path to the .tar.gz bundle file on disk.</summary>
    public required string BundlePath { get; init; }

    /// <summary>
    /// Verify the manifest signature.
    /// </summary>
    public bool VerifySignature { get; init; } = true;

    /// <summary>
    /// Fail if signature is invalid.
    /// </summary>
    public bool RequireValidSignature { get; init; }

    /// <summary>
    /// Verify the merkle root.
    /// </summary>
    public bool VerifyMerkleRoot { get; init; } = true;

    /// <summary>
    /// Fail if merkle root is invalid.
    /// </summary>
    public bool RequireValidMerkleRoot { get; init; } = true;

    /// <summary>
    /// Verify time anchor freshness.
    /// </summary>
    public bool VerifyTimeAnchor { get; init; } = true;

    /// <summary>
    /// Fail if time anchor is invalid.
    /// </summary>
    public bool RequireValidTimeAnchor { get; init; }

    /// <summary>
    /// Maximum age in hours for time anchor validation.
    /// </summary>
    public int? MaxAgeHours { get; init; }

    /// <summary>
    /// Maximum clock drift in seconds for time anchor validation.
    /// </summary>
    public int? MaxClockDriftSeconds { get; init; }

    /// <summary>
    /// Public key for signature verification.
    /// When null, only envelope integrity is checked, not the signer identity.
    /// </summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of reading a snapshot bundle.
|
||||
/// </summary>
|
||||
/// <summary>
/// Result of reading a snapshot bundle. Nullable verification members stay
/// null when the corresponding check was not requested or not reached.
/// </summary>
public sealed record SnapshotBundleReadResult
{
    /// <summary>Overall success of the read (including any required checks).</summary>
    public bool Success { get; init; }

    /// <summary>Parsed manifest; null when reading failed before parsing.</summary>
    public KnowledgeSnapshotManifest? Manifest { get; init; }

    /// <summary>SHA-256 digest of the bundle file itself ("sha256:&lt;hex&gt;").</summary>
    public string? BundleDigest { get; init; }

    /// <summary>Top-level error message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    // Signature verification
    public bool? SignatureVerified { get; init; }
    public string? SignatureKeyId { get; init; }
    public string? SignatureError { get; init; }

    // Merkle root verification
    public bool? MerkleRootVerified { get; init; }
    public string? MerkleRootError { get; init; }

    // Time anchor verification
    public bool? TimeAnchorValid { get; init; }
    public double? TimeAnchorAgeHours { get; init; }
    public string? TimeAnchorError { get; init; }

    /// <summary>Convenience factory for a failed result with an error message.</summary>
    public static SnapshotBundleReadResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,455 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotBundleWriter.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-003 - Create SnapshotBundleWriter
|
||||
// Description: Writes sealed knowledge snapshots to tar.gz bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.AirGap.Bundle.Models;
|
||||
using PolicySnapshotEntry = StellaOps.AirGap.Bundle.Models.PolicySnapshotEntry;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Writes sealed knowledge snapshots to tar.gz bundles with manifest and merkle root.
|
||||
/// </summary>
|
||||
/// <summary>
/// Writes sealed knowledge snapshots to tar.gz bundles with manifest and merkle root.
/// </summary>
public sealed class SnapshotBundleWriter : ISnapshotBundleWriter
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Creates a knowledge snapshot bundle from the specified contents.
    /// Pipeline: stage all content files into a temp directory, build the
    /// manifest (with merkle root over every staged entry), optionally sign
    /// the manifest, pack everything into a tar.gz, and clean up the temp dir.
    /// </summary>
    public async Task<SnapshotBundleResult> WriteAsync(
        SnapshotBundleRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);

        var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-{Guid.NewGuid():N}");
        Directory.CreateDirectory(tempDir);

        try
        {
            var entries = new List<BundleEntry>();
            var manifest = new KnowledgeSnapshotManifest
            {
                BundleId = request.BundleId ?? Guid.NewGuid().ToString("N"),
                Name = request.Name ?? $"knowledge-{DateTime.UtcNow:yyyy-MM-dd}",
                Version = request.Version ?? "1.0.0",
                CreatedAt = DateTimeOffset.UtcNow,
                SchemaVersion = "1.0.0"
            };

            // Write advisories
            // NOTE(review): FeedId/SourceId/FileName come from the caller and are
            // used directly in paths — a value containing ".." or separators could
            // escape the staging directory. Presumably callers pass sanitized
            // names; confirm and validate here if untrusted input can reach this.
            if (request.Advisories is { Count: > 0 })
            {
                var advisoriesDir = Path.Combine(tempDir, "advisories");
                Directory.CreateDirectory(advisoriesDir);

                foreach (var advisory in request.Advisories)
                {
                    var feedDir = Path.Combine(advisoriesDir, advisory.FeedId);
                    Directory.CreateDirectory(feedDir);

                    var filePath = Path.Combine(feedDir, advisory.FileName);
                    await File.WriteAllBytesAsync(filePath, advisory.Content, cancellationToken);

                    var relativePath = $"advisories/{advisory.FeedId}/{advisory.FileName}";
                    var digest = ComputeSha256(advisory.Content);

                    entries.Add(new BundleEntry(relativePath, digest, advisory.Content.Length));
                    manifest.Advisories.Add(new AdvisorySnapshotEntry
                    {
                        FeedId = advisory.FeedId,
                        RelativePath = relativePath,
                        Digest = digest,
                        SizeBytes = advisory.Content.Length,
                        SnapshotAt = advisory.SnapshotAt ?? DateTimeOffset.UtcNow,
                        RecordCount = advisory.RecordCount
                    });
                }
            }

            // Write VEX statements
            if (request.VexStatements is { Count: > 0 })
            {
                var vexDir = Path.Combine(tempDir, "vex");
                Directory.CreateDirectory(vexDir);

                foreach (var vex in request.VexStatements)
                {
                    var sourceDir = Path.Combine(vexDir, vex.SourceId);
                    Directory.CreateDirectory(sourceDir);

                    var filePath = Path.Combine(sourceDir, vex.FileName);
                    await File.WriteAllBytesAsync(filePath, vex.Content, cancellationToken);

                    var relativePath = $"vex/{vex.SourceId}/{vex.FileName}";
                    var digest = ComputeSha256(vex.Content);

                    entries.Add(new BundleEntry(relativePath, digest, vex.Content.Length));
                    manifest.VexStatements.Add(new VexSnapshotEntry
                    {
                        SourceId = vex.SourceId,
                        RelativePath = relativePath,
                        Digest = digest,
                        SizeBytes = vex.Content.Length,
                        SnapshotAt = vex.SnapshotAt ?? DateTimeOffset.UtcNow,
                        StatementCount = vex.StatementCount
                    });
                }
            }

            // Write policies
            if (request.Policies is { Count: > 0 })
            {
                var policiesDir = Path.Combine(tempDir, "policies");
                Directory.CreateDirectory(policiesDir);

                foreach (var policy in request.Policies)
                {
                    var filePath = Path.Combine(policiesDir, policy.FileName);
                    await File.WriteAllBytesAsync(filePath, policy.Content, cancellationToken);

                    var relativePath = $"policies/{policy.FileName}";
                    var digest = ComputeSha256(policy.Content);

                    entries.Add(new BundleEntry(relativePath, digest, policy.Content.Length));
                    manifest.Policies.Add(new PolicySnapshotEntry
                    {
                        PolicyId = policy.PolicyId,
                        Name = policy.Name,
                        Version = policy.Version,
                        RelativePath = relativePath,
                        Digest = digest,
                        SizeBytes = policy.Content.Length,
                        Type = policy.Type
                    });
                }
            }

            // Write trust roots
            if (request.TrustRoots is { Count: > 0 })
            {
                var trustDir = Path.Combine(tempDir, "trust");
                Directory.CreateDirectory(trustDir);

                foreach (var trustRoot in request.TrustRoots)
                {
                    var filePath = Path.Combine(trustDir, trustRoot.FileName);
                    await File.WriteAllBytesAsync(filePath, trustRoot.Content, cancellationToken);

                    var relativePath = $"trust/{trustRoot.FileName}";
                    var digest = ComputeSha256(trustRoot.Content);

                    entries.Add(new BundleEntry(relativePath, digest, trustRoot.Content.Length));
                    manifest.TrustRoots.Add(new TrustRootSnapshotEntry
                    {
                        KeyId = trustRoot.KeyId,
                        RelativePath = relativePath,
                        Digest = digest,
                        SizeBytes = trustRoot.Content.Length,
                        Algorithm = trustRoot.Algorithm,
                        ExpiresAt = trustRoot.ExpiresAt
                    });
                }
            }

            // Write time anchor
            // NOTE(review): the time-anchor entry is included in `entries` and
            // therefore in the merkle root — the reader must account for it
            // when recomputing the root.
            if (request.TimeAnchor is not null)
            {
                var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json");
                var timeAnchorJson = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, JsonOptions);
                await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorJson, cancellationToken);

                var digest = ComputeSha256(timeAnchorJson);
                entries.Add(new BundleEntry("time-anchor.json", digest, timeAnchorJson.Length));
                manifest.TimeAnchor = new TimeAnchorEntry
                {
                    AnchorTime = request.TimeAnchor.AnchorTime,
                    Source = request.TimeAnchor.Source,
                    Digest = digest
                };
            }

            // Compute merkle root and roll up totals before serializing.
            manifest.MerkleRoot = ComputeMerkleRoot(entries);
            manifest.TotalSizeBytes = entries.Sum(e => e.SizeBytes);
            manifest.EntryCount = entries.Count;

            // Write manifest
            var manifestJson = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
            var manifestPath = Path.Combine(tempDir, "manifest.json");
            await File.WriteAllBytesAsync(manifestPath, manifestJson, cancellationToken);

            // Sign manifest if requested
            string? signingKeyId = null;
            string? signingAlgorithm = null;
            var signed = false;

            if (request.Sign)
            {
                var signer = new SnapshotManifestSigner();
                var signResult = await signer.SignAsync(new ManifestSigningRequest
                {
                    ManifestBytes = manifestJson,
                    KeyFilePath = request.SigningKeyPath,
                    KeyPassword = request.SigningKeyPassword
                }, cancellationToken);

                // Signing failure is non-fatal: the bundle is still produced,
                // just with Signed = false in the result.
                if (signResult.Success && signResult.Envelope is not null)
                {
                    var signaturePath = Path.Combine(tempDir, "manifest.sig");
                    await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken);
                    signingKeyId = signResult.KeyId;
                    signingAlgorithm = signResult.Algorithm;
                    signed = true;
                }
            }

            // Create tar.gz bundle; normalize the extension if missing.
            var outputPath = request.OutputPath;
            if (!outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
            {
                outputPath = $"{outputPath}.tar.gz";
            }

            await CreateTarGzAsync(tempDir, outputPath, cancellationToken);

            var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken);

            return new SnapshotBundleResult
            {
                Success = true,
                OutputPath = outputPath,
                BundleId = manifest.BundleId,
                MerkleRoot = manifest.MerkleRoot,
                BundleDigest = bundleDigest,
                TotalSizeBytes = new FileInfo(outputPath).Length,
                EntryCount = entries.Count,
                CreatedAt = manifest.CreatedAt,
                Signed = signed,
                SigningKeyId = signingKeyId,
                SigningAlgorithm = signingAlgorithm
            };
        }
        finally
        {
            // Clean up temp directory; cleanup failures are intentionally ignored.
            try
            {
                if (Directory.Exists(tempDir))
                {
                    Directory.Delete(tempDir, recursive: true);
                }
            }
            catch
            {
                // Ignore cleanup errors
            }
        }
    }

    /// <summary>Hashes a byte array with SHA-256 and returns "sha256:&lt;hex&gt;".</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>Streams a file through SHA-256 and returns "sha256:&lt;hex&gt;".</summary>
    private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
    {
        await using var stream = File.OpenRead(filePath);
        var hash = await SHA256.HashDataAsync(stream, ct);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Computes a merkle root over entries sorted by path (ordinal).
    /// Leaves are SHA256("path:digest"). Must stay byte-for-byte in sync with
    /// the reader's implementation for verification to succeed.
    /// </summary>
    private static string ComputeMerkleRoot(List<BundleEntry> entries)
    {
        if (entries.Count == 0)
        {
            return string.Empty;
        }

        var leaves = entries
            .OrderBy(e => e.Path, StringComparer.Ordinal)
            .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
            .ToArray();

        while (leaves.Length > 1)
        {
            leaves = PairwiseHash(leaves).ToArray();
        }

        return Convert.ToHexString(leaves[0]).ToLowerInvariant();
    }

    /// <summary>
    /// Hashes adjacent node pairs; an odd trailing node is re-hashed alone.
    /// </summary>
    private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
    {
        for (var i = 0; i < nodes.Length; i += 2)
        {
            if (i + 1 >= nodes.Length)
            {
                yield return SHA256.HashData(nodes[i]);
                continue;
            }

            var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
            Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
            Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
            yield return SHA256.HashData(combined);
        }
    }

    /// <summary>Packs a directory into a gzipped tar at <paramref name="outputPath"/>.</summary>
    private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
    {
        var outputDir = Path.GetDirectoryName(outputPath);
        if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir))
        {
            Directory.CreateDirectory(outputDir);
        }

        await using var fileStream = File.Create(outputPath);
        await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
        await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, includeBaseDirectory: false, ct);
    }

    /// <summary>One staged content file: bundle path, digest, and size.</summary>
    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
}
|
||||
|
||||
/// <summary>
/// Interface for snapshot bundle writing.
/// </summary>
public interface ISnapshotBundleWriter
{
    /// <summary>
    /// Creates a knowledge snapshot bundle from the given request.
    /// </summary>
    /// <param name="request">Content and options for the bundle to write.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>A result describing the written bundle or the failure.</returns>
    Task<SnapshotBundleResult> WriteAsync(
        SnapshotBundleRequest request,
        CancellationToken cancellationToken = default);
}
#region Request and Result Models
|
||||
|
||||
/// <summary>
/// Request for creating a knowledge snapshot bundle.
/// </summary>
public sealed record SnapshotBundleRequest
{
    /// <summary>Destination path for the bundle archive.</summary>
    public required string OutputPath { get; init; }

    /// <summary>Bundle identifier; generated by the writer when null (assumed — confirm in writer).</summary>
    public string? BundleId { get; init; }

    /// <summary>Human-readable bundle name.</summary>
    public string? Name { get; init; }

    /// <summary>Bundle version string.</summary>
    public string? Version { get; init; }

    /// <summary>Advisory feed contents to include.</summary>
    public List<AdvisoryContent> Advisories { get; init; } = [];

    /// <summary>VEX statement contents to include.</summary>
    public List<VexContent> VexStatements { get; init; } = [];

    /// <summary>Policy contents to include.</summary>
    public List<PolicyContent> Policies { get; init; } = [];

    /// <summary>Trust root (public key) contents to include.</summary>
    public List<TrustRootContent> TrustRoots { get; init; } = [];

    /// <summary>Optional time anchor binding the bundle to a creation time.</summary>
    public TimeAnchorContent? TimeAnchor { get; init; }

    /// <summary>
    /// Whether to sign the manifest.
    /// </summary>
    public bool Sign { get; init; } = true;

    /// <summary>
    /// Path to signing key file (PEM format).
    /// If null and Sign is true, an ephemeral key will be used.
    /// </summary>
    public string? SigningKeyPath { get; init; }

    /// <summary>
    /// Password for encrypted signing key.
    /// </summary>
    public string? SigningKeyPassword { get; init; }
}
/// <summary>
/// Advisory feed content to embed in a snapshot bundle.
/// </summary>
public sealed record AdvisoryContent
{
    /// <summary>Identifier of the advisory feed this content came from.</summary>
    public required string FeedId { get; init; }

    /// <summary>File name for this entry inside the bundle.</summary>
    public required string FileName { get; init; }

    /// <summary>Raw advisory payload bytes.</summary>
    public required byte[] Content { get; init; }

    /// <summary>When the feed was snapshotted, if known.</summary>
    public DateTimeOffset? SnapshotAt { get; init; }

    /// <summary>Number of advisory records in the payload.</summary>
    public int RecordCount { get; init; }
}
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) content to embed in a snapshot bundle.
/// </summary>
public sealed record VexContent
{
    /// <summary>Identifier of the VEX source this content came from.</summary>
    public required string SourceId { get; init; }

    /// <summary>File name for this entry inside the bundle.</summary>
    public required string FileName { get; init; }

    /// <summary>Raw VEX payload bytes.</summary>
    public required byte[] Content { get; init; }

    /// <summary>When the source was snapshotted, if known.</summary>
    public DateTimeOffset? SnapshotAt { get; init; }

    /// <summary>Number of VEX statements in the payload.</summary>
    public int StatementCount { get; init; }
}
/// <summary>
/// Policy document content to embed in a snapshot bundle.
/// </summary>
public sealed record PolicyContent
{
    /// <summary>Unique identifier of the policy.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Human-readable policy name.</summary>
    public required string Name { get; init; }

    /// <summary>Policy version string.</summary>
    public required string Version { get; init; }

    /// <summary>File name for this entry inside the bundle.</summary>
    public required string FileName { get; init; }

    /// <summary>Raw policy document bytes.</summary>
    public required byte[] Content { get; init; }

    /// <summary>Policy document type; defaults to OPA Rego.</summary>
    public string Type { get; init; } = "OpaRego";
}
/// <summary>
/// Trust root (public key) content to embed in a snapshot bundle.
/// </summary>
public sealed record TrustRootContent
{
    /// <summary>Identifier of the key.</summary>
    public required string KeyId { get; init; }

    /// <summary>File name for this entry inside the bundle.</summary>
    public required string FileName { get; init; }

    /// <summary>Raw key material bytes (format not constrained here — presumably PEM; confirm against writer).</summary>
    public required byte[] Content { get; init; }

    /// <summary>Signature algorithm associated with the key; defaults to ES256.</summary>
    public string Algorithm { get; init; } = "ES256";

    /// <summary>Optional key expiry time.</summary>
    public DateTimeOffset? ExpiresAt { get; init; }
}
/// <summary>
/// Time anchor metadata binding a bundle to its creation time.
/// </summary>
public sealed record TimeAnchorContent
{
    /// <summary>The anchored timestamp.</summary>
    public required DateTimeOffset AnchorTime { get; init; }

    /// <summary>Anchor source, e.g. "local", "roughtime:&lt;server&gt;", or "rfc3161:&lt;tsa-url&gt;".</summary>
    public required string Source { get; init; }

    /// <summary>SHA-256 digest ("sha256:&lt;hex&gt;") of the anchor token, when one was produced.</summary>
    public string? TokenDigest { get; init; }
}
/// <summary>
/// Result of creating a knowledge snapshot bundle.
/// </summary>
public sealed record SnapshotBundleResult
{
    /// <summary>Whether the bundle was created successfully.</summary>
    public bool Success { get; init; }

    /// <summary>Path of the written bundle, when successful.</summary>
    public string? OutputPath { get; init; }

    /// <summary>Identifier of the created bundle.</summary>
    public string? BundleId { get; init; }

    /// <summary>Merkle root over the bundle entries (lowercase hex).</summary>
    public string? MerkleRoot { get; init; }

    /// <summary>Digest of the bundle archive.</summary>
    public string? BundleDigest { get; init; }

    /// <summary>Total size of the bundle in bytes.</summary>
    public long TotalSizeBytes { get; init; }

    /// <summary>Number of entries packed into the bundle.</summary>
    public int EntryCount { get; init; }

    /// <summary>When the bundle was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>Error description when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    /// <summary>
    /// Whether the manifest was signed.
    /// </summary>
    public bool Signed { get; init; }

    /// <summary>
    /// Key ID used for signing.
    /// </summary>
    public string? SigningKeyId { get; init; }

    /// <summary>
    /// Algorithm used for signing.
    /// </summary>
    public string? SigningAlgorithm { get; init; }

    /// <summary>Creates a failed result carrying the given error message.</summary>
    public static SnapshotBundleResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}

#endregion
#endregion
|
||||
@@ -0,0 +1,486 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SnapshotManifestSigner.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-004 - Add DSSE signing for manifest
|
||||
// Description: Signs snapshot manifests using DSSE format for integrity verification.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
/// Signs snapshot manifests using DSSE (Dead Simple Signing Envelope) format.
/// Produces signatures compatible with in-toto/Sigstore verification.
/// </summary>
public sealed class SnapshotManifestSigner : ISnapshotManifestSigner
{
    private const string DssePayloadType = "application/vnd.stellaops.knowledge-snapshot+json";
    private const string PreAuthenticationEncodingPrefix = "DSSEv1";

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Signs a manifest using the provided signing key, a PEM key file,
    /// or an ephemeral ECDSA P-256 key when neither is supplied.
    /// </summary>
    /// <param name="request">Manifest bytes plus signing key options.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The DSSE envelope and signing metadata.</returns>
    public async Task<ManifestSignatureResult> SignAsync(
        ManifestSigningRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.ManifestBytes);

        // Build PAE (Pre-Authentication Encoding) for DSSE signing
        var paeBytes = BuildPae(DssePayloadType, request.ManifestBytes);

        // Sign the PAE
        byte[] signatureBytes;
        string keyId;
        string algorithm;

        if (request.SigningKey is not null)
        {
            // Use provided signing key
            (signatureBytes, keyId, algorithm) = await SignWithKeyAsync(
                request.SigningKey, paeBytes, cancellationToken);
        }
        else if (!string.IsNullOrWhiteSpace(request.KeyFilePath))
        {
            // Load key from file and sign
            (signatureBytes, keyId, algorithm) = await SignWithKeyFileAsync(
                request.KeyFilePath, request.KeyPassword, paeBytes, cancellationToken);
        }
        else
        {
            // Generate ephemeral key for signing (keyless mode)
            (signatureBytes, keyId, algorithm) = await SignEphemeralAsync(paeBytes, cancellationToken);
        }

        // Build DSSE envelope
        var envelope = BuildDsseEnvelope(request.ManifestBytes, signatureBytes, keyId);

        return new ManifestSignatureResult
        {
            Success = true,
            Envelope = envelope,
            KeyId = keyId,
            Algorithm = algorithm,
            SignatureDigest = ComputeSha256(signatureBytes)
        };
    }

    /// <summary>
    /// Verifies a DSSE envelope signature. Without a public key, only the
    /// envelope structure and signature presence are checked.
    /// </summary>
    /// <param name="request">Envelope bytes plus an optional public key.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Verification outcome including per-signature results.</returns>
    public async Task<ManifestVerificationResult> VerifyAsync(
        ManifestVerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.EnvelopeBytes);

        try
        {
            // Parse the envelope
            using var envelope = JsonDocument.Parse(request.EnvelopeBytes);
            var root = envelope.RootElement;

            if (!root.TryGetProperty("payloadType", out var payloadTypeElement) ||
                !root.TryGetProperty("payload", out var payloadElement) ||
                !root.TryGetProperty("signatures", out var signaturesElement))
            {
                return new ManifestVerificationResult
                {
                    Success = false,
                    Error = "Invalid DSSE envelope structure"
                };
            }

            var payloadType = payloadTypeElement.GetString();
            var payloadBase64 = payloadElement.GetString();

            if (string.IsNullOrEmpty(payloadBase64))
            {
                return new ManifestVerificationResult
                {
                    Success = false,
                    Error = "Missing payload in envelope"
                };
            }

            // Decode payload
            var payloadBytes = Convert.FromBase64String(payloadBase64);

            // Compute expected digest
            var payloadDigest = ComputeSha256(payloadBytes);

            // Verify at least one signature
            var signatureCount = signaturesElement.GetArrayLength();
            if (signatureCount == 0)
            {
                return new ManifestVerificationResult
                {
                    Success = false,
                    Error = "No signatures present in envelope"
                };
            }

            // Build PAE for verification
            var paeBytes = BuildPae(payloadType ?? DssePayloadType, payloadBytes);

            // Verify signatures if public key is provided
            var verifiedSignatures = new List<VerifiedSignature>();
            foreach (var sig in signaturesElement.EnumerateArray())
            {
                // DSSE field name is "keyid" (all lowercase, per the spec).
                var keyId = sig.TryGetProperty("keyid", out var keyIdElement)
                    ? keyIdElement.GetString()
                    : null;

                if (sig.TryGetProperty("sig", out var sigElement))
                {
                    var signatureBase64 = sigElement.GetString();
                    if (!string.IsNullOrEmpty(signatureBase64))
                    {
                        // If public key is provided, verify the signature
                        if (request.PublicKey is not null)
                        {
                            var signatureBytes = Convert.FromBase64String(signatureBase64);
                            var isValid = await VerifySignatureAsync(
                                request.PublicKey, paeBytes, signatureBytes, cancellationToken);

                            verifiedSignatures.Add(new VerifiedSignature(keyId, isValid));
                        }
                        else
                        {
                            // Without public key, we can only confirm presence
                            verifiedSignatures.Add(new VerifiedSignature(keyId, null));
                        }
                    }
                }
            }

            return new ManifestVerificationResult
            {
                Success = true,
                PayloadDigest = payloadDigest,
                SignatureCount = signatureCount,
                VerifiedSignatures = verifiedSignatures,
                PayloadType = payloadType
            };
        }
        catch (JsonException ex)
        {
            return new ManifestVerificationResult
            {
                Success = false,
                Error = $"Failed to parse envelope: {ex.Message}"
            };
        }
        catch (FormatException ex)
        {
            return new ManifestVerificationResult
            {
                Success = false,
                Error = $"Invalid base64 encoding: {ex.Message}"
            };
        }
    }

    /// <summary>
    /// Builds the DSSE Pre-Authentication Encoding:
    /// "DSSEv1 SP LEN(type) SP type SP LEN(payload) SP payload" (lengths in ASCII decimal).
    /// </summary>
    private static byte[] BuildPae(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var prefixBytes = Encoding.UTF8.GetBytes(PreAuthenticationEncodingPrefix);
        var typeLenStr = typeBytes.Length.ToString();
        var payloadLenStr = payload.Length.ToString();

        var totalLen = prefixBytes.Length + 1 +
                       typeLenStr.Length + 1 +
                       typeBytes.Length + 1 +
                       payloadLenStr.Length + 1 +
                       payload.Length;

        var pae = new byte[totalLen];
        var offset = 0;

        // DSSEv1
        Buffer.BlockCopy(prefixBytes, 0, pae, offset, prefixBytes.Length);
        offset += prefixBytes.Length;
        pae[offset++] = 0x20; // ASCII space separator

        // LEN(type)
        var typeLenBytes = Encoding.UTF8.GetBytes(typeLenStr);
        Buffer.BlockCopy(typeLenBytes, 0, pae, offset, typeLenBytes.Length);
        offset += typeLenBytes.Length;
        pae[offset++] = 0x20;

        // type
        Buffer.BlockCopy(typeBytes, 0, pae, offset, typeBytes.Length);
        offset += typeBytes.Length;
        pae[offset++] = 0x20;

        // LEN(payload)
        var payloadLenBytes = Encoding.UTF8.GetBytes(payloadLenStr);
        Buffer.BlockCopy(payloadLenBytes, 0, pae, offset, payloadLenBytes.Length);
        offset += payloadLenBytes.Length;
        pae[offset++] = 0x20;

        // payload
        Buffer.BlockCopy(payload, 0, pae, offset, payload.Length);

        return pae;
    }

    /// <summary>Dispatches signing to the concrete key type (ECDSA or RSA).</summary>
    private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyAsync(
        AsymmetricAlgorithm key,
        byte[] data,
        CancellationToken cancellationToken)
    {
        await Task.CompletedTask; // Signature operations are synchronous

        return key switch
        {
            ECDsa ecdsa => SignWithEcdsa(ecdsa, data),
            RSA rsa => SignWithRsa(rsa, data),
            _ => throw new NotSupportedException($"Unsupported key type: {key.GetType().Name}")
        };
    }

    /// <summary>Signs with ECDSA/SHA-256 and maps the curve size to a JOSE algorithm name.</summary>
    private static (byte[] Signature, string KeyId, string Algorithm) SignWithEcdsa(ECDsa ecdsa, byte[] data)
    {
        var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256);
        var keyId = ComputeKeyId(ecdsa);
        var algorithm = ecdsa.KeySize switch
        {
            256 => "ES256",
            384 => "ES384",
            521 => "ES512",
            _ => "ECDSA"
        };
        return (signature, keyId, algorithm);
    }

    /// <summary>Signs with RSA/SHA-256 using PKCS#1 v1.5 padding (RS256).</summary>
    private static (byte[] Signature, string KeyId, string Algorithm) SignWithRsa(RSA rsa, byte[] data)
    {
        var signature = rsa.SignData(data, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
        var keyId = ComputeKeyId(rsa);
        return (signature, keyId, "RS256");
    }

    /// <summary>
    /// Loads a PEM (optionally encrypted) key from disk — ECDSA first, then RSA —
    /// and signs the data with it.
    /// </summary>
    private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignWithKeyFileAsync(
        string keyFilePath,
        string? password,
        byte[] data,
        CancellationToken cancellationToken)
    {
        var keyBytes = await File.ReadAllBytesAsync(keyFilePath, cancellationToken);
        var keyPem = Encoding.UTF8.GetString(keyBytes);

        // Try to load as ECDSA first
        try
        {
            using var ecdsa = ECDsa.Create();
            if (string.IsNullOrEmpty(password))
            {
                ecdsa.ImportFromPem(keyPem);
            }
            else
            {
                ecdsa.ImportFromEncryptedPem(keyPem, password);
            }
            return SignWithEcdsa(ecdsa, data);
        }
        catch (CryptographicException)
        {
            // Try RSA
        }

        try
        {
            using var rsa = RSA.Create();
            if (string.IsNullOrEmpty(password))
            {
                rsa.ImportFromPem(keyPem);
            }
            else
            {
                rsa.ImportFromEncryptedPem(keyPem, password);
            }
            return SignWithRsa(rsa, data);
        }
        catch (CryptographicException ex)
        {
            throw new InvalidOperationException($"Failed to load signing key from {keyFilePath}", ex);
        }
    }

    /// <summary>
    /// Signs with a freshly generated ECDSA P-256 key. The key is discarded after
    /// signing, so such signatures cannot later be verified against a stored key.
    /// </summary>
    private static async Task<(byte[] Signature, string KeyId, string Algorithm)> SignEphemeralAsync(
        byte[] data,
        CancellationToken cancellationToken)
    {
        await Task.CompletedTask;

        // Generate ephemeral ECDSA P-256 key
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var signature = ecdsa.SignData(data, HashAlgorithmName.SHA256);
        var keyId = $"ephemeral:{ComputeKeyId(ecdsa)}";
        return (signature, keyId, "ES256");
    }

    /// <summary>Verifies a signature over data with the given public key; false for unsupported key types.</summary>
    private static async Task<bool> VerifySignatureAsync(
        AsymmetricAlgorithm key,
        byte[] data,
        byte[] signature,
        CancellationToken cancellationToken)
    {
        await Task.CompletedTask;

        return key switch
        {
            ECDsa ecdsa => ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256),
            RSA rsa => rsa.VerifyData(data, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1),
            _ => false
        };
    }

    /// <summary>
    /// Derives a short key id: first 8 bytes of SHA-256 over the key's
    /// SubjectPublicKeyInfo, lowercase hex. "unknown" for unsupported key types.
    /// </summary>
    private static string ComputeKeyId(AsymmetricAlgorithm key)
    {
        byte[] publicKeyBytes;

        switch (key)
        {
            case ECDsa ecdsa:
                publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo();
                break;
            case RSA rsa:
                publicKeyBytes = rsa.ExportSubjectPublicKeyInfo();
                break;
            default:
                return "unknown";
        }

        var hash = SHA256.HashData(publicKeyBytes);
        return Convert.ToHexString(hash[..8]).ToLowerInvariant();
    }

    /// <summary>Serializes the DSSE envelope JSON ({ payloadType, payload, signatures[] }).</summary>
    private static byte[] BuildDsseEnvelope(byte[] payload, byte[] signature, string keyId)
    {
        var payloadBase64 = Convert.ToBase64String(payload);
        var signatureBase64 = Convert.ToBase64String(signature);

        var envelope = new DsseEnvelopeDto
        {
            PayloadType = DssePayloadType,
            Payload = payloadBase64,
            Signatures =
            [
                new DsseSignatureDto
                {
                    Keyid = keyId,
                    Sig = signatureBase64
                }
            ]
        };

        return JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
    }

    /// <summary>Returns "sha256:&lt;lowercase-hex&gt;" of the content.</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    // Serialization shape for the DSSE envelope; property names camel-case via JsonOptions.
    private sealed class DsseEnvelopeDto
    {
        public required string PayloadType { get; init; }
        public required string Payload { get; init; }
        public required List<DsseSignatureDto> Signatures { get; init; }
    }

    private sealed class DsseSignatureDto
    {
        // BUGFIX: the DSSE spec field is "keyid" (all lowercase). The property was
        // previously named "KeyId", which the camel-case policy serialized as
        // "keyId" — VerifyAsync looks up "keyid" and would never find it, so a
        // sign-then-verify round trip silently dropped the key id. Naming the
        // property "Keyid" makes the camel-case policy emit exactly "keyid".
        public string? Keyid { get; init; }
        public required string Sig { get; init; }
    }
}
/// <summary>
/// Interface for manifest signing operations.
/// </summary>
public interface ISnapshotManifestSigner
{
    /// <summary>
    /// Signs a manifest and returns the DSSE envelope plus signing metadata.
    /// </summary>
    /// <param name="request">Manifest bytes and signing key options.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<ManifestSignatureResult> SignAsync(
        ManifestSigningRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a DSSE envelope; without a public key only structure and
    /// signature presence are checked.
    /// </summary>
    /// <param name="request">Envelope bytes and an optional public key.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<ManifestVerificationResult> VerifyAsync(
        ManifestVerificationRequest request,
        CancellationToken cancellationToken = default);
}
#region Request and Result Models
|
||||
|
||||
/// <summary>
/// Request for signing a manifest.
/// </summary>
public sealed record ManifestSigningRequest
{
    /// <summary>Serialized manifest bytes to sign (used verbatim as the DSSE payload).</summary>
    public required byte[] ManifestBytes { get; init; }

    /// <summary>In-memory signing key (ECDSA or RSA); takes precedence over <see cref="KeyFilePath"/>.</summary>
    public AsymmetricAlgorithm? SigningKey { get; init; }

    /// <summary>Path to a PEM signing key file; used when <see cref="SigningKey"/> is null.</summary>
    public string? KeyFilePath { get; init; }

    /// <summary>Password for an encrypted PEM key file.</summary>
    public string? KeyPassword { get; init; }
}
/// <summary>
/// Result of signing a manifest.
/// </summary>
public sealed record ManifestSignatureResult
{
    /// <summary>Whether signing succeeded.</summary>
    public bool Success { get; init; }

    /// <summary>Serialized DSSE envelope bytes, when successful.</summary>
    public byte[]? Envelope { get; init; }

    /// <summary>Identifier of the key used for signing.</summary>
    public string? KeyId { get; init; }

    /// <summary>Signing algorithm name (e.g. ES256, RS256).</summary>
    public string? Algorithm { get; init; }

    /// <summary>SHA-256 digest ("sha256:&lt;hex&gt;") of the raw signature bytes.</summary>
    public string? SignatureDigest { get; init; }

    /// <summary>Error description when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    /// <summary>Creates a failed result carrying the given error message.</summary>
    public static ManifestSignatureResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
/// <summary>
/// Request for verifying a manifest signature.
/// </summary>
public sealed record ManifestVerificationRequest
{
    /// <summary>Serialized DSSE envelope bytes to verify.</summary>
    public required byte[] EnvelopeBytes { get; init; }

    /// <summary>Public key for cryptographic verification; when null, only signature presence is reported.</summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}
/// <summary>
/// Result of verifying a manifest signature.
/// </summary>
public sealed record ManifestVerificationResult
{
    /// <summary>Whether the envelope was structurally valid and processed.</summary>
    public bool Success { get; init; }

    /// <summary>SHA-256 digest ("sha256:&lt;hex&gt;") of the decoded payload.</summary>
    public string? PayloadDigest { get; init; }

    /// <summary>The envelope's payloadType field.</summary>
    public string? PayloadType { get; init; }

    /// <summary>Number of signatures present in the envelope.</summary>
    public int SignatureCount { get; init; }

    /// <summary>Per-signature verification outcomes.</summary>
    public IReadOnlyList<VerifiedSignature>? VerifiedSignatures { get; init; }

    /// <summary>Error description when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// A verified signature with optional verification status.
/// Verified is true/false when a public key was available, null when only
/// signature presence could be confirmed.
/// </summary>
public sealed record VerifiedSignature(string? KeyId, bool? Verified);
||||
#endregion
|
||||
@@ -0,0 +1,352 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TimeAnchorService.cs
|
||||
// Sprint: SPRINT_4300_0003_0001 (Sealed Knowledge Snapshot Export/Import)
|
||||
// Task: SEAL-009 - Add time anchor token generation
|
||||
// Description: Generates time anchor tokens for knowledge snapshot bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Bundle.Services;
|
||||
|
||||
/// <summary>
/// Generates time anchor tokens for snapshot bundles.
/// Time anchors provide cryptographic proof of the time when a snapshot was created.
/// </summary>
public sealed class TimeAnchorService : ITimeAnchorService
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Creates a time anchor token for a snapshot. The source prefix selects the
    /// anchor kind ("local", "roughtime:&lt;server&gt;", "rfc3161:&lt;tsa-url&gt;");
    /// unrecognized sources fall back to a local anchor.
    /// </summary>
    public async Task<TimeAnchorResult> CreateAnchorAsync(
        TimeAnchorRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        try
        {
            var source = request.Source?.ToLowerInvariant() ?? "local";

            return source switch
            {
                "local" => await CreateLocalAnchorAsync(request, cancellationToken),
                var s when s.StartsWith("roughtime:") => await CreateRoughtimeAnchorAsync(request, cancellationToken),
                var s when s.StartsWith("rfc3161:") => await CreateRfc3161AnchorAsync(request, cancellationToken),
                _ => await CreateLocalAnchorAsync(request, cancellationToken)
            };
        }
        catch (OperationCanceledException)
        {
            // BUGFIX: cancellation is not a failure — propagate it instead of
            // converting it into a failed result via the generic catch below.
            throw;
        }
        catch (Exception ex)
        {
            return TimeAnchorResult.Failed($"Failed to create time anchor: {ex.Message}");
        }
    }

    /// <summary>
    /// Validates a time anchor token: checks maximum age, future-drift tolerance,
    /// and (when both sides provide one) the token digest.
    /// </summary>
    public async Task<TimeAnchorValidationResult> ValidateAnchorAsync(
        TimeAnchorContent anchor,
        TimeAnchorValidationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(anchor);
        ArgumentNullException.ThrowIfNull(request);

        try
        {
            // Validate timestamp is within acceptable range
            var now = DateTimeOffset.UtcNow;
            var anchorAge = now - anchor.AnchorTime;

            if (request.MaxAgeHours.HasValue && anchorAge.TotalHours > request.MaxAgeHours.Value)
            {
                return new TimeAnchorValidationResult
                {
                    IsValid = false,
                    AnchorTime = anchor.AnchorTime,
                    Source = anchor.Source,
                    AgeHours = anchorAge.TotalHours,
                    Error = $"Time anchor is too old: {anchorAge.TotalHours:F1} hours (max: {request.MaxAgeHours.Value})"
                };
            }

            // Validate anchor is not in the future (with drift tolerance, default 60s)
            var maxDrift = TimeSpan.FromSeconds(request.MaxClockDriftSeconds ?? 60);
            if (anchor.AnchorTime > now + maxDrift)
            {
                return new TimeAnchorValidationResult
                {
                    IsValid = false,
                    AnchorTime = anchor.AnchorTime,
                    Source = anchor.Source,
                    Error = "Time anchor is in the future"
                };
            }

            // Validate token digest if both an actual and an expected digest exist
            if (!string.IsNullOrEmpty(anchor.TokenDigest) && !string.IsNullOrEmpty(request.ExpectedTokenDigest))
            {
                if (!string.Equals(anchor.TokenDigest, request.ExpectedTokenDigest, StringComparison.OrdinalIgnoreCase))
                {
                    return new TimeAnchorValidationResult
                    {
                        IsValid = false,
                        AnchorTime = anchor.AnchorTime,
                        Source = anchor.Source,
                        Error = "Token digest mismatch"
                    };
                }
            }

            await Task.CompletedTask;

            return new TimeAnchorValidationResult
            {
                IsValid = true,
                AnchorTime = anchor.AnchorTime,
                Source = anchor.Source,
                AgeHours = anchorAge.TotalHours
            };
        }
        catch (OperationCanceledException)
        {
            // BUGFIX: let cancellation propagate rather than reporting it as an
            // invalid anchor.
            throw;
        }
        catch (Exception ex)
        {
            return new TimeAnchorValidationResult
            {
                IsValid = false,
                Error = $"Validation failed: {ex.Message}"
            };
        }
    }

    /// <summary>
    /// Creates a local anchor from the machine clock: a JSON token
    /// (timestamp + nonce + optional Merkle root) plus its SHA-256 digest.
    /// </summary>
    private static async Task<TimeAnchorResult> CreateLocalAnchorAsync(
        TimeAnchorRequest request,
        CancellationToken cancellationToken)
    {
        await Task.CompletedTask;

        var anchorTime = DateTimeOffset.UtcNow;

        // Create a local anchor with a signed timestamp
        var anchorData = new LocalAnchorData
        {
            Timestamp = anchorTime,
            Nonce = Guid.NewGuid().ToString("N"),
            MerkleRoot = request.MerkleRoot
        };

        var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
        var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
        var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";

        return new TimeAnchorResult
        {
            Success = true,
            Content = new TimeAnchorContent
            {
                AnchorTime = anchorTime,
                Source = "local",
                TokenDigest = tokenDigest
            },
            TokenBytes = anchorBytes
        };
    }

    /// <summary>
    /// Creates a Roughtime-style anchor. Placeholder: no Roughtime client is
    /// wired in yet, so the response is simulated from the local clock and a
    /// warning is attached.
    /// </summary>
    private static async Task<TimeAnchorResult> CreateRoughtimeAnchorAsync(
        TimeAnchorRequest request,
        CancellationToken cancellationToken)
    {
        // Roughtime is a cryptographic time synchronization protocol
        // This is a placeholder implementation - full implementation would use a Roughtime client
        var serverUrl = request.Source?["roughtime:".Length..] ?? "roughtime.cloudflare.com:2003";

        // For now, fallback to local with indication of intended source
        var anchorTime = DateTimeOffset.UtcNow;
        var anchorData = new RoughtimeAnchorData
        {
            Timestamp = anchorTime,
            Server = serverUrl,
            Midpoint = anchorTime.ToUnixTimeSeconds(),
            Radius = 1000000, // 1 second radius in microseconds
            Nonce = Guid.NewGuid().ToString("N"),
            MerkleRoot = request.MerkleRoot
        };

        var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
        var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
        var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";

        await Task.CompletedTask;

        return new TimeAnchorResult
        {
            Success = true,
            Content = new TimeAnchorContent
            {
                AnchorTime = anchorTime,
                Source = $"roughtime:{serverUrl}",
                TokenDigest = tokenDigest
            },
            TokenBytes = anchorBytes,
            Warning = "Roughtime client not implemented; using simulated response"
        };
    }

    /// <summary>
    /// Creates an RFC 3161 (Time-Stamp Protocol) anchor. Placeholder: no TSA
    /// client is wired in yet, so the response is simulated from the local
    /// clock and a warning is attached.
    /// </summary>
    private static async Task<TimeAnchorResult> CreateRfc3161AnchorAsync(
        TimeAnchorRequest request,
        CancellationToken cancellationToken)
    {
        // RFC 3161 is the Internet X.509 PKI Time-Stamp Protocol (TSP)
        // This is a placeholder implementation - full implementation would use a TSA client
        var tsaUrl = request.Source?["rfc3161:".Length..] ?? "http://timestamp.digicert.com";

        var anchorTime = DateTimeOffset.UtcNow;
        var anchorData = new Rfc3161AnchorData
        {
            Timestamp = anchorTime,
            TsaUrl = tsaUrl,
            SerialNumber = Guid.NewGuid().ToString("N"),
            PolicyOid = "2.16.840.1.114412.2.1", // DigiCert timestamp policy
            MerkleRoot = request.MerkleRoot
        };

        var anchorJson = JsonSerializer.Serialize(anchorData, JsonOptions);
        var anchorBytes = Encoding.UTF8.GetBytes(anchorJson);
        var tokenDigest = $"sha256:{Convert.ToHexString(SHA256.HashData(anchorBytes)).ToLowerInvariant()}";

        await Task.CompletedTask;

        return new TimeAnchorResult
        {
            Success = true,
            Content = new TimeAnchorContent
            {
                AnchorTime = anchorTime,
                Source = $"rfc3161:{tsaUrl}",
                TokenDigest = tokenDigest
            },
            TokenBytes = anchorBytes,
            Warning = "RFC 3161 TSA client not implemented; using simulated response"
        };
    }

    // Token payload for a local (machine-clock) anchor.
    private sealed record LocalAnchorData
    {
        public required DateTimeOffset Timestamp { get; init; }
        public required string Nonce { get; init; }
        public string? MerkleRoot { get; init; }
    }

    // Token payload for a (simulated) Roughtime anchor.
    private sealed record RoughtimeAnchorData
    {
        public required DateTimeOffset Timestamp { get; init; }
        public required string Server { get; init; }
        public required long Midpoint { get; init; }
        public required long Radius { get; init; }
        public required string Nonce { get; init; }
        public string? MerkleRoot { get; init; }
    }

    // Token payload for a (simulated) RFC 3161 anchor.
    private sealed record Rfc3161AnchorData
    {
        public required DateTimeOffset Timestamp { get; init; }
        public required string TsaUrl { get; init; }
        public required string SerialNumber { get; init; }
        public required string PolicyOid { get; init; }
        public string? MerkleRoot { get; init; }
    }
}
/// <summary>
/// Interface for time anchor operations.
/// </summary>
public interface ITimeAnchorService
{
    /// <summary>
    /// Creates a time anchor token for a snapshot.
    /// </summary>
    /// <param name="request">Anchor source and optional Merkle root to bind.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<TimeAnchorResult> CreateAnchorAsync(
        TimeAnchorRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates an existing time anchor against age, drift, and digest constraints.
    /// </summary>
    /// <param name="anchor">The anchor to validate.</param>
    /// <param name="request">Validation constraints.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    Task<TimeAnchorValidationResult> ValidateAnchorAsync(
        TimeAnchorContent anchor,
        TimeAnchorValidationRequest request,
        CancellationToken cancellationToken = default);
}
#region Request and Result Models
|
||||
|
||||
/// <summary>
/// Request for creating a time anchor.
/// </summary>
public sealed record TimeAnchorRequest
{
    /// <summary>
    /// Time anchor source: "local", "roughtime:&lt;server&gt;", or "rfc3161:&lt;tsa-url&gt;".
    /// Null or an unrecognized value falls back to a local anchor.
    /// </summary>
    public string? Source { get; init; }

    /// <summary>
    /// Merkle root to bind to the time anchor (optional).
    /// </summary>
    public string? MerkleRoot { get; init; }
}
/// <summary>
/// Result of creating a time anchor.
/// </summary>
public sealed record TimeAnchorResult
{
    /// <summary>Whether anchor creation succeeded.</summary>
    public bool Success { get; init; }

    /// <summary>The anchor metadata, when successful.</summary>
    public TimeAnchorContent? Content { get; init; }

    /// <summary>Raw anchor token bytes (JSON) whose digest is recorded in <see cref="Content"/>.</summary>
    public byte[]? TokenBytes { get; init; }

    /// <summary>Non-fatal warning, e.g. when a simulated anchor was produced.</summary>
    public string? Warning { get; init; }

    /// <summary>Error description when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    /// <summary>Creates a failed result carrying the given error message.</summary>
    public static TimeAnchorResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
/// <summary>
/// Request for validating a time anchor.
/// </summary>
public sealed record TimeAnchorValidationRequest
{
    /// <summary>
    /// Maximum age in hours. Null disables the age check.
    /// </summary>
    public int? MaxAgeHours { get; init; }

    /// <summary>
    /// Maximum clock drift in seconds allowed for future-dated anchors.
    /// Defaults to 60 when null.
    /// </summary>
    public int? MaxClockDriftSeconds { get; init; }

    /// <summary>
    /// Expected token digest for validation. Compared case-insensitively;
    /// checked only when the anchor also carries a digest.
    /// </summary>
    public string? ExpectedTokenDigest { get; init; }
}
/// <summary>
/// Result of validating a time anchor.
/// </summary>
public sealed record TimeAnchorValidationResult
{
    /// <summary>Whether the anchor passed all requested checks.</summary>
    public bool IsValid { get; init; }

    /// <summary>The anchor's timestamp, echoed for the caller.</summary>
    public DateTimeOffset? AnchorTime { get; init; }

    /// <summary>The anchor's source, echoed for the caller.</summary>
    public string? Source { get; init; }

    /// <summary>Anchor age in hours at validation time, when computed.</summary>
    public double? AgeHours { get; init; }

    /// <summary>Reason the anchor failed validation, when invalid.</summary>
    public string? Error { get; init; }
}
#endregion
|
||||
Reference in New Issue
Block a user