audit, advisories and doctors/setup work

This commit is contained in:
master
2026-01-13 18:53:39 +02:00
parent 9ca7cb183e
commit d7be6ba34b
811 changed files with 54242 additions and 4056 deletions

View File

@@ -111,24 +111,8 @@ public static class ExportAdapterServiceExtensions
new TrivyJavaDbAdapter(
sp.GetRequiredService<ILogger<TrivyJavaDbAdapter>>(),
sp.GetRequiredService<ICryptoHash>()));
// Register mirror delta infrastructure
services.AddSingleton<IMirrorBaseManifestStore, InMemoryMirrorBaseManifestStore>();
services.AddSingleton<IMirrorContentStore>(sp =>
new InMemoryMirrorContentStore(sp.GetRequiredService<ICryptoHash>()));
services.AddSingleton<IMirrorDeltaService, MirrorDeltaService>();
// Register Mirror Delta adapter
services.AddSingleton<IExportAdapter>(sp =>
new MirrorDeltaAdapter(
sp.GetRequiredService<ILogger<MirrorDeltaAdapter>>(),
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<IMirrorDeltaService>(),
sp.GetRequiredService<IMirrorBaseManifestStore>(),
sp.GetService<IMirrorContentStore>()));
// Register encryption services
services.AddSingleton<IAgeKeyWrapper, StubAgeKeyWrapper>();
// Note: IKmsKeyWrapper should be registered by specific KMS implementations (AWS, Azure, etc.)
services.AddSingleton<IBundleEncryptionService>(sp =>
new BundleEncryptionService(
@@ -147,6 +131,36 @@ public static class ExportAdapterServiceExtensions
return services;
}
/// <summary>
/// Registers in-memory mirror delta infrastructure and adapter (opt-in).
/// </summary>
public static IServiceCollection AddMirrorDeltaAdaptersWithInMemoryStores(this IServiceCollection services)
{
services.AddSingleton<IMirrorBaseManifestStore, InMemoryMirrorBaseManifestStore>();
services.AddSingleton<IMirrorContentStore>(sp =>
new InMemoryMirrorContentStore(sp.GetRequiredService<ICryptoHash>()));
services.AddSingleton<IMirrorDeltaService, MirrorDeltaService>();
services.AddSingleton<IExportAdapter>(sp =>
new MirrorDeltaAdapter(
sp.GetRequiredService<ILogger<MirrorDeltaAdapter>>(),
sp.GetRequiredService<ICryptoHash>(),
sp.GetRequiredService<IMirrorDeltaService>(),
sp.GetRequiredService<IMirrorBaseManifestStore>(),
sp.GetService<IMirrorContentStore>()));
return services;
}
/// <summary>
/// Registers the stub age key wrapper (opt-in for tests).
/// </summary>
public static IServiceCollection AddStubAgeKeyWrapper(this IServiceCollection services)
{
services.AddSingleton<IAgeKeyWrapper, StubAgeKeyWrapper>();
return services;
}
/// <summary>
/// Registers export adapters with custom normalization options.
/// </summary>

View File

@@ -48,13 +48,10 @@ public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
{
wrappedKey = await WrapWithNativeAgeAsync(dek, recipient, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
else if (!string.IsNullOrWhiteSpace(_options.AgeCliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, cliPath!, cancellationToken);
var cliPath = GetAgeCliPath(_options.AgeCliPath);
wrappedKey = await WrapWithAgeCliAsync(dek, recipient, cliPath, cancellationToken);
}
else
{
@@ -102,13 +99,10 @@ public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
{
dek = await UnwrapWithNativeAgeAsync(wrappedBytes, privateKey, cancellationToken);
}
else if (!string.IsNullOrEmpty(_options.AgeCliPath))
else if (!string.IsNullOrWhiteSpace(_options.AgeCliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, _options.AgeCliPath, cancellationToken);
}
else if (TryFindAgeCli(out var cliPath))
{
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, cliPath!, cancellationToken);
var cliPath = GetAgeCliPath(_options.AgeCliPath);
dek = await UnwrapWithAgeCliAsync(wrappedBytes, privateKey, cliPath, cancellationToken);
}
else
{
@@ -157,57 +151,16 @@ public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
return false;
}
private static bool TryFindAgeCli(out string? path)
private static string GetAgeCliPath(string ageCliPath)
{
path = null;
// Try common locations
var candidates = new[]
if (!File.Exists(ageCliPath))
{
"age",
"/usr/bin/age",
"/usr/local/bin/age",
@"C:\Program Files\age\age.exe"
};
foreach (var candidate in candidates)
{
if (File.Exists(candidate))
{
path = candidate;
return true;
}
throw new FileNotFoundException(
"Age CLI not found at configured path. Set BundleEncryption:AgeCliPath to a valid path.",
ageCliPath);
}
// Try PATH
try
{
var startInfo = new ProcessStartInfo
{
FileName = "age",
Arguments = "--version",
RedirectStandardOutput = true,
UseShellExecute = false,
CreateNoWindow = true
};
using var process = Process.Start(startInfo);
if (process is not null)
{
process.WaitForExit(1000);
if (process.ExitCode == 0)
{
path = "age";
return true;
}
}
}
catch
{
// age CLI not found in PATH
}
return false;
return ageCliPath;
}
private static async Task<byte[]> WrapWithNativeAgeAsync(
@@ -242,13 +195,15 @@ public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--encrypt --recipient {recipient}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
startInfo.ArgumentList.Add("--encrypt");
startInfo.ArgumentList.Add("--recipient");
startInfo.ArgumentList.Add(recipient);
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");
@@ -283,13 +238,15 @@ public sealed class AgeBundleKeyWrapper : IBundleKeyWrapper
var startInfo = new ProcessStartInfo
{
FileName = agePath,
Arguments = $"--decrypt --identity {identityPath}",
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
startInfo.ArgumentList.Add("--decrypt");
startInfo.ArgumentList.Add("--identity");
startInfo.ArgumentList.Add(identityPath);
using var process = Process.Start(startInfo)
?? throw new InvalidOperationException("Failed to start age process");

View File

@@ -242,19 +242,29 @@ public sealed class StubKmsClient : IKmsClient
System.Security.Cryptography.RandomNumberGenerator.Fill(plaintext);
// Encrypt it
var encryptResult = EncryptAsync(keyId, plaintext, encryptionContext, cancellationToken).GetAwaiter().GetResult();
return GenerateDataKeyInternalAsync(keyId, plaintext, encryptionContext, cancellationToken);
}
private async Task<KmsGenerateDataKeyResult> GenerateDataKeyInternalAsync(
string keyId,
byte[] plaintext,
IDictionary<string, string>? encryptionContext,
CancellationToken cancellationToken)
{
var encryptResult = await EncryptAsync(keyId, plaintext, encryptionContext, cancellationToken)
.ConfigureAwait(false);
if (!encryptResult.Success)
{
return Task.FromResult(KmsGenerateDataKeyResult.Failed(encryptResult.Error ?? "Encryption failed"));
return KmsGenerateDataKeyResult.Failed(encryptResult.Error ?? "Encryption failed");
}
return Task.FromResult(new KmsGenerateDataKeyResult
return new KmsGenerateDataKeyResult
{
Success = true,
Plaintext = plaintext,
CiphertextBlob = encryptResult.Ciphertext,
KeyId = keyId
});
};
}
}

View File

@@ -56,7 +56,9 @@ public sealed class DistributionLifecycleService : IDistributionLifecycleService
if (request.RetentionPolicy is { Enabled: true })
{
retentionPolicyId = request.RetentionPolicy.PolicyId;
retentionPolicyId = request.RetentionPolicy.PolicyId == Guid.Empty
? _guidProvider.NewGuid()
: request.RetentionPolicy.PolicyId;
retentionExpiresAt = request.RetentionPolicy.CalculateExpiryAt(now);
}

View File

@@ -107,7 +107,7 @@ public sealed record ExportRetentionPolicy
/// <summary>
/// Unique identifier for the retention policy.
/// </summary>
public Guid PolicyId { get; init; } = Guid.NewGuid();
public Guid PolicyId { get; init; }
/// <summary>
/// Duration to retain artifacts (e.g., "30d", "1y").

View File

@@ -19,7 +19,7 @@ public sealed record LineageNodeEvidencePack
/// <summary>
/// Unique identifier for this evidence pack.
/// </summary>
public Guid PackId { get; init; } = Guid.NewGuid();
public Guid PackId { get; init; }
/// <summary>
/// Artifact digest this evidence pack relates to.
@@ -64,7 +64,7 @@ public sealed record LineageNodeEvidencePack
/// <summary>
/// When the evidence pack was generated.
/// </summary>
public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
public DateTimeOffset GeneratedAt { get; init; }
/// <summary>
/// DSSE attestations included in this pack.

View File

@@ -1,5 +1,6 @@
using System.Collections.Concurrent;
using StellaOps.Cryptography;
using StellaOps.Determinism;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
@@ -162,11 +163,17 @@ public sealed class FileSystemMirrorContentStore : IMirrorContentStore, IDisposa
private readonly string _storePath;
private readonly ICryptoHash _cryptoHash;
private readonly bool _ownsDirectory;
private readonly IGuidProvider _guidProvider;
public FileSystemMirrorContentStore(string storePath, ICryptoHash cryptoHash, bool createIfMissing = true)
public FileSystemMirrorContentStore(
string storePath,
ICryptoHash cryptoHash,
bool createIfMissing = true,
IGuidProvider? guidProvider = null)
{
_storePath = storePath ?? throw new ArgumentNullException(nameof(storePath));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
if (!Directory.Exists(_storePath))
{
@@ -215,7 +222,7 @@ public sealed class FileSystemMirrorContentStore : IMirrorContentStore, IDisposa
ArgumentNullException.ThrowIfNull(content);
// Write to temp file first
var tempPath = Path.Combine(_storePath, $".tmp-{Guid.NewGuid():N}");
var tempPath = Path.Combine(_storePath, $".tmp-{_guidProvider.NewGuid():N}");
try
{
await using (var tempStream = new FileStream(

View File

@@ -1,11 +1,14 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.Globalization;
using System.IO.Compression;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Determinism;
namespace StellaOps.ExportCenter.Core.OfflineBundle;
@@ -16,19 +19,26 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
{
private readonly TimeProvider _timeProvider;
private readonly ILogger<OfflineBundlePackager> _logger;
private readonly IGuidProvider _guidProvider;
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
Encoder = JavaScriptEncoder.Default
};
public OfflineBundlePackager(
TimeProvider timeProvider,
ILogger<OfflineBundlePackager> logger)
ILogger<OfflineBundlePackager> logger,
IGuidProvider? guidProvider = null)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <summary>
@@ -42,9 +52,10 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
ArgumentException.ThrowIfNullOrWhiteSpace(request.AlertId);
ArgumentException.ThrowIfNullOrWhiteSpace(request.ActorId);
var bundleId = Guid.NewGuid().ToString("N");
var now = _timeProvider.GetUtcNow();
var bundleId = ComputeBundleId(request, now);
var entries = new List<BundleEntry>();
var tempDir = Path.Combine(Path.GetTempPath(), $"bundle_{bundleId}");
var tempDir = Path.Combine(Path.GetTempPath(), $"bundle_{_guidProvider.NewGuid():N}");
try
{
@@ -57,32 +68,32 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
Directory.CreateDirectory(Path.Combine(tempDir, "attestations"));
// Write metadata
entries.AddRange(await WriteMetadataAsync(tempDir, request, cancellationToken));
entries.AddRange(await WriteMetadataAsync(tempDir, request, now, cancellationToken));
// Write placeholder evidence artifacts
entries.AddRange(await WriteEvidencePlaceholdersAsync(tempDir, request, cancellationToken));
entries.AddRange(await WriteEvidencePlaceholdersAsync(tempDir, request, now, cancellationToken));
// Write VEX data
if (request.IncludeVexHistory)
{
entries.AddRange(await WriteVexPlaceholdersAsync(tempDir, request, cancellationToken));
entries.AddRange(await WriteVexPlaceholdersAsync(tempDir, request, now, cancellationToken));
}
// Write SBOM slices
if (request.IncludeSbomSlice)
{
entries.AddRange(await WriteSbomPlaceholdersAsync(tempDir, request, cancellationToken));
entries.AddRange(await WriteSbomPlaceholdersAsync(tempDir, request, now, cancellationToken));
}
// Create manifest
var manifest = CreateManifest(bundleId, request, entries);
var manifest = CreateManifest(bundleId, request, entries, now);
// Write manifest
var manifestEntry = await WriteManifestAsync(tempDir, manifest, cancellationToken);
// Don't add manifest to entries (it contains the entry list)
// Create tarball
var bundlePath = await CreateTarballAsync(tempDir, bundleId, cancellationToken);
var bundlePath = await CreateTarballAsync(tempDir, bundleId, now, cancellationToken);
_logger.LogInformation(
"Created bundle {BundleId} for alert {AlertId} with {EntryCount} entries",
@@ -127,7 +138,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
}
var issues = new List<string>();
var tempDir = Path.Combine(Path.GetTempPath(), $"verify_{Guid.NewGuid():N}");
var tempDir = Path.Combine(Path.GetTempPath(), $"verify_{_guidProvider.NewGuid():N}");
try
{
@@ -235,7 +246,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
return null;
}
var tempDir = Path.Combine(Path.GetTempPath(), $"read_{Guid.NewGuid():N}");
var tempDir = Path.Combine(Path.GetTempPath(), $"read_{_guidProvider.NewGuid():N}");
try
{
@@ -260,13 +271,34 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
}
}
private static string ComputeBundleId(BundleRequest request, DateTimeOffset createdAt)
{
var input = string.Join(
"|",
new[]
{
request.AlertId,
request.TenantId,
request.ActorId,
request.ArtifactId,
request.BaselineScanId ?? string.Empty,
request.IncludeSbomSlice.ToString(),
request.IncludeVexHistory.ToString(),
request.SignBundle.ToString(),
createdAt.ToString("O", CultureInfo.InvariantCulture)
});
var hash = ComputeHash(Encoding.UTF8.GetBytes(input));
return hash.Length > 32 ? hash[..32] : hash;
}
private async Task<IReadOnlyList<BundleEntry>> WriteMetadataAsync(
string tempDir,
BundleRequest request,
DateTimeOffset now,
CancellationToken cancellationToken)
{
var entries = new List<BundleEntry>();
var now = _timeProvider.GetUtcNow();
// Write alert metadata
var alertMetadata = new
@@ -305,6 +337,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
private async Task<IReadOnlyList<BundleEntry>> WriteEvidencePlaceholdersAsync(
string tempDir,
BundleRequest request,
DateTimeOffset now,
CancellationToken cancellationToken)
{
var entries = new List<BundleEntry>();
@@ -314,7 +347,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
{
status = "pending",
alert_id = request.AlertId,
computed_at = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)
computed_at = now.ToString("O", CultureInfo.InvariantCulture)
};
entries.Add(await WriteJsonEntryAsync(
tempDir, "evidence/reachability.json", BundleEntryTypes.Evidence, reachability, cancellationToken));
@@ -343,6 +376,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
private async Task<IReadOnlyList<BundleEntry>> WriteVexPlaceholdersAsync(
string tempDir,
BundleRequest request,
DateTimeOffset now,
CancellationToken cancellationToken)
{
var entries = new List<BundleEntry>();
@@ -370,6 +404,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
private async Task<IReadOnlyList<BundleEntry>> WriteSbomPlaceholdersAsync(
string tempDir,
BundleRequest request,
DateTimeOffset now,
CancellationToken cancellationToken)
{
var entries = new List<BundleEntry>();
@@ -457,7 +492,8 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
private BundleManifest CreateManifest(
string bundleId,
BundleRequest request,
List<BundleEntry> entries)
List<BundleEntry> entries,
DateTimeOffset createdAt)
{
var contentHash = ComputeContentHash(entries);
@@ -466,7 +502,7 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
BundleId = bundleId,
AlertId = request.AlertId,
ArtifactId = request.ArtifactId,
CreatedAt = _timeProvider.GetUtcNow(),
CreatedAt = createdAt,
CreatedBy = request.ActorId,
Entries = entries.OrderBy(e => e.Path, StringComparer.Ordinal).ToList(),
ContentHash = contentHash,
@@ -479,17 +515,80 @@ public sealed class OfflineBundlePackager : IOfflineBundlePackager
private async Task<string> CreateTarballAsync(
string sourceDir,
string bundleId,
DateTimeOffset createdAt,
CancellationToken cancellationToken)
{
var outputPath = Path.Combine(Path.GetTempPath(), $"alert_{bundleId}.stella.bundle.tgz");
await using var outputStream = File.Create(outputPath);
await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal);
await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, false, cancellationToken);
await using var outputStream = new FileStream(
outputPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 64 * 1024,
useAsync: true);
await using (var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true))
using (var tar = new TarWriter(gzipStream, TarEntryFormat.Pax, leaveOpen: true))
{
var files = Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories)
.Select(path => new
{
FullPath = path,
RelativePath = Path.GetRelativePath(sourceDir, path).Replace('\\', '/')
})
.OrderBy(x => x.RelativePath, StringComparer.Ordinal)
.ToList();
foreach (var file in files)
{
cancellationToken.ThrowIfCancellationRequested();
await using var fileStream = new FileStream(
file.FullPath,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 64 * 1024,
useAsync: true);
var entry = new PaxTarEntry(TarEntryType.RegularFile, file.RelativePath)
{
Mode = DefaultFileMode,
ModificationTime = createdAt,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = fileStream
};
tar.WriteEntry(entry);
}
}
ApplyDeterministicGzipHeader(outputStream, createdAt);
return outputPath;
}
private static void ApplyDeterministicGzipHeader(FileStream stream, DateTimeOffset createdAt)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written for offline bundle.");
}
var seconds = checked((int)(createdAt - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
private static async Task ExtractTarballAsync(
string tarballPath,
string targetDir,

View File

@@ -1,6 +1,7 @@
using System.Collections.Concurrent;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Determinism;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Planner;
@@ -17,6 +18,7 @@ public sealed class ExportPlanner : IExportPlanner
private readonly IExportProfileRepository _profileRepository;
private readonly ILogger<ExportPlanner> _logger;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
// In-memory plan store (in production, use database)
private readonly ConcurrentDictionary<Guid, ExportPlan> _plans = new();
@@ -25,12 +27,14 @@ public sealed class ExportPlanner : IExportPlanner
IExportScopeResolver scopeResolver,
IExportProfileRepository profileRepository,
ILogger<ExportPlanner> logger,
TimeProvider? timeProvider = null)
TimeProvider? timeProvider = null,
IGuidProvider? guidProvider = null)
{
_scopeResolver = scopeResolver;
_profileRepository = profileRepository;
_logger = logger;
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
public async Task<ExportPlanResult> CreatePlanAsync(
@@ -52,8 +56,8 @@ public sealed class ExportPlanner : IExportPlanner
}
// Parse scope from profile or use override
var scope = request.ScopeOverride ?? ParseScope(profile.ScopeJson);
var format = request.FormatOverride ?? ParseFormat(profile.FormatJson);
var scope = request.ScopeOverride ?? ParseScope(profile.ScopeJson, profile.ProfileId);
var format = request.FormatOverride ?? ParseFormat(profile.FormatJson, profile.ProfileId);
// Validate scope
var scopeErrors = await _scopeResolver.ValidateAsync(scope, cancellationToken);
@@ -80,7 +84,7 @@ public sealed class ExportPlanner : IExportPlanner
// Create plan
var plan = new ExportPlan
{
PlanId = Guid.NewGuid(),
PlanId = _guidProvider.NewGuid(),
ProfileId = request.ProfileId,
TenantId = request.TenantId,
Status = ExportPlanStatus.Ready,
@@ -183,7 +187,7 @@ public sealed class ExportPlanner : IExportPlanner
return Task.FromResult(true);
}
private static ExportScope ParseScope(string? scopeJson)
private ExportScope ParseScope(string? scopeJson, Guid profileId)
{
if (string.IsNullOrWhiteSpace(scopeJson))
{
@@ -194,13 +198,14 @@ public sealed class ExportPlanner : IExportPlanner
{
return JsonSerializer.Deserialize<ExportScope>(scopeJson) ?? new ExportScope();
}
catch
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to parse scope JSON for profile {ProfileId}", profileId);
return new ExportScope();
}
}
private static ExportFormatOptions ParseFormat(string? formatJson)
private ExportFormatOptions ParseFormat(string? formatJson, Guid profileId)
{
if (string.IsNullOrWhiteSpace(formatJson))
{
@@ -211,8 +216,9 @@ public sealed class ExportPlanner : IExportPlanner
{
return JsonSerializer.Deserialize<ExportFormatOptions>(formatJson) ?? new ExportFormatOptions();
}
catch
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to parse format JSON for profile {ProfileId}", profileId);
return new ExportFormatOptions();
}
}
@@ -322,10 +328,12 @@ public sealed class InMemoryExportProfileRepository : IExportProfileRepository
{
private readonly ConcurrentDictionary<(Guid TenantId, Guid ProfileId), ExportProfile> _profiles = new();
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
public InMemoryExportProfileRepository(TimeProvider? timeProvider = null)
public InMemoryExportProfileRepository(TimeProvider? timeProvider = null, IGuidProvider? guidProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
public Task<ExportProfile?> GetByIdAsync(Guid profileId, Guid tenantId, CancellationToken cancellationToken = default)
@@ -347,7 +355,7 @@ public sealed class InMemoryExportProfileRepository : IExportProfileRepository
var now = _timeProvider.GetUtcNow();
var newProfile = profile with
{
ProfileId = profile.ProfileId == Guid.Empty ? Guid.NewGuid() : profile.ProfileId,
ProfileId = profile.ProfileId == Guid.Empty ? _guidProvider.NewGuid() : profile.ProfileId,
CreatedAt = now,
UpdatedAt = now
};

View File

@@ -1,4 +1,5 @@
using System.Buffers.Binary;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;

View File

@@ -12,6 +12,8 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Attestation;
using StellaOps.Attestor.Envelope;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Services;
@@ -24,24 +26,28 @@ public sealed class EvidencePackSigningService : IEvidencePackSigningService
{
private static readonly ActivitySource ActivitySource = new("StellaOps.ExportCenter.Signing");
private const string PayloadType = "application/vnd.stellaops.evidence-pack-manifest+json";
private const string InTotoPredicateType = "https://stella.ops/evidence-pack@v1";
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly DsseEnvelopeSerializationOptions EnvelopeSerializationOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
EmitCompactJson = true,
EmitExpandedJson = false,
IndentExpandedJson = false,
IncludePayloadPreview = false
};
private readonly ILogger<EvidencePackSigningService> _logger;
private readonly TimeProvider _timeProvider;
private readonly IAuthoritySigner? _authoritySigner;
public EvidencePackSigningService(
ILogger<EvidencePackSigningService> logger,
IAuthoritySigner? authoritySigner = null,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_authoritySigner = authoritySigner;
}
/// <inheritdoc />
@@ -81,31 +87,54 @@ public sealed class EvidencePackSigningService : IEvidencePackSigningService
};
}
if (_authoritySigner is null)
{
_logger.LogWarning("Evidence pack signing requested but no authority signer configured");
return new EvidencePackSignResult
{
Success = false,
Error = "Signing is not configured"
};
}
// Build in-toto statement for the manifest
var statement = BuildInTotoStatement(pack);
var statementJson = JsonSerializer.Serialize(statement, JsonOptions);
var statementBytes = Encoding.UTF8.GetBytes(statementJson);
// Create DSSE envelope
var envelope = await CreateDsseEnvelopeAsync(
statementBytes,
request.KeyId,
ct);
var envelope = await DsseHelper.WrapAsync(statement, _authoritySigner, ct).ConfigureAwait(false);
var serialized = DsseEnvelopeSerializer.Serialize(envelope, EnvelopeSerializationOptions);
var envelopeBytes = serialized.CompactJson ?? serialized.ExpandedJson;
if (envelopeBytes is null || envelopeBytes.Length == 0)
{
return new EvidencePackSignResult
{
Success = false,
Error = "Failed to serialize DSSE envelope"
};
}
var envelopeJson = JsonSerializer.Serialize(envelope, JsonOptions);
var envelopeBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(envelopeJson));
var envelopeDigest = ComputeDigest(envelopeJson);
var envelopeBase64 = Convert.ToBase64String(envelopeBytes);
var envelopeDigest = ComputeDigest(envelopeBytes);
var signedAt = _timeProvider.GetUtcNow();
var signatureEntry = envelope.Signatures.FirstOrDefault();
if (signatureEntry is null)
{
return new EvidencePackSignResult
{
Success = false,
Error = "DSSE envelope has no signatures"
};
}
// Build signature record
var signature = new EvidencePackSignature
{
Algorithm = envelope.Signatures.FirstOrDefault()?.Sig is not null ? "ECDSA-P256" : "none",
SignatureBase64 = envelope.Signatures.FirstOrDefault()?.Sig ?? string.Empty,
KeyId = request.KeyId,
Algorithm = "dsse",
SignatureBase64 = signatureEntry.Signature,
KeyId = signatureEntry.KeyId ?? request.KeyId,
CertificateChain = ImmutableArray<string>.Empty, // Would populate for keyless
TransparencyLogIndex = request.UploadToTransparencyLog ? await UploadToRekorAsync(envelope, ct) : null,
TransparencyLogIndex = request.UploadToTransparencyLog ? LogTransparencyRequest(pack.PackId) : null,
SignedAt = signedAt
};
@@ -222,42 +251,36 @@ public sealed class EvidencePackSigningService : IEvidencePackSigningService
private static InTotoStatement BuildInTotoStatement(LineageNodeEvidencePack pack)
{
var subjects = new List<InTotoSubject>
var subjects = new List<Subject>
{
new InTotoSubject
{
Name = $"stellaops:evidence-pack:{pack.PackId}",
Digest = new Dictionary<string, string>
new(
$"stellaops:evidence-pack:{pack.PackId}",
new Dictionary<string, string>(StringComparer.Ordinal)
{
["sha256"] = ExtractHash(pack.Manifest!.MerkleRoot)
}
}
})
};
// Add SBOM subjects
foreach (var sbom in pack.SbomDocuments)
{
subjects.Add(new InTotoSubject
{
Name = sbom.FileName,
Digest = new Dictionary<string, string>
subjects.Add(new Subject(
sbom.FileName,
new Dictionary<string, string>(StringComparer.Ordinal)
{
["sha256"] = ExtractHash(sbom.Digest)
}
});
}));
}
// Add VEX subjects
foreach (var vex in pack.VexDocuments)
{
subjects.Add(new InTotoSubject
{
Name = vex.FileName,
Digest = new Dictionary<string, string>
subjects.Add(new Subject(
vex.FileName,
new Dictionary<string, string>(StringComparer.Ordinal)
{
["sha256"] = ExtractHash(vex.Digest)
}
});
}));
}
var predicate = new EvidencePackPredicate
@@ -276,78 +299,41 @@ public sealed class EvidencePackSigningService : IEvidencePackSigningService
AttestationCount = pack.Attestations.Length
};
return new InTotoStatement
{
Type = "https://in-toto.io/Statement/v1",
PredicateType = InTotoPredicateType,
Subject = subjects,
Predicate = predicate
};
return new InTotoStatement(
"https://in-toto.io/Statement/v1",
subjects,
InTotoPredicateType,
predicate);
}
private static async Task<DsseEnvelope> CreateDsseEnvelopeAsync(
byte[] payload,
string? keyId,
CancellationToken ct)
private long? LogTransparencyRequest(Guid packId)
{
// Compute PAE (Pre-Authentication Encoding)
var payloadBase64 = Convert.ToBase64String(payload);
var paeString = $"DSSEv1 {PayloadType.Length} {PayloadType} {payloadBase64.Length} {payloadBase64}";
var paeBytes = Encoding.UTF8.GetBytes(paeString);
// Sign the PAE
// In real implementation, would use actual signing key or keyless flow
// For now, use placeholder signature
string signature;
using (var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256))
{
var sigBytes = ecdsa.SignData(paeBytes, HashAlgorithmName.SHA256);
signature = Convert.ToBase64String(sigBytes);
}
await Task.CompletedTask;
return new DsseEnvelope
{
PayloadType = PayloadType,
Payload = payloadBase64,
Signatures = new List<DsseSignature>
{
new DsseSignature
{
KeyId = keyId ?? "ephemeral",
Sig = signature
}
}
};
}
private Task<long?> UploadToRekorAsync(DsseEnvelope envelope, CancellationToken ct)
{
// In real implementation, would upload to Rekor transparency log
// For now, return placeholder index
return Task.FromResult<long?>(_timeProvider.GetUtcNow().ToUnixTimeMilliseconds());
_logger.LogWarning(
"Transparency log upload requested for evidence pack {PackId} but no Rekor client is configured",
packId);
return null;
}
private static string ComputeMerkleRoot(ImmutableArray<ManifestEntry> entries)
{
if (entries.Length == 0)
{
return ComputeHash(string.Empty);
return ComputeHash(Encoding.UTF8.GetBytes(string.Empty));
}
var combined = string.Join("|", entries.Select(e => $"{e.Path}:{e.Sha256}"));
return ComputeHash(combined);
var combined = string.Join(
"|",
entries
.OrderBy(e => e.Path, StringComparer.Ordinal)
.Select(e => $"{e.Path}:{e.Sha256}"));
return ComputeHash(Encoding.UTF8.GetBytes(combined));
}
private static string ComputeDigest(string input)
{
return $"sha256:{ComputeHash(input)}";
}
private static string ComputeDigest(ReadOnlySpan<byte> bytes)
=> $"sha256:{ComputeHash(bytes)}";
private static string ComputeHash(string input)
private static string ComputeHash(ReadOnlySpan<byte> bytes)
{
var bytes = Encoding.UTF8.GetBytes(input);
var hash = SHA256.HashData(bytes);
return Convert.ToHexStringLower(hash);
}
@@ -369,35 +355,6 @@ public sealed class EvidencePackSigningService : IEvidencePackSigningService
return digest.Length > 16 ? $"{digest[..16]}..." : digest;
}
// DSSE and in-toto types for serialization
private sealed class DsseEnvelope
{
public required string PayloadType { get; init; }
public required string Payload { get; init; }
public required List<DsseSignature> Signatures { get; init; }
}
private sealed class DsseSignature
{
public string? KeyId { get; init; }
public required string Sig { get; init; }
}
/// <summary>
/// in-toto attestation statement: type, predicate type URI, the subjects the
/// attestation is about, and the predicate body.
/// </summary>
private sealed class InTotoStatement
{
    // Serialized as "_type" per the in-toto statement schema (explicit attribute below).
    [System.Text.Json.Serialization.JsonPropertyName("_type")]
    public required string Type { get; init; }
    // URI identifying the predicate schema.
    public required string PredicateType { get; init; }
    // Artifacts this statement attests to.
    public required List<InTotoSubject> Subject { get; init; }
    // Free-form predicate body, shaped by PredicateType.
    public required object Predicate { get; init; }
}
/// <summary>An in-toto subject: artifact name plus its digests keyed by algorithm (e.g. "sha256").</summary>
private sealed class InTotoSubject
{
    public required string Name { get; init; }
    // Map of digest algorithm -> hex digest value.
    public required Dictionary<string, string> Digest { get; init; }
}
private sealed class EvidencePackPredicate
{
public required string PackId { get; init; }

View File

@@ -12,8 +12,10 @@ using System.Globalization;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Determinism;
using StellaOps.ExportCenter.Core.Domain;
namespace StellaOps.ExportCenter.Core.Services;
@@ -29,18 +31,24 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
// Shared serializer settings for evidence-pack JSON: camelCase keys, indented
// output, and the default (strict) encoder so escaping is stable across runs.
private static readonly JsonSerializerOptions JsonOptions = new()
{
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    WriteIndented = true,
    Encoder = JavaScriptEncoder.Default
};
private readonly ILogger<LineageEvidencePackService> _logger;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ConcurrentDictionary<Guid, CachedPack> _packCache = new();
private readonly string _tempDirectory;
/// <summary>
/// Creates the service. <paramref name="timeProvider"/> and
/// <paramref name="guidProvider"/> default to the system implementations and
/// exist so tests can inject deterministic clocks/ids.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="logger"/> is null.</exception>
public LineageEvidencePackService(
    ILogger<LineageEvidencePackService> logger,
    TimeProvider? timeProvider = null,
    IGuidProvider? guidProvider = null)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _timeProvider = timeProvider ?? TimeProvider.System;
    _guidProvider = guidProvider ?? SystemGuidProvider.Instance;
    // Packs are assembled under a well-known temp root; created eagerly here.
    _tempDirectory = Path.Combine(Path.GetTempPath(), "stellaops-evidence-packs");
    Directory.CreateDirectory(_tempDirectory);
}
@@ -53,7 +61,8 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
CancellationToken ct = default)
{
options ??= new EvidencePackOptions();
var packId = Guid.NewGuid();
var packId = _guidProvider.NewGuid();
var now = _timeProvider.GetUtcNow();
using var activity = ActivitySource.StartActivity("GenerateEvidencePack");
activity?.SetTag("artifact_digest", artifactDigest);
@@ -83,7 +92,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
// Collect SBOMs
if (options.IncludeCycloneDx)
{
var cdxDoc = await CollectCycloneDxSbomAsync(artifactDigest, tenantId, packDir, ct);
var cdxDoc = await CollectCycloneDxSbomAsync(artifactDigest, tenantId, packDir, now, ct);
if (cdxDoc is not null)
{
sbomDocuments.Add(cdxDoc);
@@ -104,7 +113,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
if (options.IncludeSpdx)
{
var spdxDoc = await CollectSpdxSbomAsync(artifactDigest, tenantId, packDir, ct);
var spdxDoc = await CollectSpdxSbomAsync(artifactDigest, tenantId, packDir, now, ct);
if (spdxDoc is not null)
{
sbomDocuments.Add(spdxDoc);
@@ -126,7 +135,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
// Collect VEX documents
if (options.IncludeVex)
{
var vexDocs = await CollectVexDocumentsAsync(artifactDigest, tenantId, packDir, ct);
var vexDocs = await CollectVexDocumentsAsync(artifactDigest, tenantId, packDir, now, ct);
vexDocuments.AddRange(vexDocs);
foreach (var vex in vexDocs)
{
@@ -144,7 +153,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
// Collect policy verdict
if (options.IncludePolicyVerdict)
{
policyVerdict = await CollectPolicyVerdictAsync(artifactDigest, tenantId, packDir, ct);
policyVerdict = await CollectPolicyVerdictAsync(artifactDigest, tenantId, packDir, now, ct);
if (policyVerdict is not null)
{
entries.Add(new ManifestEntry
@@ -178,7 +187,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
}
// Sort entries for deterministic ordering
entries = entries.OrderBy(e => e.Path).ToList();
entries = entries.OrderBy(e => e.Path, StringComparer.Ordinal).ToList();
// Compute merkle root
var merkleRoot = ComputeMerkleRoot(entries);
@@ -190,7 +199,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
Entries = entries.ToImmutableArray(),
TotalSizeBytes = entries.Sum(e => e.SizeBytes),
FileCount = entries.Count,
CreatedAt = _timeProvider.GetUtcNow()
CreatedAt = now
};
// Write manifest
@@ -207,8 +216,8 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
TenantId = tenantId,
VexVerdictDigests = vexDocuments.Select(v => v.Digest).ToImmutableArray(),
PolicyVerdictDigest = policyVerdict?.Digest,
ReplayHash = ComputeReplayHash(artifactDigest, sbomDigest, manifest.MerkleRoot),
GeneratedAt = _timeProvider.GetUtcNow(),
ReplayHash = ComputeReplayHash(artifactDigest, sbomDigest, manifest.MerkleRoot, packId),
GeneratedAt = now,
Attestations = attestations.ToImmutableArray(),
SbomDocuments = sbomDocuments.ToImmutableArray(),
VexDocuments = vexDocuments.ToImmutableArray(),
@@ -218,16 +227,17 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
// Create ZIP archive
var zipPath = Path.Combine(_tempDirectory, $"{packId}.zip");
await CreateZipArchiveAsync(packDir, zipPath, options.Compression, ct);
await CreateZipArchiveAsync(packDir, zipPath, options.Compression, now, ct);
var zipInfo = new FileInfo(zipPath);
// Cache the pack
var expiresAt = now.AddHours(24);
_packCache[packId] = new CachedPack
{
Pack = pack,
ZipPath = zipPath,
ExpiresAt = _timeProvider.GetUtcNow().AddHours(24)
ExpiresAt = expiresAt
};
// Clean up temp directory
@@ -249,7 +259,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
Success = true,
Pack = pack,
DownloadUrl = $"/api/v1/lineage/export/{packId}/download",
ExpiresAt = _timeProvider.GetUtcNow().AddHours(24),
ExpiresAt = expiresAt,
SizeBytes = zipInfo.Length,
Warnings = warnings.ToImmutableArray()
};
@@ -341,6 +351,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
string artifactDigest,
string tenantId,
string packDir,
DateTimeOffset now,
CancellationToken ct)
{
// In real implementation, would fetch from SbomService
@@ -350,7 +361,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
bomFormat = "CycloneDX",
specVersion = "1.6",
version = 1,
metadata = new { timestamp = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture) },
metadata = new { timestamp = now.ToString("O", CultureInfo.InvariantCulture) },
components = Array.Empty<object>()
}, JsonOptions);
@@ -377,6 +388,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
string artifactDigest,
string tenantId,
string packDir,
DateTimeOffset now,
CancellationToken ct)
{
// In real implementation, would fetch from SbomService
@@ -386,7 +398,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
dataLicense = "CC0-1.0",
name = artifactDigest,
documentNamespace = $"https://stellaops.io/spdx/{artifactDigest}",
creationInfo = new { created = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture) },
creationInfo = new { created = now.ToString("O", CultureInfo.InvariantCulture) },
packages = Array.Empty<object>()
}, JsonOptions);
@@ -413,6 +425,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
string artifactDigest,
string tenantId,
string packDir,
DateTimeOffset now,
CancellationToken ct)
{
// In real implementation, would fetch from VexLens
@@ -421,7 +434,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
context = "https://openvex.dev/ns/v0.2.0",
id = $"urn:stellaops:vex:{artifactDigest}",
author = "StellaOps",
timestamp = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
timestamp = now.ToString("O", CultureInfo.InvariantCulture),
statements = Array.Empty<object>()
}, JsonOptions);
@@ -451,6 +464,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
string artifactDigest,
string tenantId,
string packDir,
DateTimeOffset now,
CancellationToken ct)
{
// In real implementation, would fetch from Policy Engine
@@ -460,7 +474,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
tenantId,
verdict = "pass",
policyVersion = "1.0.0",
evaluatedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
evaluatedAt = now.ToString("O", CultureInfo.InvariantCulture),
rules = new { total = 0, passed = 0, failed = 0, warned = 0 }
}, JsonOptions);
@@ -480,7 +494,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
RulesPassed = 0,
RulesFailed = 0,
RulesWarned = 0,
EvaluatedAt = _timeProvider.GetUtcNow(),
EvaluatedAt = now,
FileName = fileName
};
}
@@ -500,6 +514,7 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
string sourceDir,
string zipPath,
string compression,
DateTimeOffset createdAt,
CancellationToken ct)
{
var compressionLevel = compression switch
@@ -515,8 +530,43 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
File.Delete(zipPath);
}
ZipFile.CreateFromDirectory(sourceDir, zipPath, compressionLevel, includeBaseDirectory: false);
await Task.CompletedTask;
var files = Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories)
.Select(path => new
{
FullPath = path,
RelativePath = Path.GetRelativePath(sourceDir, path).Replace('\\', '/')
})
.OrderBy(x => x.RelativePath, StringComparer.Ordinal)
.ToList();
await using var outputStream = new FileStream(
zipPath,
FileMode.Create,
FileAccess.Write,
FileShare.None,
bufferSize: 64 * 1024,
useAsync: true);
using var archive = new ZipArchive(outputStream, ZipArchiveMode.Create, leaveOpen: false);
foreach (var file in files)
{
ct.ThrowIfCancellationRequested();
var entry = archive.CreateEntry(file.RelativePath, compressionLevel);
entry.LastWriteTime = createdAt;
await using var entryStream = entry.Open();
await using var inputStream = new FileStream(
file.FullPath,
FileMode.Open,
FileAccess.Read,
FileShare.Read,
bufferSize: 64 * 1024,
useAsync: true);
await inputStream.CopyToAsync(entryStream, ct);
}
}
private static string ComputeMerkleRoot(IReadOnlyList<ManifestEntry> entries)
@@ -527,13 +577,25 @@ public sealed class LineageEvidencePackService : ILineageEvidencePackService
}
// Simple merkle tree: hash all entries together in order
var combined = string.Join("|", entries.Select(e => $"{e.Path}:{e.Sha256}"));
var combined = string.Join(
"|",
entries
.OrderBy(e => e.Path, StringComparer.Ordinal)
.Select(e => $"{e.Path}:{e.Sha256}"));
return ComputeHash(combined);
}
/// <summary>
/// Deterministic replay hash binding the artifact, its SBOM, the manifest
/// merkle root, and the pack id. Using the pack id (rather than wall-clock
/// time) keeps the hash reproducible for a given pack.
/// </summary>
private static string ComputeReplayHash(string artifactDigest, string sbomDigest, string merkleRoot, Guid packId)
{
    var input = string.Join(
        "|",
        artifactDigest,
        sbomDigest,
        merkleRoot,
        packId.ToString("N"));
    return $"sha256:{ComputeHash(input)}";
}

View File

@@ -3,6 +3,7 @@ using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Determinism;
using StellaOps.Policy.Replay;
using StellaOps.Policy.Snapshots;
@@ -16,15 +17,21 @@ public sealed class ExportSnapshotService : IExportSnapshotService
private readonly ISnapshotService _snapshotService;
private readonly IKnowledgeSourceResolver _sourceResolver;
private readonly ILogger<ExportSnapshotService> _logger;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
/// <summary>
/// Creates the export service. Logger, clock, and GUID source are optional;
/// they default to the null logger / system implementations so production
/// wiring stays simple while tests can inject deterministic substitutes.
/// </summary>
/// <exception cref="ArgumentNullException">When a required dependency is null.</exception>
public ExportSnapshotService(
    ISnapshotService snapshotService,
    IKnowledgeSourceResolver sourceResolver,
    ILogger<ExportSnapshotService>? logger = null,
    TimeProvider? timeProvider = null,
    IGuidProvider? guidProvider = null)
{
    _snapshotService = snapshotService ?? throw new ArgumentNullException(nameof(snapshotService));
    _sourceResolver = sourceResolver ?? throw new ArgumentNullException(nameof(sourceResolver));
    _logger = logger ?? NullLogger<ExportSnapshotService>.Instance;
    _timeProvider = timeProvider ?? TimeProvider.System;
    _guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <summary>
@@ -52,7 +59,8 @@ public sealed class ExportSnapshotService : IExportSnapshotService
}
// Create temp directory for bundle assembly
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-export-{Guid.NewGuid():N}");
var now = _timeProvider.GetUtcNow();
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-export-{_guidProvider.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
@@ -73,8 +81,8 @@ public sealed class ExportSnapshotService : IExportSnapshotService
// Create bundle info
var bundleInfo = new BundleInfo
{
BundleId = $"bundle:{Guid.NewGuid():N}",
CreatedAt = DateTimeOffset.UtcNow,
BundleId = $"bundle:{_guidProvider.NewGuid():N}",
CreatedAt = now,
CreatedBy = options.CreatedBy ?? "StellaOps",
InclusionLevel = options.InclusionLevel,
TotalSizeBytes = bundledFiles.Sum(f => f.SizeBytes),
@@ -93,7 +101,8 @@ public sealed class ExportSnapshotService : IExportSnapshotService
if (File.Exists(zipPath))
File.Delete(zipPath);
ZipFile.CreateFromDirectory(tempDir, zipPath, CompressionLevel.Optimal, false);
await CreateZipArchiveAsync(tempDir, zipPath, CompressionLevel.Optimal, now, ct)
.ConfigureAwait(false);
_logger.LogInformation("Exported snapshot to {ZipPath}", zipPath);
@@ -224,6 +233,56 @@ public sealed class ExportSnapshotService : IExportSnapshotService
.ConfigureAwait(false);
}
/// <summary>
/// Writes a deterministic ZIP of every file under <paramref name="sourceDir"/>:
/// entries are added in ordinal order of their forward-slash relative paths and
/// stamped with a fixed <paramref name="createdAt"/> timestamp, so identical
/// inputs yield identical archives. Any existing file at
/// <paramref name="zipPath"/> is replaced.
/// </summary>
private static async Task CreateZipArchiveAsync(
    string sourceDir,
    string zipPath,
    CompressionLevel compressionLevel,
    DateTimeOffset createdAt,
    CancellationToken ct)
{
    if (File.Exists(zipPath))
    {
        File.Delete(zipPath);
    }

    // Normalize to '/' separators and sort ordinally for platform-independent entry order.
    var orderedFiles = Directory
        .EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories)
        .Select(fullPath => (FullPath: fullPath, EntryName: Path.GetRelativePath(sourceDir, fullPath).Replace('\\', '/')))
        .OrderBy(f => f.EntryName, StringComparer.Ordinal)
        .ToList();

    await using var zipStream = new FileStream(
        zipPath,
        FileMode.Create,
        FileAccess.Write,
        FileShare.None,
        bufferSize: 64 * 1024,
        useAsync: true);
    using var archive = new ZipArchive(zipStream, ZipArchiveMode.Create, leaveOpen: false);

    foreach (var (fullPath, entryName) in orderedFiles)
    {
        ct.ThrowIfCancellationRequested();

        var entry = archive.CreateEntry(entryName, compressionLevel);
        // Fixed timestamp => reproducible archive bytes.
        entry.LastWriteTime = createdAt;

        await using var entryStream = entry.Open();
        await using var sourceStream = new FileStream(
            fullPath,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 64 * 1024,
            useAsync: true);
        await sourceStream.CopyToAsync(entryStream, ct).ConfigureAwait(false);
    }
}
private static string GetExtension(string sourceType) =>
sourceType switch
{

View File

@@ -3,6 +3,7 @@ using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Determinism;
using StellaOps.Policy.Snapshots;
namespace StellaOps.ExportCenter.Snapshots;
@@ -15,15 +16,18 @@ public sealed class ImportSnapshotService : IImportSnapshotService
private readonly ISnapshotService _snapshotService;
private readonly ISnapshotStore _snapshotStore;
private readonly ILogger<ImportSnapshotService> _logger;
private readonly IGuidProvider _guidProvider;
/// <summary>
/// Creates the import service. Logger and GUID source are optional and default
/// to the null logger / system GUID provider; tests inject deterministic ones.
/// </summary>
/// <exception cref="ArgumentNullException">When a required dependency is null.</exception>
public ImportSnapshotService(
    ISnapshotService snapshotService,
    ISnapshotStore snapshotStore,
    ILogger<ImportSnapshotService>? logger = null,
    IGuidProvider? guidProvider = null)
{
    _snapshotService = snapshotService ?? throw new ArgumentNullException(nameof(snapshotService));
    _snapshotStore = snapshotStore ?? throw new ArgumentNullException(nameof(snapshotStore));
    _logger = logger ?? NullLogger<ImportSnapshotService>.Instance;
    _guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <summary>
@@ -41,7 +45,7 @@ public sealed class ImportSnapshotService : IImportSnapshotService
return ImportResult.Fail($"Bundle not found: {bundlePath}");
// Extract to temp directory
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-import-{Guid.NewGuid():N}");
var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-import-{_guidProvider.NewGuid():N}");
try
{

View File

@@ -23,6 +23,8 @@
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Provcache\StellaOps.Provcache.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="..\..\..\Scanner\__Libraries\StellaOps.Scanner.ChangeTrace\StellaOps.Scanner.ChangeTrace.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestation\StellaOps.Attestation.csproj" />
<ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,3 +1,4 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
@@ -8,10 +9,14 @@ namespace StellaOps.ExportCenter.Core.Tenancy;
/// </summary>
public sealed class TenantScopeEnforcer : ITenantScopeEnforcer
{
private static readonly TimeSpan ConfigCacheTtl = TimeSpan.FromMinutes(5);
private readonly ITenantScopeConfigStore _configStore;
private readonly ITenantResourceStore _resourceStore;
private readonly ILogger<TenantScopeEnforcer> _logger;
private readonly TimeProvider _timeProvider;
private readonly ConcurrentDictionary<string, TenantScopeCacheEntry> _configCache = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, SemaphoreSlim> _configLocks = new(StringComparer.OrdinalIgnoreCase);
public TenantScopeEnforcer(
ITenantScopeConfigStore configStore,
@@ -309,16 +314,79 @@ public sealed class TenantScopeEnforcer : ITenantScopeEnforcer
/// <summary>
/// Synchronous lookup: serves the cached config when fresh; otherwise returns
/// the default config immediately and queues a background refresh so the
/// caller never blocks on the store.
/// </summary>
public TenantScopeConfig GetConfigForTenant(string tenantId)
{
    if (TryGetCachedConfig(tenantId, out var cached))
    {
        return cached;
    }

    // Fire-and-forget refresh; next call will hit the warmed cache.
    QueueRefresh(tenantId);
    return _configStore.GetDefaultConfig();
}
/// <summary>
/// Async lookup: serves the cached config when fresh, otherwise refreshes
/// synchronously (awaiting the store) and caches the result.
/// </summary>
private async Task<TenantScopeConfig> GetConfigOrDefaultAsync(
    string tenantId,
    CancellationToken cancellationToken)
{
    if (TryGetCachedConfig(tenantId, out var cached))
    {
        return cached;
    }

    return await RefreshConfigAsync(tenantId, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Returns the cached config for the tenant when present and not expired.
/// Expired entries are evicted lazily on lookup.
/// </summary>
private bool TryGetCachedConfig(string tenantId, out TenantScopeConfig config)
{
    config = null!;

    if (!_configCache.TryGetValue(tenantId, out var entry))
    {
        return false;
    }

    if (entry.ExpiresAt <= _timeProvider.GetUtcNow())
    {
        // Stale: drop it so the next refresh repopulates.
        _configCache.TryRemove(tenantId, out _);
        return false;
    }

    config = entry.Config;
    return true;
}
/// <summary>
/// Starts a fire-and-forget background refresh of the tenant's cached config.
/// </summary>
private void QueueRefresh(string tenantId)
{
    // Discarded task is intentional: RefreshConfigAsync catches its own
    // exceptions (logs a warning and falls back to the default config), so
    // there is nothing for the caller to observe or await here.
    _ = RefreshConfigAsync(tenantId, CancellationToken.None);
}
/// <summary>
/// Loads the tenant's config from the store and caches it for <c>ConfigCacheTtl</c>.
/// A per-tenant semaphore serializes concurrent refreshes; callers that lose
/// the race re-read the value the winner cached. On store failure the default
/// config is returned (and NOT cached), so the next call retries the store.
/// NOTE(review): per-tenant semaphores are never removed from _configLocks —
/// growth is bounded only by the number of distinct tenant ids; confirm acceptable.
/// </summary>
private async Task<TenantScopeConfig> RefreshConfigAsync(string tenantId, CancellationToken cancellationToken)
{
    var gate = _configLocks.GetOrAdd(tenantId, _ => new SemaphoreSlim(1, 1));
    await gate.WaitAsync(cancellationToken).ConfigureAwait(false);
    try
    {
        // Double-check under the lock: another caller may have refreshed
        // while we were waiting on the semaphore.
        if (TryGetCachedConfig(tenantId, out var cached))
        {
            return cached;
        }

        var config = await _configStore.GetTenantConfigAsync(tenantId, cancellationToken).ConfigureAwait(false)
            ?? _configStore.GetDefaultConfig();

        _configCache[tenantId] = new TenantScopeCacheEntry(
            config,
            _timeProvider.GetUtcNow().Add(ConfigCacheTtl));

        return config;
    }
    catch (Exception ex)
    {
        // Best-effort fallback: serve the default without caching the failure.
        _logger.LogWarning(ex, "Failed to refresh tenant scope config for tenant {TenantId}", tenantId);
        return _configStore.GetDefaultConfig();
    }
    finally
    {
        gate.Release();
    }
}
private sealed record TenantScopeCacheEntry(TenantScopeConfig Config, DateTimeOffset ExpiresAt);
}

View File

@@ -535,7 +535,7 @@ public sealed record PackRunVerificationResult
/// <summary>
/// When verification was performed.
/// </summary>
public DateTimeOffset VerifiedAt { get; init; } = DateTimeOffset.UtcNow;
public DateTimeOffset VerifiedAt { get; init; }
}
/// <summary>
@@ -839,7 +839,7 @@ public sealed record VerificationProgressEvent
/// <summary>
/// Timestamp.
/// </summary>
public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;
public DateTimeOffset Timestamp { get; init; }
}
/// <summary>

View File

@@ -255,7 +255,8 @@ public sealed class ExportVerificationService : IExportVerificationService
yield return new VerificationProgressEvent
{
Type = VerificationProgressType.Started,
Message = "Verification started"
Message = "Verification started",
Timestamp = _timeProvider.GetUtcNow()
};
// Get artifacts for progress tracking
@@ -284,7 +285,8 @@ public sealed class ExportVerificationService : IExportVerificationService
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = manifestResult.IsValid ? "Manifest valid" : "Manifest invalid"
Message = manifestResult.IsValid ? "Manifest valid" : "Manifest invalid",
Timestamp = _timeProvider.GetUtcNow()
};
}
@@ -315,7 +317,8 @@ public sealed class ExportVerificationService : IExportVerificationService
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = sigResult.IsValid ? "Signature valid" : "Signature invalid"
Message = sigResult.IsValid ? "Signature valid" : "Signature invalid",
Timestamp = _timeProvider.GetUtcNow()
};
}
}
@@ -327,7 +330,8 @@ public sealed class ExportVerificationService : IExportVerificationService
{
Type = VerificationProgressType.HashVerificationStarted,
TotalItems = artifacts.Count,
Message = $"Verifying {artifacts.Count} files"
Message = $"Verifying {artifacts.Count} files",
Timestamp = _timeProvider.GetUtcNow()
};
foreach (var artifact in artifacts)
@@ -368,7 +372,8 @@ public sealed class ExportVerificationService : IExportVerificationService
TotalItems = totalItems,
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed
FailedItems = failed,
Timestamp = _timeProvider.GetUtcNow()
};
}
@@ -379,7 +384,8 @@ public sealed class ExportVerificationService : IExportVerificationService
VerifiedItems = artifacts.Count,
PassedItems = passed,
FailedItems = failed,
Message = $"Hash verification complete: {passed} passed, {failed} failed"
Message = $"Hash verification complete: {passed} passed, {failed} failed",
Timestamp = _timeProvider.GetUtcNow()
};
}
@@ -391,7 +397,8 @@ public sealed class ExportVerificationService : IExportVerificationService
VerifiedItems = verified,
PassedItems = passed,
FailedItems = failed,
Message = failed == 0 ? "Verification successful" : $"Verification completed with {failed} failures"
Message = failed == 0 ? "Verification successful" : $"Verification completed with {failed} failures",
Timestamp = _timeProvider.GetUtcNow()
};
}
@@ -702,7 +709,8 @@ public sealed class ExportVerificationService : IExportVerificationService
SubjectAlignment = alignmentResult,
ProvenanceChain = chainResult,
ProvenanceLinks = provenanceLinks,
Errors = errors
Errors = errors,
VerifiedAt = _timeProvider.GetUtcNow()
};
}

View File

@@ -0,0 +1,65 @@
using System;
using System.IO;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Determinism;
using StellaOps.ExportCenter.Core.OfflineBundle;
using Xunit;
namespace StellaOps.ExportCenter.Tests.OfflineBundle;
public sealed class OfflineBundlePackagerDeterminismTests
{
    /// <summary>
    /// Building the same bundle twice with a frozen clock and sequential GUIDs
    /// must yield the same bundle id, byte-identical archives, and the frozen
    /// timestamp in the manifest. Both bundle files are removed afterwards —
    /// the original version leaked them when an assertion failed (and never
    /// deleted the first bundle at all).
    /// </summary>
    [Fact]
    public async Task CreateBundleAsync_IsDeterministic()
    {
        var now = new DateTimeOffset(2025, 1, 2, 3, 4, 5, TimeSpan.Zero);
        var timeProvider = new FixedTimeProvider(now);
        var guidProvider = new SequentialGuidProvider(Guid.Empty);

        var packager = new OfflineBundlePackager(
            timeProvider,
            NullLogger<OfflineBundlePackager>.Instance,
            guidProvider);

        var request = new BundleRequest
        {
            AlertId = "alert-1",
            TenantId = "tenant-1",
            ActorId = "actor-1",
            ArtifactId = "artifact-1",
            BaselineScanId = "baseline-1",
            IncludeSbomSlice = true,
            IncludeVexHistory = true,
            SignBundle = true
        };

        var first = await packager.CreateBundleAsync(request);
        try
        {
            // Read before the second run so an overwrite at the same path
            // cannot mask a nondeterminism bug.
            var firstBytes = await File.ReadAllBytesAsync(first.BundlePath);

            var second = await packager.CreateBundleAsync(request);
            try
            {
                var secondBytes = await File.ReadAllBytesAsync(second.BundlePath);

                Assert.Equal(first.BundleId, second.BundleId);
                Assert.Equal(firstBytes.Length, secondBytes.Length);
                Assert.Equal(firstBytes, secondBytes);
                Assert.Equal(now, first.Manifest.CreatedAt);
            }
            finally
            {
                DeleteIfExists(second.BundlePath);
            }
        }
        finally
        {
            DeleteIfExists(first.BundlePath);
        }
    }

    // Best-effort cleanup helper for temp bundle files.
    private static void DeleteIfExists(string path)
    {
        if (!string.IsNullOrEmpty(path) && File.Exists(path))
        {
            File.Delete(path);
        }
    }

    // Frozen clock so bundle timestamps are reproducible across runs.
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;

        public FixedTimeProvider(DateTimeOffset now)
        {
            _now = now;
        }

        public override DateTimeOffset GetUtcNow() => _now;
    }
}

View File

@@ -0,0 +1,93 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Attestation;
using StellaOps.ExportCenter.Core.Domain;
using StellaOps.ExportCenter.Core.Services;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Services;
public sealed class EvidencePackSigningServiceTests
{
    /// <summary>
    /// Signing a pack with a canned signer and a frozen clock must surface the
    /// signer's key id, signature bytes, and the frozen timestamp on the
    /// resulting manifest signature, plus a DSSE envelope and sha256 digest.
    /// </summary>
    [Fact]
    public async Task SignPackAsync_UsesAuthoritySigner()
    {
        var signedAt = new DateTimeOffset(2025, 1, 2, 3, 4, 5, TimeSpan.Zero);
        var signatureBytes = new byte[] { 1, 2, 3 };
        var signer = new FixedAuthoritySigner("test-key", signatureBytes);

        var service = new EvidencePackSigningService(
            NullLogger<EvidencePackSigningService>.Instance,
            signer,
            new FixedTimeProvider(signedAt));

        var pack = new LineageNodeEvidencePack
        {
            PackId = Guid.Parse("11111111-1111-1111-1111-111111111111"),
            ArtifactDigest = "sha256:artifact",
            SbomDigest = "sha256:sbom",
            TenantId = "tenant-1",
            GeneratedAt = signedAt,
            Manifest = new EvidencePackManifest
            {
                MerkleRoot = "sha256:deadbeef",
                Entries = ImmutableArray<ManifestEntry>.Empty,
                TotalSizeBytes = 0,
                FileCount = 0,
                CreatedAt = signedAt
            }
        };

        var result = await service.SignPackAsync(
            pack,
            new EvidencePackSignRequest
            {
                TenantId = "tenant-1",
                UploadToTransparencyLog = false
            });

        Assert.True(result.Success);
        Assert.NotNull(result.SignedPack);

        var signature = result.SignedPack!.ManifestSignature;
        Assert.NotNull(signature);
        Assert.Equal("test-key", signature!.KeyId);
        Assert.Equal(Convert.ToBase64String(signatureBytes), signature.SignatureBase64);
        Assert.Equal(signedAt, signature.SignedAt);
        Assert.False(string.IsNullOrEmpty(result.DsseEnvelopeBase64));
        Assert.StartsWith("sha256:", result.EnvelopeDigest);
    }

    // Returns a constant key id and signature regardless of the payload.
    private sealed class FixedAuthoritySigner : IAuthoritySigner
    {
        private readonly string _keyId;
        private readonly byte[] _signature;

        public FixedAuthoritySigner(string keyId, byte[] signature)
        {
            _keyId = keyId;
            _signature = signature;
        }

        public Task<string> GetKeyIdAsync(CancellationToken cancellationToken = default)
            => Task.FromResult(_keyId);

        public Task<byte[]> SignAsync(ReadOnlyMemory<byte> paePayload, CancellationToken cancellationToken = default)
            => Task.FromResult(_signature);
    }

    // Frozen clock so the asserted SignedAt value is exact.
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;

        public FixedTimeProvider(DateTimeOffset now)
        {
            _now = now;
        }

        public override DateTimeOffset GetUtcNow() => _now;
    }
}