Add channel test providers for Email, Slack, Teams, and Webhook
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Implemented EmailChannelTestProvider to generate email preview payloads.
- Implemented SlackChannelTestProvider to create Slack message previews.
- Implemented TeamsChannelTestProvider for generating Teams Adaptive Card previews.
- Implemented WebhookChannelTestProvider to create webhook payloads.
- Added INotifyChannelTestProvider interface for channel-specific preview generation.
- Created ChannelTestPreviewContracts for request and response models.
- Developed NotifyChannelTestService to handle test send requests and generate previews.
- Added rate limit policies for test sends and delivery history.
- Implemented unit tests for service registration and binding.
- Updated project files to include necessary dependencies and configurations.
This commit is contained in:
2025-10-19 23:29:34 +03:00
parent 8e7ce55542
commit 5fd4032c7c
239 changed files with 17245 additions and 3155 deletions

View File

@@ -1,9 +1,9 @@
<Project>
<PropertyGroup>
<ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == ''">$(SolutionDir)PluginBinaries</ConcelierPluginOutputRoot>
<ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)PluginBinaries</ConcelierPluginOutputRoot>
<AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == ''">$(SolutionDir)PluginBinaries\Authority</AuthorityPluginOutputRoot>
<AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)PluginBinaries\Authority</AuthorityPluginOutputRoot>
<ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot>
<ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot>
<AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot>
<AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot>
<IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Connector.'))">true</IsConcelierPlugin>
<IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Exporter.'))">true</IsConcelierPlugin>
<IsAuthorityPlugin Condition="'$(IsAuthorityPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Authority.Plugin.'))">true</IsAuthorityPlugin>
@@ -20,12 +20,17 @@
<ProjectReference Update="../StellaOps.Plugin/StellaOps.Plugin.csproj">
<Private>false</Private>
<ExcludeAssets>runtime</ExcludeAssets>
</ProjectReference>
</ItemGroup>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<PackageReference Update="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="SharpCompress" Version="0.41.0" />
</ItemGroup>
<ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)' != 'false'">
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="8.0.8" />
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />

View File

@@ -37,6 +37,10 @@ public sealed class AttestorOptions
public bool RequireClientCertificate { get; set; } = true;
public string? CaBundle { get; set; }
public IList<string> AllowedSubjects { get; set; } = new List<string>();
public IList<string> AllowedThumbprints { get; set; } = new List<string>();
}
public sealed class AuthorityOptions

View File

@@ -26,6 +26,8 @@ public sealed class AttestorEntry
public SignerIdentityDescriptor SignerIdentity { get; init; } = new();
public LogReplicaDescriptor? Mirror { get; init; }
public sealed class ArtifactDescriptor
{
public string Sha256 { get; init; } = string.Empty;
@@ -64,6 +66,8 @@ public sealed class AttestorEntry
public sealed class LogDescriptor
{
public string Backend { get; init; } = "primary";
public string Url { get; init; } = string.Empty;
public string? LogId { get; init; }
@@ -79,4 +83,23 @@ public sealed class AttestorEntry
public string? KeyId { get; init; }
}
/// <summary>
/// Descriptor for a replica (mirror) log record attached to an attestor entry:
/// which backend/URL the bundle was submitted to, the resulting uuid/index/proof,
/// and the last-known status or error message.
/// </summary>
public sealed class LogReplicaDescriptor
{
public string Backend { get; init; } = string.Empty;
public string Url { get; init; } = string.Empty;
public string? Uuid { get; init; }
public long? Index { get; init; }
// Defaults to "pending"; submission code elsewhere compares against "included".
public string Status { get; init; } = "pending";
public ProofDescriptor? Proof { get; init; }
public string? LogId { get; init; }
// Populated when the replica submission failed; null on success.
public string? Error { get; init; }
}
}

View File

@@ -24,6 +24,9 @@ public sealed class AttestorSubmissionResult
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("mirror")]
public MirrorLog? Mirror { get; set; }
public sealed class RekorProof
{
[JsonPropertyName("checkpoint")]
@@ -56,4 +59,25 @@ public sealed class AttestorSubmissionResult
[JsonPropertyName("path")]
public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
/// <summary>
/// Mirror-log portion of a submission result, serialized alongside the primary
/// fields. Carries the mirror's uuid/index/log URL/status/proof plus an error
/// message when the mirror submission failed.
/// </summary>
public sealed class MirrorLog
{
[JsonPropertyName("uuid")]
public string? Uuid { get; set; }
[JsonPropertyName("index")]
public long? Index { get; set; }
[JsonPropertyName("logURL")]
public string? LogUrl { get; set; }
// Defaults to "pending" until the mirror submission resolves.
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("proof")]
public RekorProof? Proof { get; set; }
// Set when the mirror submission failed; null otherwise.
[JsonPropertyName("error")]
public string? Error { get; set; }
}
}

View File

@@ -12,10 +12,14 @@ public sealed class AttestorSubmissionValidator
private static readonly string[] AllowedKinds = ["sbom", "report", "vex-export"];
private readonly IDsseCanonicalizer _canonicalizer;
private readonly HashSet<string> _allowedModes;
public AttestorSubmissionValidator(IDsseCanonicalizer canonicalizer)
public AttestorSubmissionValidator(IDsseCanonicalizer canonicalizer, IEnumerable<string>? allowedModes = null)
{
_canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
_allowedModes = allowedModes is null
? new HashSet<string>(StringComparer.OrdinalIgnoreCase)
: new HashSet<string>(allowedModes, StringComparer.OrdinalIgnoreCase);
}
public async Task<AttestorSubmissionValidationResult> ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default)
@@ -47,6 +51,11 @@ public sealed class AttestorSubmissionValidator
throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required.");
}
if (_allowedModes.Count > 0 && !string.IsNullOrWhiteSpace(request.Bundle.Mode) && !_allowedModes.Contains(request.Bundle.Mode))
{
throw new AttestorValidationException("mode_not_allowed", $"Submission mode '{request.Bundle.Mode}' is not permitted.");
}
if (request.Meta is null)
{
throw new AttestorValidationException("meta_missing", "Submission metadata is required.");

View File

@@ -24,7 +24,12 @@ public static class ServiceCollectionExtensions
public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
{
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddSingleton<AttestorSubmissionValidator>();
services.AddSingleton(sp =>
{
var canonicalizer = sp.GetRequiredService<IDsseCanonicalizer>();
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
return new AttestorSubmissionValidator(canonicalizer, options.Security.SignerIdentity.Mode);
});
services.AddSingleton<AttestorMetrics>();
services.AddSingleton<IAttestorSubmissionService, AttestorSubmissionService>();
services.AddSingleton<IAttestorVerificationService, AttestorVerificationService>();

View File

@@ -76,6 +76,9 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
[BsonElement("signerIdentity")]
public SignerIdentityDocument SignerIdentity { get; set; } = new();
[BsonElement("mirror")]
public MirrorDocument? Mirror { get; set; }
public static AttestorEntryDocument FromDomain(AttestorEntry entry)
{
return new AttestorEntryDocument
@@ -109,6 +112,7 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
},
Log = new LogDocument
{
Backend = entry.Log.Backend,
Url = entry.Log.Url,
LogId = entry.Log.LogId
},
@@ -120,7 +124,8 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
Issuer = entry.SignerIdentity.Issuer,
SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName,
KeyId = entry.SignerIdentity.KeyId
}
},
Mirror = entry.Mirror is null ? null : MirrorDocument.FromDomain(entry.Mirror)
};
}
@@ -155,6 +160,7 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
},
Log = new AttestorEntry.LogDescriptor
{
Backend = Log.Backend,
Url = Log.Url,
LogId = Log.LogId
},
@@ -166,7 +172,8 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
Issuer = SignerIdentity.Issuer,
SubjectAlternativeName = SignerIdentity.SubjectAlternativeName,
KeyId = SignerIdentity.KeyId
}
},
Mirror = Mirror?.ToDomain()
};
}
@@ -220,6 +227,9 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
internal sealed class LogDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = "primary";
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
@@ -241,5 +251,92 @@ internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
[BsonElement("kid")]
public string? KeyId { get; set; }
}
/// <summary>
/// Mongo persistence shape for a mirror (replica) log record stored under the
/// entry's "mirror" element. Field names are pinned via [BsonElement] so stored
/// documents remain stable across code refactors.
/// </summary>
internal sealed class MirrorDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = string.Empty;
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("uuid")]
public string? Uuid { get; set; }
[BsonElement("index")]
public long? Index { get; set; }
// Defaults to "pending"; updated once the mirror submission resolves.
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("logId")]
public string? LogId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
// Maps the domain replica descriptor to its Mongo document form.
// Checkpoint timestamps are converted through BsonDateTime; absent
// sub-objects (proof/checkpoint/inclusion) stay null.
public static MirrorDocument FromDomain(AttestorEntry.LogReplicaDescriptor mirror)
{
return new MirrorDocument
{
Backend = mirror.Backend,
Url = mirror.Url,
Uuid = mirror.Uuid,
Index = mirror.Index,
Status = mirror.Status,
Proof = mirror.Proof is null ? null : new ProofDocument
{
Checkpoint = mirror.Proof.Checkpoint is null ? null : new CheckpointDocument
{
Origin = mirror.Proof.Checkpoint.Origin,
Size = mirror.Proof.Checkpoint.Size,
RootHash = mirror.Proof.Checkpoint.RootHash,
Timestamp = mirror.Proof.Checkpoint.Timestamp is null
? null
: BsonDateTime.Create(mirror.Proof.Checkpoint.Timestamp.Value)
},
Inclusion = mirror.Proof.Inclusion is null ? null : new InclusionDocument
{
LeafHash = mirror.Proof.Inclusion.LeafHash,
Path = mirror.Proof.Inclusion.Path
}
},
LogId = mirror.LogId,
Error = mirror.Error
};
}
// Inverse of FromDomain. Checkpoint timestamps are normalized back to UTC
// via ToUniversalTime() when mapping to the domain descriptor.
public AttestorEntry.LogReplicaDescriptor ToDomain()
{
return new AttestorEntry.LogReplicaDescriptor
{
Backend = Backend,
Url = Url,
Uuid = Uuid,
Index = Index,
Status = Status,
Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor
{
Checkpoint = Proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor
{
Origin = Proof.Checkpoint.Origin,
Size = Proof.Checkpoint.Size,
RootHash = Proof.Checkpoint.RootHash,
Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime()
},
Inclusion = Proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor
{
LeafHash = Proof.Inclusion.LeafHash,
Path = Proof.Inclusion.Path
}
},
LogId = LogId,
Error = Error
};
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -58,137 +59,136 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
SubmissionContext context,
CancellationToken cancellationToken = default)
{
var start = System.Diagnostics.Stopwatch.GetTimestamp();
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(context);
var validation = await _validator.ValidateAsync(request, cancellationToken).ConfigureAwait(false);
var canonicalBundle = validation.CanonicalBundle;
var dedupeUuid = await _dedupeStore.TryGetExistingAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false);
if (!string.IsNullOrEmpty(dedupeUuid))
var preference = NormalizeLogPreference(request.Meta.LogPreference);
var requiresPrimary = preference is "primary" or "both";
var requiresMirror = preference is "mirror" or "both";
if (!requiresPrimary && !requiresMirror)
{
requiresPrimary = true;
}
if (requiresMirror && !_options.Rekor.Mirror.Enabled)
{
throw new AttestorValidationException("mirror_disabled", "Mirror log requested but not configured.");
}
var existing = await TryGetExistingEntryAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false);
if (existing is not null)
{
_logger.LogInformation("Dedupe hit for bundle {BundleSha256} -> {RekorUuid}", request.Meta.BundleSha256, dedupeUuid);
_metrics.DedupeHitsTotal.Add(1, new KeyValuePair<string, object?>("result", "hit"));
var existing = await _repository.GetByUuidAsync(dedupeUuid, cancellationToken).ConfigureAwait(false)
?? await _repository.GetByBundleShaAsync(request.Meta.BundleSha256, cancellationToken).ConfigureAwait(false);
if (existing is not null)
{
_metrics.SubmitTotal.Add(1,
new KeyValuePair<string, object?>("result", "dedupe"),
new KeyValuePair<string, object?>("backend", "cache"));
return ToResult(existing);
}
}
else
{
_metrics.DedupeHitsTotal.Add(1, new KeyValuePair<string, object?>("result", "miss"));
var updated = await EnsureBackendsAsync(existing, request, context, requiresPrimary, requiresMirror, cancellationToken).ConfigureAwait(false);
return ToResult(updated);
}
var primaryBackend = BuildBackend("primary", _options.Rekor.Primary);
RekorSubmissionResponse submissionResponse;
try
_metrics.DedupeHitsTotal.Add(1, new KeyValuePair<string, object?>("result", "miss"));
SubmissionOutcome? canonicalOutcome = null;
SubmissionOutcome? mirrorOutcome = null;
if (requiresPrimary)
{
submissionResponse = await _rekorClient.SubmitAsync(request, primaryBackend, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "submit"));
_logger.LogError(ex, "Failed to submit bundle {BundleSha} to Rekor backend {Backend}", request.Meta.BundleSha256, primaryBackend.Name);
throw;
canonicalOutcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false);
}
var proof = submissionResponse.Proof;
if (proof is null && string.Equals(submissionResponse.Status, "included", StringComparison.OrdinalIgnoreCase))
if (requiresMirror)
{
try
{
proof = await _rekorClient.GetProofAsync(submissionResponse.Uuid, primaryBackend, cancellationToken).ConfigureAwait(false);
_metrics.ProofFetchTotal.Add(1,
new KeyValuePair<string, object?>("result", proof is null ? "missing" : "ok"));
var mirror = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false);
if (canonicalOutcome is null)
{
canonicalOutcome = mirror;
}
else
{
mirrorOutcome = mirror;
}
}
catch (Exception ex)
{
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "proof_fetch"));
_logger.LogWarning(ex, "Proof fetch failed for {Uuid} on backend {Backend}", submissionResponse.Uuid, primaryBackend.Name);
if (canonicalOutcome is null)
{
throw;
}
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "submit_mirror"));
_logger.LogWarning(ex, "Mirror submission failed for bundle {BundleSha}", request.Meta.BundleSha256);
mirrorOutcome = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero);
RecordSubmissionMetrics(mirrorOutcome);
}
}
var entry = CreateEntry(request, submissionResponse, proof, context, canonicalBundle);
if (canonicalOutcome is null)
{
throw new InvalidOperationException("No Rekor submission outcome was produced.");
}
var entry = CreateEntry(request, context, canonicalOutcome, mirrorOutcome);
await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false);
await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false);
if (request.Meta.Archive)
{
var archiveBundle = new AttestorArchiveBundle
{
RekorUuid = entry.RekorUuid,
ArtifactSha256 = entry.Artifact.Sha256,
BundleSha256 = entry.BundleSha256,
CanonicalBundleJson = canonicalBundle,
ProofJson = proof is null ? Array.Empty<byte>() : JsonSerializer.SerializeToUtf8Bytes(proof, JsonSerializerOptions.Default),
Metadata = new Dictionary<string, string>
{
["logUrl"] = entry.Log.Url,
["status"] = entry.Status
}
};
try
{
await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to archive bundle {BundleSha}", entry.BundleSha256);
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "archive"));
}
await ArchiveAsync(entry, canonicalBundle, canonicalOutcome.Proof, cancellationToken).ConfigureAwait(false);
}
var elapsed = System.Diagnostics.Stopwatch.GetElapsedTime(start, System.Diagnostics.Stopwatch.GetTimestamp());
_metrics.SubmitTotal.Add(1,
new KeyValuePair<string, object?>("result", submissionResponse.Status ?? "unknown"),
new KeyValuePair<string, object?>("backend", primaryBackend.Name));
_metrics.SubmitLatency.Record(elapsed.TotalSeconds, new KeyValuePair<string, object?>("backend", primaryBackend.Name));
await WriteAuditAsync(request, context, entry, submissionResponse, (long)elapsed.TotalMilliseconds, cancellationToken).ConfigureAwait(false);
await WriteAuditAsync(request, context, entry, canonicalOutcome, cancellationToken).ConfigureAwait(false);
if (mirrorOutcome is not null)
{
await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false);
}
return ToResult(entry);
}
private static AttestorSubmissionResult ToResult(AttestorEntry entry)
{
return new AttestorSubmissionResult
var result = new AttestorSubmissionResult
{
Uuid = entry.RekorUuid,
Index = entry.Index,
LogUrl = entry.Log.Url,
Status = entry.Status,
Proof = entry.Proof is null ? null : new AttestorSubmissionResult.RekorProof
{
Checkpoint = entry.Proof.Checkpoint is null ? null : new AttestorSubmissionResult.Checkpoint
{
Origin = entry.Proof.Checkpoint.Origin,
Size = entry.Proof.Checkpoint.Size,
RootHash = entry.Proof.Checkpoint.RootHash,
Timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O")
},
Inclusion = entry.Proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof
{
LeafHash = entry.Proof.Inclusion.LeafHash,
Path = entry.Proof.Inclusion.Path
}
}
Proof = ToResultProof(entry.Proof)
};
if (entry.Mirror is not null)
{
result.Mirror = new AttestorSubmissionResult.MirrorLog
{
Uuid = entry.Mirror.Uuid,
Index = entry.Mirror.Index,
LogUrl = entry.Mirror.Url,
Status = entry.Mirror.Status,
Proof = ToResultProof(entry.Mirror.Proof),
Error = entry.Mirror.Error
};
}
return result;
}
private AttestorEntry CreateEntry(
AttestorSubmissionRequest request,
RekorSubmissionResponse submission,
RekorProofResponse? proof,
SubmissionContext context,
byte[] canonicalBundle)
SubmissionOutcome canonicalOutcome,
SubmissionOutcome? mirrorOutcome)
{
if (canonicalOutcome.Submission is null)
{
throw new InvalidOperationException("Canonical submission outcome must include a Rekor response.");
}
var submission = canonicalOutcome.Submission;
var now = _timeProvider.GetUtcNow();
return new AttestorEntry
{
RekorUuid = submission.Uuid,
@@ -201,24 +201,11 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
},
BundleSha256 = request.Meta.BundleSha256,
Index = submission.Index,
Proof = proof is null ? null : new AttestorEntry.ProofDescriptor
{
Checkpoint = proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor
{
Origin = proof.Checkpoint.Origin,
Size = proof.Checkpoint.Size,
RootHash = proof.Checkpoint.RootHash,
Timestamp = proof.Checkpoint.Timestamp
},
Inclusion = proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor
{
LeafHash = proof.Inclusion.LeafHash,
Path = proof.Inclusion.Path
}
},
Proof = ConvertProof(canonicalOutcome.Proof),
Log = new AttestorEntry.LogDescriptor
{
Url = submission.LogUrl ?? string.Empty,
Backend = canonicalOutcome.Backend,
Url = submission.LogUrl ?? canonicalOutcome.Url,
LogId = null
},
CreatedAt = now,
@@ -229,28 +216,233 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
Issuer = context.CallerAudience,
SubjectAlternativeName = context.CallerSubject,
KeyId = context.CallerClientId
}
},
Mirror = mirrorOutcome is null ? null : CreateMirrorDescriptor(mirrorOutcome)
};
}
/// <summary>
/// Maps a caller-supplied log preference onto one of the canonical values
/// "primary", "mirror", or "both". Null, blank, or unrecognised input falls
/// back to "primary". Matching is case-insensitive and ignores surrounding
/// whitespace.
/// </summary>
private static string NormalizeLogPreference(string? value)
{
    if (value is null || value.Trim().Length == 0)
    {
        return "primary";
    }

    var candidate = value.Trim().ToLowerInvariant();
    if (candidate == "mirror" || candidate == "both")
    {
        return candidate;
    }

    // "primary" itself, and anything unknown, both resolve to the default.
    return "primary";
}
/// <summary>
/// Resolves a previously persisted entry for the given bundle digest via the
/// dedupe cache. Returns null when no dedupe marker exists; when the cached
/// UUID no longer resolves, falls back to a bundle-SHA lookup.
/// </summary>
private async Task<AttestorEntry?> TryGetExistingEntryAsync(string bundleSha256, CancellationToken cancellationToken)
{
    var cachedUuid = await _dedupeStore.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false);
    if (!string.IsNullOrWhiteSpace(cachedUuid))
    {
        var byUuid = await _repository.GetByUuidAsync(cachedUuid, cancellationToken).ConfigureAwait(false);
        if (byUuid is not null)
        {
            return byUuid;
        }

        // Cached UUID was stale; try the digest-based index instead.
        return await _repository.GetByBundleShaAsync(bundleSha256, cancellationToken).ConfigureAwait(false);
    }

    return null;
}
/// <summary>
/// For a dedupe hit, ensures every requested backend is satisfied: resubmits
/// to the primary log when the stored entry is not primary-backed, and
/// (re)submits to the mirror when no healthy mirror record exists. Returns
/// the stored entry, updated and persisted if anything was submitted.
/// </summary>
private async Task<AttestorEntry> EnsureBackendsAsync(
AttestorEntry existing,
AttestorSubmissionRequest request,
SubmissionContext context,
bool requiresPrimary,
bool requiresMirror,
CancellationToken cancellationToken)
{
var entry = existing;
var updated = false;
// The stored entry may have been created against a non-primary backend;
// submit to primary now and promote the entry's canonical log record.
if (requiresPrimary && !IsPrimary(entry))
{
var outcome = await SubmitToBackendAsync(request, "primary", _options.Rekor.Primary, cancellationToken).ConfigureAwait(false);
entry = PromoteToPrimary(entry, outcome);
await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false);
await _dedupeStore.SetAsync(request.Meta.BundleSha256, entry.RekorUuid, DedupeTtl, cancellationToken).ConfigureAwait(false);
await WriteAuditAsync(request, context, entry, outcome, cancellationToken).ConfigureAwait(false);
updated = true;
}
if (requiresMirror)
{
// An existing mirror record only counts when it is error-free, reports
// "included", and carries a UUID; otherwise submit to the mirror again.
var mirrorSatisfied = entry.Mirror is not null
&& entry.Mirror.Error is null
&& string.Equals(entry.Mirror.Status, "included", StringComparison.OrdinalIgnoreCase)
&& !string.IsNullOrEmpty(entry.Mirror.Uuid);
if (!mirrorSatisfied)
{
try
{
var mirrorOutcome = await SubmitToBackendAsync(request, "mirror", _options.Rekor.Mirror, cancellationToken).ConfigureAwait(false);
entry = WithMirror(entry, mirrorOutcome);
await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false);
await WriteAuditAsync(request, context, entry, mirrorOutcome, cancellationToken).ConfigureAwait(false);
updated = true;
}
catch (Exception ex)
{
// Mirror failures do not fail the overall request: the failure is
// recorded on the entry (and audited) instead of propagating.
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "submit_mirror"));
_logger.LogWarning(ex, "Mirror submission failed for deduplicated bundle {BundleSha}", request.Meta.BundleSha256);
var failure = SubmissionOutcome.Failure("mirror", _options.Rekor.Mirror.Url, ex, TimeSpan.Zero);
RecordSubmissionMetrics(failure);
entry = WithMirror(entry, failure);
await _repository.SaveAsync(entry, cancellationToken).ConfigureAwait(false);
await WriteAuditAsync(request, context, entry, failure, cancellationToken).ConfigureAwait(false);
updated = true;
}
}
}
// Pure dedupe hit: nothing was resubmitted, count it against the cache backend.
if (!updated)
{
_metrics.SubmitTotal.Add(1,
new KeyValuePair<string, object?>("result", "dedupe"),
new KeyValuePair<string, object?>("backend", "cache"));
}
return entry;
}
// True when the entry's canonical log record came from the "primary" Rekor backend.
private static bool IsPrimary(AttestorEntry entry) =>
string.Equals(entry.Log.Backend, "primary", StringComparison.OrdinalIgnoreCase);
/// <summary>
/// Submits the request to a single Rekor backend and returns a successful
/// <see cref="SubmissionOutcome"/> with measured latency. If the backend
/// reports "included" without an inline proof, a best-effort proof fetch
/// follows. Submission failures are counted, logged, and rethrown.
/// </summary>
private async Task<SubmissionOutcome> SubmitToBackendAsync(
AttestorSubmissionRequest request,
string backendName,
AttestorOptions.RekorBackendOptions backendOptions,
CancellationToken cancellationToken)
{
var backend = BuildBackend(backendName, backendOptions);
var stopwatch = Stopwatch.StartNew();
try
{
var submission = await _rekorClient.SubmitAsync(request, backend, cancellationToken).ConfigureAwait(false);
stopwatch.Stop();
var proof = submission.Proof;
// The backend may report inclusion without returning the proof inline;
// fetch it separately in that case.
if (proof is null && string.Equals(submission.Status, "included", StringComparison.OrdinalIgnoreCase))
{
try
{
proof = await _rekorClient.GetProofAsync(submission.Uuid, backend, cancellationToken).ConfigureAwait(false);
_metrics.ProofFetchTotal.Add(1,
new KeyValuePair<string, object?>("result", proof is null ? "missing" : "ok"));
}
catch (Exception ex)
{
// Proof retrieval is best-effort: log, count, and continue with a proof-less outcome.
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "proof_fetch"));
_logger.LogWarning(ex, "Proof fetch failed for {Uuid} on backend {Backend}", submission.Uuid, backendName);
}
}
var outcome = SubmissionOutcome.Success(backendName, backend.Url, submission, proof, stopwatch.Elapsed);
RecordSubmissionMetrics(outcome);
return outcome;
}
catch (Exception ex)
{
stopwatch.Stop();
// Error type is tagged per backend (e.g. "submit_primary" / "submit_mirror").
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", $"submit_{backendName}"));
_logger.LogError(ex, "Failed to submit bundle {BundleSha} to Rekor backend {Backend}", request.Meta.BundleSha256, backendName);
throw;
}
}
/// <summary>
/// Emits submit counters (and latency, when measured) tagged with the backend
/// name and outcome status.
/// </summary>
private void RecordSubmissionMetrics(SubmissionOutcome outcome)
{
    string result;
    if (outcome.IsSuccess)
    {
        result = outcome.Submission!.Status ?? "unknown";
    }
    else
    {
        result = "failed";
    }

    var backendTag = new KeyValuePair<string, object?>("backend", outcome.Backend);
    _metrics.SubmitTotal.Add(1, new KeyValuePair<string, object?>("result", result), backendTag);

    // Zero latency means "not measured" (e.g. synthesized failure outcomes); skip the histogram.
    if (outcome.Latency > TimeSpan.Zero)
    {
        _metrics.SubmitLatency.Record(outcome.Latency.TotalSeconds, backendTag);
    }
}
/// <summary>
/// Best-effort archival of the canonical bundle plus its proof. Mirror
/// coordinates, when present, are captured in the archive metadata. Archive
/// failures are logged and counted but never propagated to the caller.
/// </summary>
private async Task ArchiveAsync(
AttestorEntry entry,
byte[] canonicalBundle,
RekorProofResponse? proof,
CancellationToken cancellationToken)
{
var metadata = new Dictionary<string, string>
{
["logUrl"] = entry.Log.Url,
["status"] = entry.Status
};
if (entry.Mirror is not null)
{
metadata["mirror.backend"] = entry.Mirror.Backend;
metadata["mirror.uuid"] = entry.Mirror.Uuid ?? string.Empty;
metadata["mirror.status"] = entry.Mirror.Status;
}
var archiveBundle = new AttestorArchiveBundle
{
RekorUuid = entry.RekorUuid,
ArtifactSha256 = entry.Artifact.Sha256,
BundleSha256 = entry.BundleSha256,
CanonicalBundleJson = canonicalBundle,
// No proof yet -> store an empty payload rather than null.
ProofJson = proof is null ? Array.Empty<byte>() : JsonSerializer.SerializeToUtf8Bytes(proof, JsonSerializerOptions.Default),
Metadata = metadata
};
try
{
await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
// Archival is auxiliary; a failure must not fail the submission.
_logger.LogWarning(ex, "Failed to archive bundle {BundleSha}", entry.BundleSha256);
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "archive"));
}
}
private Task WriteAuditAsync(
AttestorSubmissionRequest request,
SubmissionContext context,
AttestorEntry entry,
RekorSubmissionResponse submission,
long latencyMs,
SubmissionOutcome outcome,
CancellationToken cancellationToken)
{
var metadata = new Dictionary<string, string>();
if (!outcome.IsSuccess && outcome.Error is not null)
{
metadata["error"] = outcome.Error.Message;
}
var record = new AttestorAuditRecord
{
Action = "submit",
Result = submission.Status ?? "included",
RekorUuid = submission.Uuid,
Index = submission.Index,
Result = outcome.IsSuccess
? outcome.Submission!.Status ?? "included"
: "failed",
RekorUuid = outcome.IsSuccess
? outcome.Submission!.Uuid
: string.Equals(outcome.Backend, "primary", StringComparison.OrdinalIgnoreCase)
? entry.RekorUuid
: entry.Mirror?.Uuid,
Index = outcome.Submission?.Index,
ArtifactSha256 = request.Meta.Artifact.Sha256,
BundleSha256 = request.Meta.BundleSha256,
Backend = "primary",
LatencyMs = latencyMs,
Backend = outcome.Backend,
LatencyMs = (long)outcome.Latency.TotalMilliseconds,
Timestamp = _timeProvider.GetUtcNow(),
Caller = new AttestorAuditRecord.CallerDescriptor
{
@@ -259,12 +451,160 @@ internal sealed class AttestorSubmissionService : IAttestorSubmissionService
ClientId = context.CallerClientId,
MtlsThumbprint = context.MtlsThumbprint,
Tenant = context.CallerTenant
}
},
Metadata = metadata
};
return _auditSink.WriteAsync(record, cancellationToken);
}
/// <summary>
/// Converts a Rekor proof response into the persisted proof descriptor.
/// A null response maps to null; checkpoint and inclusion sections are
/// copied only when present.
/// </summary>
private static AttestorEntry.ProofDescriptor? ConvertProof(RekorProofResponse? proof)
{
    if (proof is null)
    {
        return null;
    }

    AttestorEntry.CheckpointDescriptor? checkpoint = null;
    if (proof.Checkpoint is not null)
    {
        checkpoint = new AttestorEntry.CheckpointDescriptor
        {
            Origin = proof.Checkpoint.Origin,
            Size = proof.Checkpoint.Size,
            RootHash = proof.Checkpoint.RootHash,
            Timestamp = proof.Checkpoint.Timestamp
        };
    }

    AttestorEntry.InclusionDescriptor? inclusion = null;
    if (proof.Inclusion is not null)
    {
        inclusion = new AttestorEntry.InclusionDescriptor
        {
            LeafHash = proof.Inclusion.LeafHash,
            Path = proof.Inclusion.Path
        };
    }

    return new AttestorEntry.ProofDescriptor
    {
        Checkpoint = checkpoint,
        Inclusion = inclusion
    };
}
/// <summary>
/// Maps a persisted proof descriptor onto the API result shape; returns null
/// when no proof is stored. Checkpoint timestamps are rendered in round-trip
/// ("O") format.
/// </summary>
private static AttestorSubmissionResult.RekorProof? ToResultProof(AttestorEntry.ProofDescriptor? proof)
{
if (proof is null)
{
return null;
}
return new AttestorSubmissionResult.RekorProof
{
Checkpoint = proof.Checkpoint is null ? null : new AttestorSubmissionResult.Checkpoint
{
Origin = proof.Checkpoint.Origin,
Size = proof.Checkpoint.Size,
RootHash = proof.Checkpoint.RootHash,
Timestamp = proof.Checkpoint.Timestamp?.ToString("O")
},
Inclusion = proof.Inclusion is null ? null : new AttestorSubmissionResult.InclusionProof
{
LeafHash = proof.Inclusion.LeafHash,
Path = proof.Inclusion.Path
}
};
}
/// <summary>
/// Builds the persisted mirror descriptor from a submission outcome. On
/// success the log URL/status/proof come from the Rekor response (with the
/// backend URL and "included" as fallbacks); on failure the descriptor records
/// the target URL, a "failed" status, no proof, and the error message.
/// </summary>
private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptor(SubmissionOutcome outcome)
{
    var url = outcome.Url;
    var status = "failed";
    AttestorEntry.ProofDescriptor? proof = null;

    if (outcome.IsSuccess)
    {
        url = outcome.Submission!.LogUrl ?? outcome.Url;
        status = outcome.Submission!.Status ?? "included";
        proof = ConvertProof(outcome.Proof);
    }

    return new AttestorEntry.LogReplicaDescriptor
    {
        Backend = outcome.Backend,
        Url = url,
        Uuid = outcome.Submission?.Uuid,
        Index = outcome.Submission?.Index,
        Status = status,
        Proof = proof,
        Error = outcome.Error?.Message
    };
}
/// <summary>
/// Returns a copy of <paramref name="entry"/> whose Mirror descriptor is
/// rebuilt from <paramref name="outcome"/>. AttestorEntry uses init-only
/// properties, so a new instance is constructed with every other field
/// carried over unchanged.
/// </summary>
private static AttestorEntry WithMirror(AttestorEntry entry, SubmissionOutcome outcome)
{
return new AttestorEntry
{
RekorUuid = entry.RekorUuid,
Artifact = entry.Artifact,
BundleSha256 = entry.BundleSha256,
Index = entry.Index,
Proof = entry.Proof,
Log = entry.Log,
CreatedAt = entry.CreatedAt,
Status = entry.Status,
SignerIdentity = entry.SignerIdentity,
Mirror = CreateMirrorDescriptor(outcome)
};
}
/// <summary>
/// Rebuilds the entry around a successful primary submission. When the
/// existing entry was recorded against a different backend and no mirror is
/// tracked yet, its previous log coordinates are preserved as the mirror
/// descriptor so that history is not lost.
/// </summary>
/// <exception cref="InvalidOperationException">The outcome carries no Rekor response.</exception>
private AttestorEntry PromoteToPrimary(AttestorEntry existing, SubmissionOutcome outcome)
{
if (outcome.Submission is null)
{
throw new InvalidOperationException("Cannot promote to primary without a successful submission.");
}
var mirrorDescriptor = existing.Mirror;
// Capture the old (non-primary) log placement as the mirror record.
if (mirrorDescriptor is null && !string.Equals(existing.Log.Backend, outcome.Backend, StringComparison.OrdinalIgnoreCase))
{
mirrorDescriptor = CreateMirrorDescriptorFromEntry(existing);
}
return new AttestorEntry
{
RekorUuid = outcome.Submission.Uuid,
Artifact = existing.Artifact,
BundleSha256 = existing.BundleSha256,
Index = outcome.Submission.Index,
Proof = ConvertProof(outcome.Proof),
Log = new AttestorEntry.LogDescriptor
{
Backend = outcome.Backend,
Url = outcome.Submission.LogUrl ?? outcome.Url,
LogId = existing.Log.LogId
},
CreatedAt = existing.CreatedAt,
Status = outcome.Submission.Status ?? "included",
SignerIdentity = existing.SignerIdentity,
Mirror = mirrorDescriptor
};
}
/// <summary>
/// Converts an existing canonical entry into a mirror descriptor, used when a new backend
/// takes over as primary and the old entry is demoted to the mirror slot.
/// </summary>
private static AttestorEntry.LogReplicaDescriptor CreateMirrorDescriptorFromEntry(AttestorEntry entry)
{
    return new AttestorEntry.LogReplicaDescriptor
    {
        Backend = entry.Log.Backend,
        Url = entry.Log.Url,
        Uuid = entry.RekorUuid,
        Index = entry.Index,
        Status = entry.Status,
        Proof = entry.Proof,
        LogId = entry.Log.LogId
    };
}
/// <summary>
/// Result of a single Rekor backend submission attempt: either a successful
/// <see cref="RekorSubmissionResponse"/> (optionally with an inclusion proof)
/// or the failure exception, plus the observed latency.
/// </summary>
private sealed record SubmissionOutcome(
    string Backend,
    string Url,
    RekorSubmissionResponse? Submission,
    RekorProofResponse? Proof,
    TimeSpan Latency,
    Exception? Error)
{
    // Success requires both a response and the absence of a recorded error.
    public bool IsSuccess => Submission is not null && Error is null;
    public static SubmissionOutcome Success(string backend, Uri backendUrl, RekorSubmissionResponse submission, RekorProofResponse? proof, TimeSpan latency) =>
        new SubmissionOutcome(backend, backendUrl.ToString(), submission, proof, latency, null);
    public static SubmissionOutcome Failure(string backend, string? url, Exception error, TimeSpan latency) =>
        new SubmissionOutcome(backend, url ?? string.Empty, null, null, latency, error);
}
private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
if (string.IsNullOrWhiteSpace(options.Url))

View File

@@ -1,6 +1,12 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -10,7 +16,7 @@ using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
using System.Security.Cryptography;
using StellaOps.Attestor.Core.Observability;
namespace StellaOps.Attestor.Infrastructure.Verification;
@@ -21,19 +27,22 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
private readonly IRekorClient _rekorClient;
private readonly ILogger<AttestorVerificationService> _logger;
private readonly AttestorOptions _options;
private readonly AttestorMetrics _metrics;
public AttestorVerificationService(
IAttestorEntryRepository repository,
IDsseCanonicalizer canonicalizer,
IRekorClient rekorClient,
IOptions<AttestorOptions> options,
ILogger<AttestorVerificationService> logger)
ILogger<AttestorVerificationService> logger,
AttestorMetrics metrics)
{
_repository = repository;
_canonicalizer = canonicalizer;
_rekorClient = rekorClient;
_logger = logger;
_options = options.Value;
_metrics = metrics;
}
public async Task<AttestorVerificationResult> VerifyAsync(AttestorVerificationRequest request, CancellationToken cancellationToken = default)
@@ -67,11 +76,25 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
}
}, cancellationToken).ConfigureAwait(false);
var computedHash = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(canonicalBundle)).ToLowerInvariant();
var computedHash = Convert.ToHexString(SHA256.HashData(canonicalBundle)).ToLowerInvariant();
if (!string.Equals(computedHash, entry.BundleSha256, StringComparison.OrdinalIgnoreCase))
{
issues.Add("Bundle hash does not match stored canonical hash.");
issues.Add("bundle_hash_mismatch");
}
if (!TryDecodeBase64(request.Bundle.Dsse.PayloadBase64, out var payloadBytes))
{
issues.Add("bundle_payload_invalid_base64");
}
else
{
var preAuth = ComputePreAuthEncoding(request.Bundle.Dsse.PayloadType, payloadBytes);
VerifySignatures(entry, request.Bundle, preAuth, issues);
}
}
else
{
_logger.LogDebug("No DSSE bundle supplied for verification of {Uuid}; signature checks skipped.", entry.RekorUuid);
}
if (request.RefreshProof || entry.Proof is null)
@@ -94,8 +117,12 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
}
}
VerifyMerkleProof(entry, issues);
var ok = issues.Count == 0 && string.Equals(entry.Status, "included", StringComparison.OrdinalIgnoreCase);
_metrics.VerifyTotal.Add(1, new KeyValuePair<string, object?>("result", ok ? "ok" : "failed"));
return new AttestorVerificationResult
{
Ok = ok,
@@ -204,6 +231,472 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
: entry;
}
/// <summary>
/// Dispatches DSSE signature verification by signer mode ("kms" or "keyless"),
/// appending issue codes for failures or unknown/unsupported modes.
/// </summary>
private void VerifySignatures(AttestorEntry entry, AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList<string> issues)
{
    // Prefer the mode captured at submission time; fall back to the bundle's own mode.
    var mode = (entry.SignerIdentity.Mode ?? bundle.Mode ?? string.Empty).ToLowerInvariant();
    switch (mode)
    {
        case "kms":
            if (!VerifyKmsSignature(bundle, preAuthEncoding, issues))
            {
                issues.Add("signature_invalid_kms");
            }
            break;
        case "keyless":
            VerifyKeylessSignature(entry, bundle, preAuthEncoding, issues);
            break;
        case "":
            issues.Add("signer_mode_unknown");
            break;
        default:
            issues.Add($"signer_mode_unsupported:{mode}");
            break;
    }
}
/// <summary>
/// Validates DSSE signatures produced in "kms" mode. Each configured KMS secret is used
/// as an HMAC-SHA256 key over the DSSE pre-authentication encoding; the bundle passes
/// when any configured secret matches any signature.
/// </summary>
/// <returns><c>true</c> when at least one signature verifies; otherwise <c>false</c>.</returns>
private bool VerifyKmsSignature(AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList<string> issues)
{
    if (_options.Security.SignerIdentity.KmsKeys.Count == 0)
    {
        issues.Add("kms_key_missing");
        return false;
    }
    // Decode all signatures up front so a malformed one fails the check immediately.
    var signatures = new List<byte[]>();
    foreach (var signature in bundle.Dsse.Signatures)
    {
        if (!TryDecodeBase64(signature.Signature, out var signatureBytes))
        {
            issues.Add("signature_invalid_base64");
            return false;
        }
        signatures.Add(signatureBytes);
    }
    foreach (var secret in _options.Security.SignerIdentity.KmsKeys)
    {
        // Undecodable secrets are skipped rather than failing the whole verification.
        if (!TryDecodeSecret(secret, out var secretBytes))
        {
            continue;
        }
        using var hmac = new HMACSHA256(secretBytes);
        var computed = hmac.ComputeHash(preAuthEncoding);
        foreach (var signatureBytes in signatures)
        {
            // Constant-time comparison avoids leaking which key/signature matched.
            if (CryptographicOperations.FixedTimeEquals(computed, signatureBytes))
            {
                return true;
            }
        }
    }
    return false;
}
/// <summary>
/// Validates DSSE signatures produced in "keyless" (Fulcio-style) mode: parses the supplied
/// certificate chain, optionally verifies it against configured Fulcio roots, enforces the
/// SAN allowlist, and checks each signature against the leaf certificate's public key.
/// Failures are reported by appending issue codes rather than throwing.
/// </summary>
private void VerifyKeylessSignature(AttestorEntry entry, AttestorSubmissionRequest.SubmissionBundle bundle, byte[] preAuthEncoding, IList<string> issues)
{
    if (bundle.CertificateChain.Count == 0)
    {
        issues.Add("certificate_chain_missing");
        return;
    }
    var certificates = new List<X509Certificate2>();
    try
    {
        foreach (var pem in bundle.CertificateChain)
        {
            certificates.Add(X509Certificate2.CreateFromPem(pem));
        }
    }
    catch (Exception ex) when (ex is CryptographicException or ArgumentException)
    {
        issues.Add("certificate_chain_invalid");
        _logger.LogWarning(ex, "Failed to parse certificate chain for {Uuid}", entry.RekorUuid);
        return;
    }
    // The first PEM entry is treated as the leaf (signing) certificate.
    // NOTE(review): assumes callers always send leaf-first ordering — confirm.
    var leafCertificate = certificates[0];
    if (_options.Security.SignerIdentity.FulcioRoots.Count > 0)
    {
        // Chain validation uses a custom trust store of configured roots only; revocation
        // is not checked (Fulcio-style certificates are typically short-lived).
        using var chain = new X509Chain
        {
            ChainPolicy =
            {
                RevocationMode = X509RevocationMode.NoCheck,
                VerificationFlags = X509VerificationFlags.NoFlag,
                TrustMode = X509ChainTrustMode.CustomRootTrust
            }
        };
        foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots)
        {
            try
            {
                if (File.Exists(rootPath))
                {
                    var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath);
                    chain.ChainPolicy.CustomTrustStore.Add(rootCertificate);
                }
            }
            catch (Exception ex)
            {
                // A bad root file degrades trust anchoring but does not abort verification.
                _logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath);
            }
        }
        if (!chain.Build(leafCertificate))
        {
            var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim()))
                .Trim(';');
            issues.Add(string.IsNullOrEmpty(status) ? "certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}");
        }
    }
    if (_options.Security.SignerIdentity.AllowedSans.Count > 0)
    {
        // At least one SAN on the leaf must be in the configured allowlist.
        var sans = GetSubjectAlternativeNames(leafCertificate);
        if (!sans.Any(san => _options.Security.SignerIdentity.AllowedSans.Contains(san, StringComparer.OrdinalIgnoreCase)))
        {
            issues.Add("certificate_san_untrusted");
        }
    }
    // Any one signature that verifies against the leaf key is sufficient.
    var signatureVerified = false;
    foreach (var signature in bundle.Dsse.Signatures)
    {
        if (!TryDecodeBase64(signature.Signature, out var signatureBytes))
        {
            issues.Add("signature_invalid_base64");
            return;
        }
        if (TryVerifyWithCertificate(leafCertificate, preAuthEncoding, signatureBytes))
        {
            signatureVerified = true;
            break;
        }
    }
    if (!signatureVerified)
    {
        issues.Add("signature_invalid");
    }
}
/// <summary>
/// Verifies <paramref name="signature"/> over <paramref name="preAuthEncoding"/> using the
/// certificate's public key: ECDSA/SHA-256 when available, otherwise RSA/SHA-256 (PKCS#1 v1.5).
/// Returns <c>false</c> for unsupported key types or cryptographic failures.
/// </summary>
private static bool TryVerifyWithCertificate(X509Certificate2 certificate, byte[] preAuthEncoding, byte[] signature)
{
    try
    {
        if (certificate.GetECDsaPublicKey() is { } ecdsa)
        {
            using (ecdsa)
            {
                return ecdsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256);
            }
        }

        if (certificate.GetRSAPublicKey() is { } rsa)
        {
            using (rsa)
            {
                return rsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
            }
        }

        return false;
    }
    catch (CryptographicException)
    {
        return false;
    }
}
/// <summary>
/// Extracts subject alternative name values from the certificate's SAN extension (OID 2.5.29.17).
/// </summary>
/// <remarks>
/// <see cref="X509Extension.Format"/> renders one SAN entry per line, but the type/value
/// separator is platform dependent: '=' on Windows ("DNS Name=example.com") versus ':' on
/// Linux/OpenSSL ("DNS:example.com"). The previous implementation split on '=' only, so it
/// returned nothing on Linux and also dropped values containing '='. Splitting at the FIRST
/// occurrence of either separator handles both platforms and keeps URI SANs (which contain
/// ':' in the value) intact.
/// </remarks>
private static IEnumerable<string> GetSubjectAlternativeNames(X509Certificate2 certificate)
{
    foreach (var extension in certificate.Extensions)
    {
        if (!string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal))
        {
            continue;
        }

        var formatted = extension.Format(true);
        var lines = formatted.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
        foreach (var line in lines)
        {
            // Split at the first '=' or ':' so values that themselves contain a separator survive.
            var separatorIndex = line.IndexOfAny(new[] { '=', ':' });
            if (separatorIndex > 0 && separatorIndex < line.Length - 1)
            {
                yield return line[(separatorIndex + 1)..].Trim();
            }
        }
    }
}
/// <summary>
/// Builds the DSSE pre-authentication encoding:
/// "DSSEv1" || len(type) || type || len(payload) || payload,
/// with both lengths written as 64-bit big-endian integers.
/// </summary>
private static byte[] ComputePreAuthEncoding(string payloadType, byte[] payload)
{
    var typeBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
    var result = new byte[6 + 8 + typeBytes.Length + 8 + payload.Length];
    var span = result.AsSpan();

    Encoding.ASCII.GetBytes("DSSEv1", span[..6]);
    BinaryPrimitives.WriteUInt64BigEndian(span.Slice(6, 8), (ulong)typeBytes.Length);
    typeBytes.CopyTo(span[14..]);

    var payloadLengthOffset = 14 + typeBytes.Length;
    BinaryPrimitives.WriteUInt64BigEndian(span.Slice(payloadLengthOffset, 8), (ulong)payload.Length);
    payload.CopyTo(span[(payloadLengthOffset + 8)..]);

    return result;
}
/// <summary>
/// Recomputes the Merkle inclusion proof for the stored bundle hash and compares the
/// resulting root against the checkpoint root. Issue codes are appended for every
/// decode/consistency failure; no issues added means the proof is consistent.
/// </summary>
private void VerifyMerkleProof(AttestorEntry entry, IList<string> issues)
{
    if (entry.Proof is null)
    {
        issues.Add("proof_missing");
        return;
    }
    if (!TryDecodeHash(entry.BundleSha256, out var bundleHash))
    {
        issues.Add("bundle_hash_decode_failed");
        return;
    }
    if (entry.Proof.Inclusion is null)
    {
        issues.Add("proof_inclusion_missing");
        return;
    }
    // When the log reported a leaf hash it must match the canonical bundle hash.
    // NOTE(review): the leaf is compared to the raw bundle hash rather than an RFC 6962
    // leaf hash (0x00-prefixed) — confirm this matches the backend's leaf encoding.
    if (entry.Proof.Inclusion.LeafHash is not null)
    {
        if (!TryDecodeHash(entry.Proof.Inclusion.LeafHash, out var proofLeaf))
        {
            issues.Add("proof_leafhash_decode_failed");
            return;
        }
        if (!CryptographicOperations.FixedTimeEquals(bundleHash, proofLeaf))
        {
            issues.Add("proof_leafhash_mismatch");
        }
    }
    // Fold the audit path into a candidate root, combining left/right according to each
    // node's recorded orientation (interior nodes are hashed via HashInternal).
    var current = bundleHash;
    if (entry.Proof.Inclusion.Path.Count > 0)
    {
        var nodes = new List<ProofPathNode>();
        foreach (var element in entry.Proof.Inclusion.Path)
        {
            if (!ProofPathNode.TryParse(element, out var node))
            {
                issues.Add("proof_path_decode_failed");
                return;
            }
            // Orientation is mandatory: without it the combining order is ambiguous.
            if (!node.HasOrientation)
            {
                issues.Add("proof_path_orientation_missing");
                return;
            }
            nodes.Add(node);
        }
        foreach (var node in nodes)
        {
            current = node.Left
                ? HashInternal(node.Hash, current)
                : HashInternal(current, node.Hash);
        }
    }
    if (entry.Proof.Checkpoint is null)
    {
        issues.Add("checkpoint_missing");
        return;
    }
    if (!TryDecodeHash(entry.Proof.Checkpoint.RootHash, out var rootHash))
    {
        issues.Add("checkpoint_root_decode_failed");
        return;
    }
    if (!CryptographicOperations.FixedTimeEquals(current, rootHash))
    {
        issues.Add("proof_root_mismatch");
    }
}
/// <summary>
/// Computes an RFC 6962 style interior-node hash: SHA-256 over 0x01 || left || right.
/// </summary>
private static byte[] HashInternal(byte[] left, byte[] right)
{
    var buffer = new byte[1 + left.Length + right.Length];
    buffer[0] = 0x01; // domain-separation prefix for interior (non-leaf) nodes
    Buffer.BlockCopy(left, 0, buffer, 1, left.Length);
    Buffer.BlockCopy(right, 0, buffer, 1 + left.Length, right.Length);
    // Static one-shot API avoids allocating/disposing a hasher instance and matches
    // the SHA256.HashData usage elsewhere in this service.
    return SHA256.HashData(buffer);
}
/// <summary>
/// Decodes a configured secret. An explicit "base64:" or "hex:" prefix pins the encoding;
/// otherwise base64 is attempted first, then hex. On failure <paramref name="bytes"/> is empty.
/// </summary>
private static bool TryDecodeSecret(string value, out byte[] bytes)
{
    bytes = Array.Empty<byte>();
    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }

    var trimmed = value.Trim();

    if (trimmed.StartsWith("base64:", StringComparison.OrdinalIgnoreCase))
    {
        return TryDecodeBase64(trimmed["base64:".Length..], out bytes);
    }

    if (trimmed.StartsWith("hex:", StringComparison.OrdinalIgnoreCase))
    {
        return TryDecodeHex(trimmed["hex:".Length..], out bytes);
    }

    // No prefix: accept whichever encoding parses (base64 wins on ambiguity).
    return TryDecodeBase64(trimmed, out bytes) || TryDecodeHex(trimmed, out bytes);
}
/// <summary>
/// Attempts to decode <paramref name="value"/> as base64; on failure
/// <paramref name="bytes"/> is an empty array.
/// </summary>
private static bool TryDecodeBase64(string value, out byte[] bytes)
{
    // Worst case base64 yields 3 output bytes per 4 input chars; whitespace only shrinks that,
    // so this scratch buffer is always large enough. TryFromBase64String avoids
    // exception-driven control flow for invalid input.
    var buffer = new byte[(value.Length / 4 + 1) * 3];
    if (Convert.TryFromBase64String(value, buffer, out var written))
    {
        bytes = buffer.AsSpan(0, written).ToArray();
        return true;
    }

    bytes = Array.Empty<byte>();
    return false;
}
/// <summary>
/// Attempts to decode <paramref name="value"/> as an even-length hexadecimal string;
/// on failure <paramref name="bytes"/> is an empty array.
/// </summary>
private static bool TryDecodeHex(string value, out byte[] bytes)
{
    try
    {
        bytes = Convert.FromHexString(value);
    }
    catch (FormatException)
    {
        // Odd length or a non-hex character.
        bytes = Array.Empty<byte>();
        return false;
    }

    return true;
}
/// <summary>
/// Decodes a hash that may be supplied as hex or base64; on failure
/// <paramref name="bytes"/> is an empty array.
/// </summary>
private static bool TryDecodeHash(string? value, out byte[] bytes)
{
    bytes = Array.Empty<byte>();
    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }

    // Hex is attempted first so an all-hex-digit digest is never misread as base64.
    var trimmed = value.Trim();
    return TryDecodeHex(trimmed, out bytes) || TryDecodeBase64(trimmed, out bytes);
}
/// <summary>
/// One element of a Merkle audit path: a sibling hash plus an optional orientation
/// prefix ("l"/"left" or "r"/"right") indicating which side of the concatenation
/// the sibling occupies.
/// </summary>
private readonly struct ProofPathNode
{
    private ProofPathNode(bool hasOrientation, bool left, byte[] hash)
    {
        HasOrientation = hasOrientation;
        Left = left;
        Hash = hash;
    }
    // True when the element carried an explicit orientation prefix.
    public bool HasOrientation { get; }
    // True when the sibling hash belongs on the left of the concatenation.
    public bool Left { get; }
    // Decoded sibling hash bytes.
    public byte[] Hash { get; }
    /// <summary>
    /// Parses "prefix:hash" or bare "hash" path elements; the hash part may be hex or base64.
    /// Returns <c>false</c> for blank input or an undecodable hash. An unrecognized prefix
    /// leaves <see cref="HasOrientation"/> false while still decoding the hash part.
    /// </summary>
    public static bool TryParse(string value, out ProofPathNode node)
    {
        node = default;
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }
        var trimmed = value.Trim();
        // Split only on the first ':' so base64 hashes containing ':' are not mangled further.
        var parts = trimmed.Split(':', 2);
        bool hasOrientation = false;
        bool left = false;
        string hashPart = trimmed;
        if (parts.Length == 2)
        {
            var prefix = parts[0].Trim().ToLowerInvariant();
            if (prefix is "l" or "left")
            {
                hasOrientation = true;
                left = true;
            }
            else if (prefix is "r" or "right")
            {
                hasOrientation = true;
                left = false;
            }
            hashPart = parts[1].Trim();
        }
        if (!TryDecodeHash(hashPart, out var hash))
        {
            return false;
        }
        node = new ProofPathNode(hasOrientation, left, hash);
        return true;
    }
}
private static AttestorEntry CloneWithProof(AttestorEntry entry, AttestorEntry.ProofDescriptor? proof)
{
return new AttestorEntry

View File

@@ -80,6 +80,207 @@ public sealed class AttestorSubmissionServiceTests
Assert.Equal(first.Uuid, stored!.RekorUuid);
}
[Fact]
public async Task Validator_ThrowsWhenModeNotAllowed()
{
    // A validator restricted to "kms" must reject a bundle submitted in "keyless" mode.
    var canonicalizer = new DefaultDsseCanonicalizer();
    var validator = new AttestorSubmissionValidator(canonicalizer, new[] { "kms" });
    var request = CreateValidRequest(canonicalizer);
    request.Bundle.Mode = "keyless";
    await Assert.ThrowsAsync<AttestorValidationException>(() => validator.ValidateAsync(request));
}
[Fact]
public async Task SubmitAsync_Throws_WhenMirrorDisabledButRequested()
{
    // Arrange: only a primary Rekor backend is configured (no mirror section).
    var options = Options.Create(new AttestorOptions
    {
        Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
        Rekor = new AttestorOptions.RekorOptions
        {
            Primary = new AttestorOptions.RekorBackendOptions
            {
                Url = "https://rekor.primary.test",
                ProofTimeoutMs = 1000,
                PollIntervalMs = 50,
                MaxAttempts = 2
            }
        }
    });
    var canonicalizer = new DefaultDsseCanonicalizer();
    var validator = new AttestorSubmissionValidator(canonicalizer);
    var repository = new InMemoryAttestorEntryRepository();
    var dedupeStore = new InMemoryAttestorDedupeStore();
    var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
    var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
    var auditSink = new InMemoryAttestorAuditSink();
    var logger = new NullLogger<AttestorSubmissionService>();
    using var metrics = new AttestorMetrics();
    var service = new AttestorSubmissionService(
        validator,
        repository,
        dedupeStore,
        rekorClient,
        archiveStore,
        auditSink,
        options,
        logger,
        TimeProvider.System,
        metrics);
    // Act: request mirror logging even though no mirror backend exists.
    var request = CreateValidRequest(canonicalizer);
    request.Meta.LogPreference = "mirror";
    var context = new SubmissionContext
    {
        CallerSubject = "urn:stellaops:signer",
        CallerAudience = "attestor",
        CallerClientId = "signer-service",
        CallerTenant = "default"
    };
    // Assert: submission fails with the dedicated validation code.
    var ex = await Assert.ThrowsAsync<AttestorValidationException>(() => service.SubmitAsync(request, context));
    Assert.Equal("mirror_disabled", ex.Code);
}
[Fact]
public async Task SubmitAsync_ReturnsMirrorMetadata_WhenPreferenceBoth()
{
    // Arrange: both a primary and an enabled mirror Rekor backend are configured.
    var options = Options.Create(new AttestorOptions
    {
        Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
        Rekor = new AttestorOptions.RekorOptions
        {
            Primary = new AttestorOptions.RekorBackendOptions
            {
                Url = "https://rekor.primary.test",
                ProofTimeoutMs = 1000,
                PollIntervalMs = 50,
                MaxAttempts = 2
            },
            Mirror = new AttestorOptions.RekorMirrorOptions
            {
                Enabled = true,
                Url = "https://rekor.mirror.test",
                ProofTimeoutMs = 1000,
                PollIntervalMs = 50,
                MaxAttempts = 2
            }
        }
    });
    var canonicalizer = new DefaultDsseCanonicalizer();
    var validator = new AttestorSubmissionValidator(canonicalizer);
    var repository = new InMemoryAttestorEntryRepository();
    var dedupeStore = new InMemoryAttestorDedupeStore();
    var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
    var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
    var auditSink = new InMemoryAttestorAuditSink();
    var logger = new NullLogger<AttestorSubmissionService>();
    using var metrics = new AttestorMetrics();
    var service = new AttestorSubmissionService(
        validator,
        repository,
        dedupeStore,
        rekorClient,
        archiveStore,
        auditSink,
        options,
        logger,
        TimeProvider.System,
        metrics);
    // Act: ask for submission to both backends.
    var request = CreateValidRequest(canonicalizer);
    request.Meta.LogPreference = "both";
    var context = new SubmissionContext
    {
        CallerSubject = "urn:stellaops:signer",
        CallerAudience = "attestor",
        CallerClientId = "signer-service",
        CallerTenant = "default"
    };
    var result = await service.SubmitAsync(request, context);
    // Assert: the response carries the mirror replica metadata alongside the primary result.
    Assert.NotNull(result.Mirror);
    Assert.False(string.IsNullOrEmpty(result.Mirror!.Uuid));
    Assert.Equal("included", result.Mirror.Status);
}
[Fact]
public async Task SubmitAsync_UsesMirrorAsCanonical_WhenPreferenceMirror()
{
    // Arrange: primary and enabled mirror backends configured.
    var options = Options.Create(new AttestorOptions
    {
        Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
        Rekor = new AttestorOptions.RekorOptions
        {
            Primary = new AttestorOptions.RekorBackendOptions
            {
                Url = "https://rekor.primary.test",
                ProofTimeoutMs = 1000,
                PollIntervalMs = 50,
                MaxAttempts = 2
            },
            Mirror = new AttestorOptions.RekorMirrorOptions
            {
                Enabled = true,
                Url = "https://rekor.mirror.test",
                ProofTimeoutMs = 1000,
                PollIntervalMs = 50,
                MaxAttempts = 2
            }
        }
    });
    var canonicalizer = new DefaultDsseCanonicalizer();
    var validator = new AttestorSubmissionValidator(canonicalizer);
    var repository = new InMemoryAttestorEntryRepository();
    var dedupeStore = new InMemoryAttestorDedupeStore();
    var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
    var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
    var auditSink = new InMemoryAttestorAuditSink();
    var logger = new NullLogger<AttestorSubmissionService>();
    using var metrics = new AttestorMetrics();
    var service = new AttestorSubmissionService(
        validator,
        repository,
        dedupeStore,
        rekorClient,
        archiveStore,
        auditSink,
        options,
        logger,
        TimeProvider.System,
        metrics);
    // Act: prefer the mirror backend for the canonical entry.
    var request = CreateValidRequest(canonicalizer);
    request.Meta.LogPreference = "mirror";
    var context = new SubmissionContext
    {
        CallerSubject = "urn:stellaops:signer",
        CallerAudience = "attestor",
        CallerClientId = "signer-service",
        CallerTenant = "default"
    };
    var result = await service.SubmitAsync(request, context);
    // Assert: the stored entry is logged against the mirror backend, and because the
    // mirror IS the canonical log there is no separate replica descriptor on the result.
    Assert.NotNull(result.Uuid);
    var stored = await repository.GetByBundleShaAsync(request.Meta.BundleSha256);
    Assert.NotNull(stored);
    Assert.Equal("mirror", stored!.Log.Backend);
    Assert.Null(result.Mirror);
}
private static AttestorSubmissionRequest CreateValidRequest(DefaultDsseCanonicalizer canonicalizer)
{
var request = new AttestorSubmissionRequest

View File

@@ -1,3 +1,5 @@
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
@@ -17,6 +19,9 @@ namespace StellaOps.Attestor.Tests;
public sealed class AttestorVerificationServiceTests
{
private static readonly byte[] HmacSecret = Encoding.UTF8.GetBytes("attestor-hmac-secret");
private static readonly string HmacSecretBase64 = Convert.ToBase64String(HmacSecret);
[Fact]
public async Task VerifyAsync_ReturnsOk_ForExistingUuid()
{
@@ -35,6 +40,14 @@ public sealed class AttestorVerificationServiceTests
PollIntervalMs = 50,
MaxAttempts = 2
}
},
Security = new AttestorOptions.SecurityOptions
{
SignerIdentity = new AttestorOptions.SignerIdentityOptions
{
Mode = { "kms" },
KmsKeys = { HmacSecretBase64 }
}
}
});
@@ -57,7 +70,7 @@ public sealed class AttestorVerificationServiceTests
TimeProvider.System,
metrics);
var submission = CreateSubmissionRequest(canonicalizer);
var submission = CreateSubmissionRequest(canonicalizer, HmacSecret);
var context = new SubmissionContext
{
CallerSubject = "urn:stellaops:signer",
@@ -73,11 +86,13 @@ public sealed class AttestorVerificationServiceTests
canonicalizer,
rekorClient,
options,
new NullLogger<AttestorVerificationService>());
new NullLogger<AttestorVerificationService>(),
metrics);
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
{
Uuid = response.Uuid
Uuid = response.Uuid,
Bundle = submission.Bundle
});
Assert.True(verifyResult.Ok);
@@ -100,6 +115,14 @@ public sealed class AttestorVerificationServiceTests
PollIntervalMs = 50,
MaxAttempts = 2
}
},
Security = new AttestorOptions.SecurityOptions
{
SignerIdentity = new AttestorOptions.SignerIdentityOptions
{
Mode = { "kms" },
KmsKeys = { HmacSecretBase64 }
}
}
});
@@ -122,7 +145,7 @@ public sealed class AttestorVerificationServiceTests
TimeProvider.System,
metrics);
var submission = CreateSubmissionRequest(canonicalizer);
var submission = CreateSubmissionRequest(canonicalizer, HmacSecret);
var context = new SubmissionContext
{
CallerSubject = "urn:stellaops:signer",
@@ -138,9 +161,10 @@ public sealed class AttestorVerificationServiceTests
canonicalizer,
rekorClient,
options,
new NullLogger<AttestorVerificationService>());
new NullLogger<AttestorVerificationService>(),
metrics);
var tamperedBundle = submission.Bundle;
var tamperedBundle = CloneBundle(submission.Bundle);
tamperedBundle.Dsse.PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"tampered\":true}"));
var result = await verificationService.VerifyAsync(new AttestorVerificationRequest
@@ -150,29 +174,21 @@ public sealed class AttestorVerificationServiceTests
});
Assert.False(result.Ok);
Assert.Contains(result.Issues, issue => issue.Contains("Bundle hash", StringComparison.OrdinalIgnoreCase));
Assert.Contains(result.Issues, issue => issue.Contains("signature_invalid", StringComparison.OrdinalIgnoreCase));
}
private static AttestorSubmissionRequest CreateSubmissionRequest(DefaultDsseCanonicalizer canonicalizer)
private static AttestorSubmissionRequest CreateSubmissionRequest(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret)
{
var payload = Encoding.UTF8.GetBytes("{}");
var request = new AttestorSubmissionRequest
{
Bundle = new AttestorSubmissionRequest.SubmissionBundle
{
Mode = "keyless",
Mode = "kms",
Dsse = new AttestorSubmissionRequest.DsseEnvelope
{
PayloadType = "application/vnd.in-toto+json",
PayloadBase64 = Convert.ToBase64String(payload),
Signatures =
{
new AttestorSubmissionRequest.DsseSignature
{
KeyId = "test",
Signature = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32))
}
}
PayloadBase64 = Convert.ToBase64String(payload)
}
},
Meta = new AttestorSubmissionRequest.SubmissionMeta
@@ -187,8 +203,65 @@ public sealed class AttestorVerificationServiceTests
}
};
var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload);
using (var hmac = new HMACSHA256(hmacSecret))
{
var signature = hmac.ComputeHash(preAuth);
request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
{
KeyId = "kms-test",
Signature = Convert.ToBase64String(signature)
});
}
var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult();
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
return request;
}
/// <summary>
/// Deep-copies a submission bundle so a test can tamper with the copy without mutating
/// the original request (the envelope and signature objects are reference types).
/// </summary>
private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source)
{
    var clone = new AttestorSubmissionRequest.SubmissionBundle
    {
        Mode = source.Mode,
        Dsse = new AttestorSubmissionRequest.DsseEnvelope
        {
            PayloadType = source.Dsse.PayloadType,
            PayloadBase64 = source.Dsse.PayloadBase64
        }
    };
    // Certificate PEMs are immutable strings, so copying the references is sufficient.
    foreach (var certificate in source.CertificateChain)
    {
        clone.CertificateChain.Add(certificate);
    }
    foreach (var signature in source.Dsse.Signatures)
    {
        clone.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
        {
            KeyId = signature.KeyId,
            Signature = signature.Signature
        });
    }
    return clone;
}
/// <summary>
/// Test-side duplicate of the service's DSSE pre-authentication encoding:
/// "DSSEv1" || len(type) || type || len(payload) || payload, lengths as 64-bit big-endian.
/// </summary>
private static byte[] ComputePreAuthEncodingForTests(string payloadType, byte[] payload)
{
    var typeBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
    var result = new byte[6 + 8 + typeBytes.Length + 8 + payload.Length];
    var span = result.AsSpan();

    Encoding.ASCII.GetBytes("DSSEv1", span[..6]);
    BinaryPrimitives.WriteUInt64BigEndian(span.Slice(6, 8), (ulong)typeBytes.Length);
    typeBytes.CopyTo(span[14..]);

    var payloadLengthOffset = 14 + typeBytes.Length;
    BinaryPrimitives.WriteUInt64BigEndian(span.Slice(payloadLengthOffset, 8), (ulong)payload.Length);
    payload.CopyTo(span[(payloadLengthOffset + 8)..]);

    return result;
}
}

View File

@@ -1,6 +1,11 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Authentication;
using System.Security.Cryptography;
using System.Security.Claims;
using System.Security.Cryptography.X509Certificates;
using System.Threading.RateLimiting;
using Serilog;
using Serilog.Events;
using StellaOps.Attestor.Core.Options;
@@ -13,6 +18,7 @@ using OpenTelemetry.Metrics;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Verification;
using Microsoft.AspNetCore.Server.Kestrel.Https;
using Serilog.Context;
const string ConfigurationSection = "attestor";
@@ -36,9 +42,45 @@ builder.Host.UseSerilog((context, services, loggerConfiguration) =>
var attestorOptions = builder.Configuration.BindOptions<AttestorOptions>(ConfigurationSection);
var clientCertificateAuthorities = LoadClientCertificateAuthorities(attestorOptions.Security.Mtls.CaBundle);
builder.Services.AddSingleton(TimeProvider.System);
builder.Services.AddSingleton(attestorOptions);
builder.Services.AddRateLimiter(options =>
{
options.RejectionStatusCode = StatusCodes.Status429TooManyRequests;
options.OnRejected = static (context, _) =>
{
context.HttpContext.Response.Headers.TryAdd("Retry-After", "1");
return ValueTask.CompletedTask;
};
options.AddPolicy("attestor-submissions", httpContext =>
{
var identity = httpContext.Connection.ClientCertificate?.Thumbprint
?? httpContext.User.FindFirst("sub")?.Value
?? httpContext.User.FindFirst("client_id")?.Value
?? httpContext.Connection.RemoteIpAddress?.ToString()
?? "anonymous";
var quota = attestorOptions.Quotas.PerCaller;
var tokensPerPeriod = Math.Max(1, quota.Qps);
var tokenLimit = Math.Max(tokensPerPeriod, quota.Burst);
var queueLimit = Math.Max(quota.Burst, tokensPerPeriod);
return RateLimitPartition.GetTokenBucketLimiter(identity, _ => new TokenBucketRateLimiterOptions
{
TokenLimit = tokenLimit,
TokensPerPeriod = tokensPerPeriod,
ReplenishmentPeriod = TimeSpan.FromSeconds(1),
QueueLimit = queueLimit,
QueueProcessingOrder = QueueProcessingOrder.OldestFirst,
AutoReplenishment = true
});
});
});
builder.Services.AddOptions<AttestorOptions>()
.Bind(builder.Configuration.GetSection(ConfigurationSection))
.ValidateOnStart();
@@ -105,6 +147,61 @@ builder.WebHost.ConfigureKestrel(kestrel =>
{
https.ClientCertificateMode = ClientCertificateMode.RequireCertificate;
}
https.SslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12;
https.ClientCertificateValidation = (certificate, _, _) =>
{
if (!attestorOptions.Security.Mtls.RequireClientCertificate)
{
return true;
}
if (certificate is null)
{
Log.Warning("Client certificate missing");
return false;
}
if (clientCertificateAuthorities.Count > 0)
{
using var chain = new X509Chain
{
ChainPolicy =
{
RevocationMode = X509RevocationMode.NoCheck,
TrustMode = X509ChainTrustMode.CustomRootTrust
}
};
foreach (var authority in clientCertificateAuthorities)
{
chain.ChainPolicy.CustomTrustStore.Add(authority);
}
if (!chain.Build(certificate))
{
Log.Warning("Client certificate chain validation failed for {Subject}", certificate.Subject);
return false;
}
}
if (attestorOptions.Security.Mtls.AllowedThumbprints.Count > 0 &&
!attestorOptions.Security.Mtls.AllowedThumbprints.Contains(certificate.Thumbprint ?? string.Empty, StringComparer.OrdinalIgnoreCase))
{
Log.Warning("Client certificate thumbprint {Thumbprint} rejected", certificate.Thumbprint);
return false;
}
if (attestorOptions.Security.Mtls.AllowedSubjects.Count > 0 &&
!attestorOptions.Security.Mtls.AllowedSubjects.Contains(certificate.Subject, StringComparer.OrdinalIgnoreCase))
{
Log.Warning("Client certificate subject {Subject} rejected", certificate.Subject);
return false;
}
return true;
};
});
});
@@ -112,6 +209,22 @@ var app = builder.Build();
app.UseSerilogRequestLogging();
app.Use(async (context, next) =>
{
var correlationId = context.Request.Headers["X-Correlation-Id"].FirstOrDefault();
if (string.IsNullOrWhiteSpace(correlationId))
{
correlationId = Guid.NewGuid().ToString("N");
}
context.Response.Headers["X-Correlation-Id"] = correlationId;
using (LogContext.PushProperty("CorrelationId", correlationId))
{
await next().ConfigureAwait(false);
}
});
app.UseExceptionHandler(static handler =>
{
handler.Run(async context =>
@@ -121,6 +234,8 @@ app.UseExceptionHandler(static handler =>
});
});
app.UseRateLimiter();
app.UseAuthentication();
app.UseAuthorization();
@@ -156,7 +271,8 @@ app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, H
});
}
})
.RequireAuthorization("attestor:write");
.RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-submissions");
app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
{
@@ -170,6 +286,7 @@ app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IA
{
uuid = entry.RekorUuid,
index = entry.Index,
backend = entry.Log.Backend,
proof = entry.Proof is null ? null : new
{
checkpoint = entry.Proof.Checkpoint is null ? null : new
@@ -187,6 +304,30 @@ app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IA
},
logURL = entry.Log.Url,
status = entry.Status,
mirror = entry.Mirror is null ? null : new
{
backend = entry.Mirror.Backend,
uuid = entry.Mirror.Uuid,
index = entry.Mirror.Index,
logURL = entry.Mirror.Url,
status = entry.Mirror.Status,
proof = entry.Mirror.Proof is null ? null : new
{
checkpoint = entry.Mirror.Proof.Checkpoint is null ? null : new
{
origin = entry.Mirror.Proof.Checkpoint.Origin,
size = entry.Mirror.Proof.Checkpoint.Size,
rootHash = entry.Mirror.Proof.Checkpoint.RootHash,
timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O")
},
inclusion = entry.Mirror.Proof.Inclusion is null ? null : new
{
leafHash = entry.Mirror.Proof.Inclusion.LeafHash,
path = entry.Mirror.Proof.Inclusion.Path
}
},
error = entry.Mirror.Error
},
artifact = new
{
sha256 = entry.Artifact.Sha256,
@@ -232,3 +373,33 @@ static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certif
MtlsThumbprint = certificate.Thumbprint
};
}
// Loads the PEM-encoded client CA bundle used for mTLS peer-chain validation.
// A missing or unreadable bundle degrades to an empty list (with a warning) instead of
// failing startup; with an empty list the Kestrel callback skips custom chain validation.
static List<X509Certificate2> LoadClientCertificateAuthorities(string? path)
{
    var certificates = new List<X509Certificate2>();
    if (string.IsNullOrWhiteSpace(path))
    {
        return certificates;
    }
    try
    {
        if (!File.Exists(path))
        {
            Log.Warning("Client CA bundle '{Path}' not found", path);
            return certificates;
        }
        var collection = new X509Certificate2Collection();
        collection.ImportFromPemFile(path);
        certificates.AddRange(collection.Cast<X509Certificate2>());
    }
    catch (Exception ex) when (ex is IOException or CryptographicException)
    {
        // Best effort: log and continue with whatever was loaded before the failure.
        Log.Warning(ex, "Failed to load client CA bundle from {Path}", path);
    }
    return certificates;
}

View File

@@ -6,6 +6,5 @@
| ATTESTOR-VERIFY-11-202 | DONE (2025-10-19) | Attestor Guild | — | `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. | ✅ `GET /api/v1/rekor/entries/{uuid}` surfaces cached entries with optional backend refresh and handles not-found/refresh flows.<br>`POST /api/v1/rekor/verify` accepts UUID, bundle, or artifact hash inputs; verifies DSSE signatures, Merkle proofs, and checkpoint anchors.<br>✅ Verification output returns `{ok, uuid, index, logURL, checkedAt}` with failure diagnostics for invalid proofs.<br>✅ Unit/integration tests exercise cache hits, backend refresh, invalid bundle/proof scenarios, and checkpoint trust anchor enforcement. |
| ATTESTOR-OBS-11-203 | DONE (2025-10-19) | Attestor Guild | — | Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. | ✅ Structured logs, metrics, and optional traces record submission latency, proof fetch outcomes, verification results, and Rekor error buckets with correlation IDs.<br>✅ mTLS enforcement hardened (peer allowlist, SAN checks, rate limiting) and documented; TLS settings audited for modern ciphers only.<br>✅ Alerting/dashboard pack covers error rates, proof backlog, Redis/Mongo health, and archive job failures; runbook updated.<br>✅ Archive workflow includes retention policy jobs, failure alerts, and periodic verification of stored bundles and proofs. |
> Remark (2025-10-19): Wave 0 prerequisites reviewed (none outstanding); Attestor Guild tasks moved to DOING for execution.
> Remark (2025-10-19): `/rekor/entries` submission service implemented with Mongo/Redis persistence, optional S3 archival, Rekor HTTP client, and OpenTelemetry metrics; verification APIs (`/rekor/entries/{uuid}`, `/rekor/verify`) added with proof refresh and canonical hash checks. Remaining: integrate real Rekor endpoints in staging and expand failure-mode tests.
> Remark (2025-10-19): Added Rekor mock client + integration harness to unblock attestor verification testing without external connectivity. Follow-up tasks to wire staging Rekor and record retry/error behavior still pending.
> Remark (2025-10-19): Wave 0 prerequisites reviewed (none outstanding); ATTESTOR-API-11-201, ATTESTOR-VERIFY-11-202, and ATTESTOR-OBS-11-203 tracked as DOING per Wave 0A kickoff.
> Remark (2025-10-19): Dual-log submissions, signature/proof verification, and observability hardening landed; attestor endpoints now rate-limited per client with correlation-ID logging and updated docs/tests.

View File

@@ -0,0 +1,50 @@
using System;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// Outcome of an attempt to consume a DPoP nonce, carrying issue/expiry
/// metadata when the store has it.
/// </summary>
public sealed class DpopNonceConsumeResult
{
    /// <summary>
    /// Creates a result for a nonce that was found and was still valid.
    /// </summary>
    public static DpopNonceConsumeResult Success(DateTimeOffset issuedAt, DateTimeOffset expiresAt)
        => new(DpopNonceConsumeStatus.Success, issuedAt, expiresAt);

    /// <summary>
    /// Creates a result for a nonce that was found but already past its expiry.
    /// </summary>
    public static DpopNonceConsumeResult Expired(DateTimeOffset? issuedAt, DateTimeOffset expiresAt)
        => new(DpopNonceConsumeStatus.Expired, issuedAt, expiresAt);

    /// <summary>
    /// Creates a result for a nonce that could not be located.
    /// </summary>
    public static DpopNonceConsumeResult NotFound()
        => new(DpopNonceConsumeStatus.NotFound, null, null);

    private DpopNonceConsumeResult(DpopNonceConsumeStatus status, DateTimeOffset? issuedAt, DateTimeOffset? expiresAt)
    {
        Status = status;
        IssuedAt = issuedAt;
        ExpiresAt = expiresAt;
    }

    /// <summary>
    /// Consumption status.
    /// </summary>
    public DpopNonceConsumeStatus Status { get; }

    /// <summary>
    /// Timestamp the nonce was originally issued (when available).
    /// </summary>
    public DateTimeOffset? IssuedAt { get; }

    /// <summary>
    /// Expiry timestamp for the nonce (when available).
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; }
}

/// <summary>
/// Known statuses for nonce consumption attempts.
/// </summary>
public enum DpopNonceConsumeStatus
{
    /// <summary>The nonce was found and consumed before expiry.</summary>
    Success,

    /// <summary>The nonce was found but had already expired.</summary>
    Expired,

    /// <summary>No matching nonce was stored.</summary>
    NotFound
}

View File

@@ -0,0 +1,56 @@
using System;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// Represents the result of issuing a DPoP nonce.
/// </summary>
public sealed class DpopNonceIssueResult
{
    /// <summary>
    /// Creates a successful issuance result carrying the nonce and its expiry.
    /// </summary>
    public static DpopNonceIssueResult Success(string nonce, DateTimeOffset expiresAt)
        => new(DpopNonceIssueStatus.Success, nonce, expiresAt, null);

    /// <summary>
    /// Creates a result indicating the caller exceeded the issuance rate limit.
    /// </summary>
    public static DpopNonceIssueResult RateLimited(string? error = null)
        => new(DpopNonceIssueStatus.RateLimited, null, null, error);

    /// <summary>
    /// Creates a result for any other issuance failure.
    /// </summary>
    public static DpopNonceIssueResult Failure(string? error = null)
        => new(DpopNonceIssueStatus.Failure, null, null, error);

    private DpopNonceIssueResult(DpopNonceIssueStatus status, string? nonce, DateTimeOffset? expiresAt, string? error)
    {
        Status = status;
        Nonce = nonce;
        ExpiresAt = expiresAt;
        Error = error;
    }

    /// <summary>
    /// Issue status.
    /// </summary>
    public DpopNonceIssueStatus Status { get; }

    /// <summary>
    /// Issued nonce when <see cref="Status"/> is <see cref="DpopNonceIssueStatus.Success"/>.
    /// </summary>
    public string? Nonce { get; }

    /// <summary>
    /// Expiry timestamp for the issued nonce (UTC).
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; }

    /// <summary>
    /// Additional failure information, where applicable.
    /// </summary>
    public string? Error { get; }
}

/// <summary>
/// Known statuses for nonce issuance.
/// </summary>
public enum DpopNonceIssueStatus
{
    /// <summary>A nonce was issued.</summary>
    Success,

    /// <summary>Issuance was throttled for the requesting tuple.</summary>
    RateLimited,

    /// <summary>Issuance failed for another reason.</summary>
    Failure
}

View File

@@ -0,0 +1,66 @@
using System;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// Shared helpers for DPoP nonce generation, hashing, and storage-key layout.
/// </summary>
internal static class DpopNonceUtilities
{
    private static readonly char[] Base64Padding = { '=' };

    /// <summary>
    /// Generates a 256-bit cryptographically random nonce encoded as unpadded
    /// base64url text (43 characters).
    /// </summary>
    internal static string GenerateNonce()
    {
        Span<byte> buffer = stackalloc byte[32];
        RandomNumberGenerator.Fill(buffer);
        return Convert.ToBase64String(buffer)
            .TrimEnd(Base64Padding)
            .Replace('+', '-')
            .Replace('/', '_');
    }

    /// <summary>
    /// Computes the SHA-256 digest of the nonce's UTF-8 bytes.
    /// </summary>
    /// <exception cref="ArgumentException"><paramref name="nonce"/> is null or whitespace.</exception>
    internal static byte[] ComputeNonceHash(string nonce)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nonce);
        var bytes = Encoding.UTF8.GetBytes(nonce);
        return SHA256.HashData(bytes);
    }

    /// <summary>
    /// Encodes a hash as upper-case hexadecimal for storage and comparison.
    /// </summary>
    internal static string EncodeHash(ReadOnlySpan<byte> hash)
        => Convert.ToHexString(hash);

    /// <summary>
    /// Builds the storage key <c>dpop-nonce:{audience}:{clientId}:{keyThumbprint}</c>,
    /// trimming surrounding whitespace from each component.
    /// </summary>
    /// <exception cref="ArgumentException">Any component is null or whitespace.</exception>
    internal static string ComputeStorageKey(string audience, string clientId, string keyThumbprint)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);
        ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);

        // Fix: the previous string.Create implementation sized the buffer from the
        // UNtrimmed component lengths while writing the trimmed values, so any
        // component with surrounding whitespace produced a key with embedded '\0'
        // characters. Plain interpolation is exact-length and just as cheap for
        // keys this small.
        return $"dpop-nonce:{audience.Trim()}:{clientId.Trim()}:{keyThumbprint.Trim()}";
    }
}

View File

@@ -0,0 +1,45 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// Provides persistence and validation for DPoP nonces.
/// </summary>
/// <remarks>
/// NOTE(review): the "consume" naming and the shipped implementations suggest nonces
/// are single-use (consumption removes them) — confirm that contract holds for any
/// future implementation before documenting it as guaranteed.
/// </remarks>
public interface IDpopNonceStore
{
    /// <summary>
    /// Issues a nonce tied to the specified audience, client, and DPoP key thumbprint.
    /// </summary>
    /// <param name="audience">Audience the nonce applies to.</param>
    /// <param name="clientId">Client identifier requesting the nonce.</param>
    /// <param name="keyThumbprint">Thumbprint of the DPoP public key.</param>
    /// <param name="ttl">Time-to-live for the nonce.</param>
    /// <param name="maxIssuancePerMinute">Maximum number of nonces that can be issued within a one-minute window for the tuple.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Outcome describing the issued nonce.</returns>
    ValueTask<DpopNonceIssueResult> IssueAsync(
        string audience,
        string clientId,
        string keyThumbprint,
        TimeSpan ttl,
        int maxIssuancePerMinute,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Attempts to consume a nonce previously issued for the tuple.
    /// </summary>
    /// <param name="nonce">Nonce supplied by the client.</param>
    /// <param name="audience">Audience the nonce should match.</param>
    /// <param name="clientId">Client identifier.</param>
    /// <param name="keyThumbprint">Thumbprint of the DPoP public key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Outcome describing whether the nonce was accepted.</returns>
    ValueTask<DpopNonceConsumeResult> TryConsumeAsync(
        string nonce,
        string audience,
        string clientId,
        string keyThumbprint,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,176 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using Microsoft.Extensions.Logging;
using System.Threading.Tasks;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// In-memory implementation of <see cref="IDpopNonceStore"/> suitable for single-host or test environments.
/// </summary>
public sealed class InMemoryDpopNonceStore : IDpopNonceStore
{
    private static readonly TimeSpan IssuanceWindow = TimeSpan.FromMinutes(1);

    // Nonce key -> issue/expiry metadata. Entries are removed on consumption and
    // pruned opportunistically during issuance (see PruneExpiredNonces).
    private readonly ConcurrentDictionary<string, StoredNonce> nonces = new(StringComparer.Ordinal);

    // Per-(audience, client, thumbprint) sliding-window issuance counters.
    private readonly ConcurrentDictionary<string, IssuanceBucket> issuanceBuckets = new(StringComparer.Ordinal);

    private readonly TimeProvider timeProvider;
    private readonly ILogger<InMemoryDpopNonceStore>? logger;

    public InMemoryDpopNonceStore(TimeProvider? timeProvider = null, ILogger<InMemoryDpopNonceStore>? logger = null)
    {
        this.timeProvider = timeProvider ?? TimeProvider.System;
        this.logger = logger;
    }

    /// <inheritdoc />
    public ValueTask<DpopNonceIssueResult> IssueAsync(
        string audience,
        string clientId,
        string keyThumbprint,
        TimeSpan ttl,
        int maxIssuancePerMinute,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);
        ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);

        if (ttl <= TimeSpan.Zero)
        {
            throw new ArgumentOutOfRangeException(nameof(ttl), "Nonce TTL must be greater than zero.");
        }

        if (maxIssuancePerMinute < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(maxIssuancePerMinute), "Max issuance per minute must be at least 1.");
        }

        cancellationToken.ThrowIfCancellationRequested();

        var now = timeProvider.GetUtcNow();

        // Fix: nonces that were issued but never consumed previously stayed in the
        // dictionary forever (only TryConsumeAsync removed entries). Prune expired
        // entries here so memory stays bounded under issue-heavy workloads.
        PruneExpiredNonces(now);

        var bucketKey = BuildBucketKey(audience, clientId, keyThumbprint);
        var bucket = issuanceBuckets.GetOrAdd(bucketKey, static _ => new IssuanceBucket());

        bool allowed;
        lock (bucket.SyncRoot)
        {
            // Drop issuance timestamps older than the one-minute window, then admit
            // the request only if the remaining count is under the limit.
            bucket.Prune(now - IssuanceWindow);
            if (bucket.IssuanceTimes.Count >= maxIssuancePerMinute)
            {
                allowed = false;
            }
            else
            {
                bucket.IssuanceTimes.Enqueue(now);
                allowed = true;
            }
        }

        if (!allowed)
        {
            logger?.LogDebug("DPoP nonce issuance throttled for {BucketKey}.", bucketKey);
            return ValueTask.FromResult(DpopNonceIssueResult.RateLimited("rate_limited"));
        }

        var nonce = GenerateNonce();
        var nonceKey = BuildNonceKey(audience, clientId, keyThumbprint, nonce);
        var expiresAt = now + ttl;

        nonces[nonceKey] = new StoredNonce(now, expiresAt);

        return ValueTask.FromResult(DpopNonceIssueResult.Success(nonce, expiresAt));
    }

    /// <inheritdoc />
    public ValueTask<DpopNonceConsumeResult> TryConsumeAsync(
        string nonce,
        string audience,
        string clientId,
        string keyThumbprint,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nonce);
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);
        ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);

        cancellationToken.ThrowIfCancellationRequested();

        var now = timeProvider.GetUtcNow();
        var nonceKey = BuildNonceKey(audience, clientId, keyThumbprint, nonce);

        // TryRemove makes consumption single-use: a second attempt with the same
        // nonce reports NotFound.
        if (!nonces.TryRemove(nonceKey, out var stored))
        {
            logger?.LogDebug("DPoP nonce {NonceKey} not found during consumption.", nonceKey);
            return ValueTask.FromResult(DpopNonceConsumeResult.NotFound());
        }

        if (stored.ExpiresAt <= now)
        {
            logger?.LogDebug("DPoP nonce {NonceKey} expired at {ExpiresAt:o}.", nonceKey, stored.ExpiresAt);
            return ValueTask.FromResult(DpopNonceConsumeResult.Expired(stored.IssuedAt, stored.ExpiresAt));
        }

        return ValueTask.FromResult(DpopNonceConsumeResult.Success(stored.IssuedAt, stored.ExpiresAt));
    }

    // Removes expired nonce entries. The KeyValuePair TryRemove overload deletes an
    // entry only while it still holds the same StoredNonce instance, so a key that
    // is concurrently re-written with a fresh nonce is left untouched.
    private void PruneExpiredNonces(DateTimeOffset now)
    {
        foreach (var entry in nonces)
        {
            if (entry.Value.ExpiresAt <= now)
            {
                nonces.TryRemove(entry);
            }
        }
    }

    // Bucket keys are case-insensitive (lower-cased) so equivalent tuples share a
    // rate-limit window regardless of caller casing.
    private static string BuildBucketKey(string audience, string clientId, string keyThumbprint)
        => $"{audience.Trim().ToLowerInvariant()}::{clientId.Trim().ToLowerInvariant()}::{keyThumbprint.Trim().ToLowerInvariant()}";

    // Nonce keys append a base64url SHA-256 digest of the nonce so the clear-text
    // nonce never appears in the dictionary keys.
    private static string BuildNonceKey(string audience, string clientId, string keyThumbprint, string nonce)
    {
        var bucketKey = BuildBucketKey(audience, clientId, keyThumbprint);
        var digest = ComputeSha256(nonce);
        return $"{bucketKey}::{digest}";
    }

    private static string ComputeSha256(string value)
    {
        var bytes = Encoding.UTF8.GetBytes(value);
        var hash = SHA256.HashData(bytes);
        return Base64UrlEncode(hash);
    }

    private static string Base64UrlEncode(ReadOnlySpan<byte> bytes)
    {
        return Convert.ToBase64String(bytes)
            .TrimEnd('=')
            .Replace('+', '-')
            .Replace('/', '_');
    }

    // 256-bit random nonce encoded as unpadded base64url.
    private static string GenerateNonce()
    {
        Span<byte> buffer = stackalloc byte[32];
        RandomNumberGenerator.Fill(buffer);
        return Base64UrlEncode(buffer);
    }

    // Immutable issue/expiry metadata for a stored nonce.
    private sealed class StoredNonce
    {
        internal StoredNonce(DateTimeOffset issuedAt, DateTimeOffset expiresAt)
        {
            IssuedAt = issuedAt;
            ExpiresAt = expiresAt;
        }

        internal DateTimeOffset IssuedAt { get; }

        internal DateTimeOffset ExpiresAt { get; }
    }

    // Sliding-window record of issuance timestamps for one tuple; guarded by SyncRoot.
    private sealed class IssuanceBucket
    {
        internal object SyncRoot { get; } = new();

        internal Queue<DateTimeOffset> IssuanceTimes { get; } = new();

        internal void Prune(DateTimeOffset threshold)
        {
            while (IssuanceTimes.Count > 0 && IssuanceTimes.Peek() < threshold)
            {
                IssuanceTimes.Dequeue();
            }
        }
    }
}

View File

@@ -0,0 +1,138 @@
using System;
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using StackExchange.Redis;
namespace StellaOps.Auth.Security.Dpop;
/// <summary>
/// Redis-backed implementation of <see cref="IDpopNonceStore"/> that supports multi-node deployments.
/// </summary>
/// <remarks>
/// NOTE(review): only one nonce is persisted per (audience, client, thumbprint)
/// tuple — issuing a new nonce overwrites any outstanding one (the in-memory store
/// keys entries per nonce instead). Confirm callers never need multiple concurrently
/// valid nonces for the same tuple.
/// </remarks>
public sealed class RedisDpopNonceStore : IDpopNonceStore
{
    // Lua script executed server-side: deletes the nonce key only when its stored
    // hash equals the supplied value, so compare-and-delete is atomic in Redis.
    private const string ConsumeScript = @"
local value = redis.call('GET', KEYS[1])
if value ~= false and value == ARGV[1] then
    redis.call('DEL', KEYS[1])
    return 1
end
return 0";

    private readonly IConnectionMultiplexer connection;
    private readonly TimeProvider timeProvider;

    /// <summary>
    /// Creates the store over an existing Redis connection multiplexer.
    /// </summary>
    public RedisDpopNonceStore(IConnectionMultiplexer connection, TimeProvider? timeProvider = null)
    {
        this.connection = connection ?? throw new ArgumentNullException(nameof(connection));
        this.timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async ValueTask<DpopNonceIssueResult> IssueAsync(
        string audience,
        string clientId,
        string keyThumbprint,
        TimeSpan ttl,
        int maxIssuancePerMinute,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);
        ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);

        if (ttl <= TimeSpan.Zero)
        {
            throw new ArgumentOutOfRangeException(nameof(ttl), "Nonce TTL must be greater than zero.");
        }

        if (maxIssuancePerMinute < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(maxIssuancePerMinute), "Max issuance per minute must be at least 1.");
        }

        cancellationToken.ThrowIfCancellationRequested();

        var database = connection.GetDatabase();
        var issuedAt = timeProvider.GetUtcNow();
        var baseKey = DpopNonceUtilities.ComputeStorageKey(audience, clientId, keyThumbprint);
        var nonceKey = (RedisKey)baseKey;
        var metadataKey = (RedisKey)(baseKey + ":meta");
        var rateKey = (RedisKey)(baseKey + ":rate");

        // Fixed one-minute window: the first increment creates the counter and stamps
        // its expiry. NOTE(review): if the process fails between INCR and EXPIRE the
        // counter never expires and this tuple stays throttled — confirm acceptable
        // or consider making the two calls atomic (script/transaction).
        var rateCount = await database.StringIncrementAsync(rateKey, flags: CommandFlags.DemandMaster).ConfigureAwait(false);
        if (rateCount == 1)
        {
            await database.KeyExpireAsync(rateKey, TimeSpan.FromMinutes(1), CommandFlags.DemandMaster).ConfigureAwait(false);
        }

        if (rateCount > maxIssuancePerMinute)
        {
            return DpopNonceIssueResult.RateLimited("rate_limited");
        }

        // Only the hex-encoded SHA-256 of the nonce is persisted; the clear-text
        // nonce is returned to the caller and never stored server-side.
        var nonce = DpopNonceUtilities.GenerateNonce();
        var hash = (RedisValue)DpopNonceUtilities.EncodeHash(DpopNonceUtilities.ComputeNonceHash(nonce));
        var expiresAt = issuedAt + ttl;

        // When.Always: a re-issue replaces any outstanding nonce for this tuple.
        await database.StringSetAsync(nonceKey, hash, ttl, When.Always, CommandFlags.DemandMaster).ConfigureAwait(false);

        // Metadata is "issuedTicks|ttlTicks" so consumption can report issue/expiry.
        var metadataValue = FormattableString.Invariant($"{issuedAt.UtcTicks}|{ttl.Ticks}");
        await database.StringSetAsync(metadataKey, metadataValue, ttl, When.Always, CommandFlags.DemandMaster).ConfigureAwait(false);

        return DpopNonceIssueResult.Success(nonce, expiresAt);
    }

    /// <inheritdoc />
    public async ValueTask<DpopNonceConsumeResult> TryConsumeAsync(
        string nonce,
        string audience,
        string clientId,
        string keyThumbprint,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nonce);
        ArgumentException.ThrowIfNullOrWhiteSpace(audience);
        ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);

        cancellationToken.ThrowIfCancellationRequested();

        var database = connection.GetDatabase();
        var baseKey = DpopNonceUtilities.ComputeStorageKey(audience, clientId, keyThumbprint);
        var nonceKey = (RedisKey)baseKey;
        var metadataKey = (RedisKey)(baseKey + ":meta");
        var hash = (RedisValue)DpopNonceUtilities.EncodeHash(DpopNonceUtilities.ComputeNonceHash(nonce));

        // Atomic compare-and-delete: returns 1 only if the stored hash matched and
        // the key was removed, which makes consumption single-use.
        var rawResult = await database.ScriptEvaluateAsync(
            ConsumeScript,
            new[] { nonceKey },
            new RedisValue[] { hash }).ConfigureAwait(false);

        if (rawResult.IsNull || (long)rawResult != 1)
        {
            return DpopNonceConsumeResult.NotFound();
        }

        var metadata = await database.StringGetAsync(metadataKey).ConfigureAwait(false);
        await database.KeyDeleteAsync(metadataKey, CommandFlags.DemandMaster).ConfigureAwait(false);

        if (!metadata.IsNull)
        {
            // Parse "issuedTicks|ttlTicks" written by IssueAsync.
            var parts = metadata.ToString()
                .Split('|', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
            if (parts.Length == 2 &&
                long.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var issuedTicks) &&
                long.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out var ttlTicks))
            {
                var issuedAt = new DateTimeOffset(issuedTicks, TimeSpan.Zero);
                var expiresAt = issuedAt + TimeSpan.FromTicks(ttlTicks);
                // Redis key TTL should normally expire the nonce first; this check
                // guards against the key outliving its recorded expiry.
                return expiresAt <= timeProvider.GetUtcNow()
                    ? DpopNonceConsumeResult.Expired(issuedAt, expiresAt)
                    : DpopNonceConsumeResult.Success(issuedAt, expiresAt);
            }
        }

        // Metadata missing or unparsable: the nonce hash itself matched, so report
        // success with "now" as a best-effort placeholder for both timestamps.
        return DpopNonceConsumeResult.Success(timeProvider.GetUtcNow(), timeProvider.GetUtcNow());
    }
}

View File

@@ -29,6 +29,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.IdentityModel.Tokens" Version="7.2.0" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.2.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="Microsoft.SourceLink.GitLab" Version="8.0.0" PrivateAssets="All" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,4 +1,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
@@ -43,6 +45,74 @@ public class StandardClientProvisioningStoreTests
Assert.Contains("scopeA", descriptor.AllowedScopes);
}
[Fact]
public async Task CreateOrUpdateAsync_StoresAudiences()
{
var store = new TrackingClientStore();
var revocations = new TrackingRevocationStore();
var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System);
var registration = new AuthorityClientRegistration(
clientId: "signer",
confidential: false,
displayName: "Signer",
clientSecret: null,
allowedGrantTypes: new[] { "client_credentials" },
allowedScopes: new[] { "signer.sign" },
allowedAudiences: new[] { "attestor", "signer" });
var result = await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None);
Assert.True(result.Succeeded);
var document = Assert.Contains("signer", store.Documents);
Assert.Equal("attestor signer", document.Value.Properties[AuthorityClientMetadataKeys.Audiences]);
var descriptor = await provisioning.FindByClientIdAsync("signer", CancellationToken.None);
Assert.NotNull(descriptor);
Assert.Equal(new[] { "attestor", "signer" }, descriptor!.Audiences.OrderBy(value => value, StringComparer.Ordinal));
}
[Fact]
public async Task CreateOrUpdateAsync_MapsCertificateBindings()
{
var store = new TrackingClientStore();
var revocations = new TrackingRevocationStore();
var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System);
var bindingRegistration = new AuthorityClientCertificateBindingRegistration(
thumbprint: "aa:bb:cc:dd",
serialNumber: "01ff",
subject: "CN=mtls-client",
issuer: "CN=test-ca",
subjectAlternativeNames: new[] { "client.mtls.test", "spiffe://client" },
notBefore: DateTimeOffset.UtcNow.AddMinutes(-5),
notAfter: DateTimeOffset.UtcNow.AddHours(1),
label: "primary");
var registration = new AuthorityClientRegistration(
clientId: "mtls-client",
confidential: true,
displayName: "MTLS Client",
clientSecret: "secret",
allowedGrantTypes: new[] { "client_credentials" },
allowedScopes: new[] { "signer.sign" },
allowedAudiences: new[] { "signer" },
certificateBindings: new[] { bindingRegistration });
await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None);
var document = Assert.Contains("mtls-client", store.Documents).Value;
var binding = Assert.Single(document.CertificateBindings);
Assert.Equal("AABBCCDD", binding.Thumbprint);
Assert.Equal("01ff", binding.SerialNumber);
Assert.Equal("CN=mtls-client", binding.Subject);
Assert.Equal("CN=test-ca", binding.Issuer);
Assert.Equal(new[] { "client.mtls.test", "spiffe://client" }, binding.SubjectAlternativeNames);
Assert.Equal(bindingRegistration.NotBefore, binding.NotBefore);
Assert.Equal(bindingRegistration.NotAfter, binding.NotAfter);
Assert.Equal("primary", binding.Label);
}
private sealed class TrackingClientStore : IAuthorityClientStore
{
public Dictionary<string, AuthorityClientDocument> Documents { get; } = new(StringComparer.OrdinalIgnoreCase);

View File

@@ -50,11 +50,21 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
document.RedirectUris = registration.RedirectUris.Select(static uri => uri.ToString()).ToList();
document.PostLogoutRedirectUris = registration.PostLogoutRedirectUris.Select(static uri => uri.ToString()).ToList();
document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = string.Join(" ", registration.AllowedGrantTypes);
document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = string.Join(" ", registration.AllowedScopes);
document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = JoinValues(registration.AllowedGrantTypes);
document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = JoinValues(registration.AllowedScopes);
document.Properties[AuthorityClientMetadataKeys.Audiences] = JoinValues(registration.AllowedAudiences);
document.Properties[AuthorityClientMetadataKeys.RedirectUris] = string.Join(" ", document.RedirectUris);
document.Properties[AuthorityClientMetadataKeys.PostLogoutRedirectUris] = string.Join(" ", document.PostLogoutRedirectUris);
if (registration.CertificateBindings is not null)
{
var now = clock.GetUtcNow();
document.CertificateBindings = registration.CertificateBindings
.Select(binding => MapCertificateBinding(binding, now))
.OrderBy(binding => binding.Thumbprint, StringComparer.Ordinal)
.ToList();
}
foreach (var (key, value) in registration.Properties)
{
document.Properties[key] = value;
@@ -142,12 +152,15 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
.Cast<Uri>()
.ToArray();
var audiences = Split(document.Properties, AuthorityClientMetadataKeys.Audiences);
return new AuthorityClientDescriptor(
document.ClientId,
document.DisplayName,
string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase),
allowedGrantTypes,
allowedScopes,
audiences,
redirectUris,
postLogoutUris,
document.Properties);
@@ -163,6 +176,47 @@ internal sealed class StandardClientProvisioningStore : IClientProvisioningStore
return value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
}
private static string JoinValues(IReadOnlyCollection<string> values)
{
if (values is null || values.Count == 0)
{
return string.Empty;
}
return string.Join(
" ",
values
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.OrderBy(static value => value, StringComparer.Ordinal));
}
private static AuthorityClientCertificateBinding MapCertificateBinding(
AuthorityClientCertificateBindingRegistration registration,
DateTimeOffset now)
{
var subjectAlternativeNames = registration.SubjectAlternativeNames.Count == 0
? new List<string>()
: registration.SubjectAlternativeNames
.Select(name => name.Trim())
.OrderBy(name => name, StringComparer.OrdinalIgnoreCase)
.ToList();
return new AuthorityClientCertificateBinding
{
Thumbprint = registration.Thumbprint,
SerialNumber = registration.SerialNumber,
Subject = registration.Subject,
Issuer = registration.Issuer,
SubjectAlternativeNames = subjectAlternativeNames,
NotBefore = registration.NotBefore,
NotAfter = registration.NotAfter,
Label = registration.Label,
CreatedAt = now,
UpdatedAt = now
};
}
private static string? NormalizeSenderConstraint(string? value)
{
if (string.IsNullOrWhiteSpace(value))

View File

@@ -5,10 +5,10 @@
| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. |
| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. |
| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. |
| SEC2.PLG | DOING (2025-10-14) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
| SEC3.PLG | DOING (2025-10-14) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
| SEC2.PLG | DOING (2025-10-14) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. <br>⏳ Awaiting AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 completion to unlock Wave0B verification paths. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
| SEC3.PLG | DOING (2025-10-14) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). <br>⏳ Pending AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 so limiter telemetry contract matches final authority surface. | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. |
| SEC5.PLG | DOING (2025-10-14) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
| SEC5.PLG | DOING (2025-10-14) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. <br>⏳ Final documentation depends on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 outcomes. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. <br>⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. |
| PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. |
| PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. |
@@ -16,3 +16,5 @@
> Update statuses to DOING/DONE/BLOCKED as you make progress. Always run `dotnet test` for touched projects before marking DONE.
> Remark (2025-10-13, PLG6.DOC/PLG6.DIAGRAM): Security Guild delivered `docs/security/rate-limits.md`; Docs team can lift Section 3 (tuning table + alerts) into the developer guide diagrams when rendering assets.
> Check-in (2025-10-19): Wave0A dependencies (AUTH-DPOP-11-001, AUTH-MTLS-11-002, PLUGIN-DI-08-001) still open, so SEC2/SEC3/SEC5 remain in progress without new scope until upstream limiter updates land.

View File

@@ -7,6 +7,7 @@ public static class AuthorityClientMetadataKeys
{
public const string AllowedGrantTypes = "allowedGrantTypes";
public const string AllowedScopes = "allowedScopes";
public const string Audiences = "audiences";
public const string RedirectUris = "redirectUris";
public const string PostLogoutRedirectUris = "postLogoutRedirectUris";
public const string SenderConstraint = "senderConstraint";

View File

@@ -632,15 +632,13 @@ public sealed class AuthorityClaimsEnrichmentContext
/// </summary>
public sealed record AuthorityClientDescriptor
{
/// <summary>
/// Initialises a new client descriptor.
/// </summary>
public AuthorityClientDescriptor(
string clientId,
string? displayName,
bool confidential,
IReadOnlyCollection<string>? allowedGrantTypes = null,
IReadOnlyCollection<string>? allowedScopes = null,
IReadOnlyCollection<string>? allowedAudiences = null,
IReadOnlyCollection<Uri>? redirectUris = null,
IReadOnlyCollection<Uri>? postLogoutRedirectUris = null,
IReadOnlyDictionary<string, string?>? properties = null)
@@ -648,8 +646,9 @@ public sealed record AuthorityClientDescriptor
ClientId = ValidateRequired(clientId, nameof(clientId));
DisplayName = displayName;
Confidential = confidential;
AllowedGrantTypes = allowedGrantTypes is null ? Array.Empty<string>() : allowedGrantTypes.ToArray();
AllowedScopes = allowedScopes is null ? Array.Empty<string>() : allowedScopes.ToArray();
AllowedGrantTypes = Normalize(allowedGrantTypes);
AllowedScopes = Normalize(allowedScopes);
AllowedAudiences = Normalize(allowedAudiences);
RedirectUris = redirectUris is null ? Array.Empty<Uri>() : redirectUris.ToArray();
PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty<Uri>() : postLogoutRedirectUris.ToArray();
Properties = properties is null
@@ -657,60 +656,87 @@ public sealed record AuthorityClientDescriptor
: new Dictionary<string, string?>(properties, StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Unique client identifier.
/// </summary>
public string ClientId { get; }
/// <summary>
/// Optional display name.
/// </summary>
public string? DisplayName { get; }
/// <summary>
/// Indicates whether the client is confidential (requires secret).
/// </summary>
public bool Confidential { get; }
/// <summary>
/// Permitted OAuth grant types.
/// </summary>
public IReadOnlyCollection<string> AllowedGrantTypes { get; }
/// <summary>
/// Permitted scopes.
/// </summary>
public IReadOnlyCollection<string> AllowedScopes { get; }
/// <summary>
/// Registered redirect URIs.
/// </summary>
public IReadOnlyCollection<string> AllowedAudiences { get; }
public IReadOnlyCollection<Uri> RedirectUris { get; }
/// <summary>
/// Registered post-logout redirect URIs.
/// </summary>
public IReadOnlyCollection<Uri> PostLogoutRedirectUris { get; }
/// <summary>
/// Additional plugin-defined metadata.
/// </summary>
public IReadOnlyDictionary<string, string?> Properties { get; }
private static IReadOnlyCollection<string> Normalize(IReadOnlyCollection<string>? values)
=> values is null || values.Count == 0
? Array.Empty<string>()
: values
.Where(value => !string.IsNullOrWhiteSpace(value))
.Select(value => value.Trim())
.Distinct(StringComparer.Ordinal)
.ToArray();
private static string ValidateRequired(string value, string paramName)
=> string.IsNullOrWhiteSpace(value)
? throw new ArgumentException("Value cannot be null or whitespace.", paramName)
: value;
}
/// <summary>
/// Certificate binding payload used when provisioning mTLS-bound clients through plugins.
/// </summary>
public sealed record AuthorityClientCertificateBindingRegistration
{
    /// <summary>
    /// Initialises a new certificate binding registration.
    /// </summary>
    /// <param name="thumbprint">Required certificate thumbprint; normalised to uppercase hex with ":" and spaces removed.</param>
    /// <param name="serialNumber">Optional certificate serial number.</param>
    /// <param name="subject">Optional certificate subject distinguished name.</param>
    /// <param name="issuer">Optional issuer distinguished name.</param>
    /// <param name="subjectAlternativeNames">Optional SAN values; blank entries dropped, duplicates removed case-insensitively.</param>
    /// <param name="notBefore">Optional start of the binding validity window.</param>
    /// <param name="notAfter">Optional end of the binding validity window.</param>
    /// <param name="label">Optional operator-facing label.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="thumbprint"/> is null or whitespace.</exception>
    public AuthorityClientCertificateBindingRegistration(
        string thumbprint,
        string? serialNumber = null,
        string? subject = null,
        string? issuer = null,
        IReadOnlyCollection<string>? subjectAlternativeNames = null,
        DateTimeOffset? notBefore = null,
        DateTimeOffset? notAfter = null,
        string? label = null)
    {
        Thumbprint = NormalizeThumbprint(thumbprint);
        SerialNumber = Normalize(serialNumber);
        Subject = Normalize(subject);
        Issuer = Normalize(issuer);
        // Copy to an array so the collection is immutable after construction.
        SubjectAlternativeNames = subjectAlternativeNames is null || subjectAlternativeNames.Count == 0
            ? Array.Empty<string>()
            : subjectAlternativeNames
                .Where(value => !string.IsNullOrWhiteSpace(value))
                .Select(value => value.Trim())
                .Distinct(StringComparer.OrdinalIgnoreCase)
                .ToArray();
        NotBefore = notBefore;
        NotAfter = notAfter;
        Label = Normalize(label);
    }

    /// <summary>Normalised certificate thumbprint (uppercase hex, no separators).</summary>
    public string Thumbprint { get; }

    /// <summary>Optional certificate serial number (trimmed), or null.</summary>
    public string? SerialNumber { get; }

    /// <summary>Optional certificate subject (trimmed), or null.</summary>
    public string? Subject { get; }

    /// <summary>Optional certificate issuer (trimmed), or null.</summary>
    public string? Issuer { get; }

    /// <summary>De-duplicated, trimmed subject alternative names; empty when none supplied.</summary>
    public IReadOnlyCollection<string> SubjectAlternativeNames { get; }

    /// <summary>Optional binding validity start.</summary>
    public DateTimeOffset? NotBefore { get; }

    /// <summary>Optional binding validity end.</summary>
    public DateTimeOffset? NotAfter { get; }

    /// <summary>Optional operator-facing label (trimmed), or null.</summary>
    public string? Label { get; }

    // Strips ":"/" " separators and upper-cases so thumbprints compare consistently.
    private static string NormalizeThumbprint(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException("Thumbprint is required.", nameof(value));
        }
        return value
            .Replace(":", string.Empty, StringComparison.Ordinal)
            .Replace(" ", string.Empty, StringComparison.Ordinal)
            .ToUpperInvariant();
    }

    // Trims optional metadata; whitespace-only input collapses to null.
    private static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
public sealed record AuthorityClientRegistration
{
/// <summary>
/// Initialises a new registration.
/// </summary>
public AuthorityClientRegistration(
string clientId,
bool confidential,
@@ -718,9 +744,11 @@ public sealed record AuthorityClientRegistration
string? clientSecret,
IReadOnlyCollection<string>? allowedGrantTypes = null,
IReadOnlyCollection<string>? allowedScopes = null,
IReadOnlyCollection<string>? allowedAudiences = null,
IReadOnlyCollection<Uri>? redirectUris = null,
IReadOnlyCollection<Uri>? postLogoutRedirectUris = null,
IReadOnlyDictionary<string, string?>? properties = null)
IReadOnlyDictionary<string, string?>? properties = null,
IReadOnlyCollection<AuthorityClientCertificateBindingRegistration>? certificateBindings = null)
{
ClientId = ValidateRequired(clientId, nameof(clientId));
Confidential = confidential;
@@ -728,65 +756,42 @@ public sealed record AuthorityClientRegistration
ClientSecret = confidential
? ValidateRequired(clientSecret ?? string.Empty, nameof(clientSecret))
: clientSecret;
AllowedGrantTypes = allowedGrantTypes is null ? Array.Empty<string>() : allowedGrantTypes.ToArray();
AllowedScopes = allowedScopes is null ? Array.Empty<string>() : allowedScopes.ToArray();
AllowedGrantTypes = Normalize(allowedGrantTypes);
AllowedScopes = Normalize(allowedScopes);
AllowedAudiences = Normalize(allowedAudiences);
RedirectUris = redirectUris is null ? Array.Empty<Uri>() : redirectUris.ToArray();
PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty<Uri>() : postLogoutRedirectUris.ToArray();
Properties = properties is null
? new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
: new Dictionary<string, string?>(properties, StringComparer.OrdinalIgnoreCase);
CertificateBindings = certificateBindings is null
? Array.Empty<AuthorityClientCertificateBindingRegistration>()
: certificateBindings.ToArray();
}
/// <summary>
/// Unique client identifier.
/// </summary>
public string ClientId { get; }
/// <summary>
/// Indicates whether the client is confidential (requires secret handling).
/// </summary>
public bool Confidential { get; }
/// <summary>
/// Optional display name.
/// </summary>
public string? DisplayName { get; }
/// <summary>
/// Optional raw client secret (hashed by the plugin for storage).
/// </summary>
public string? ClientSecret { get; init; }
/// <summary>
/// Grant types to enable.
/// </summary>
public IReadOnlyCollection<string> AllowedGrantTypes { get; }
/// <summary>
/// Scopes assigned to the client.
/// </summary>
public IReadOnlyCollection<string> AllowedScopes { get; }
/// <summary>
/// Redirect URIs permitted for the client.
/// </summary>
public IReadOnlyCollection<string> AllowedAudiences { get; }
public IReadOnlyCollection<Uri> RedirectUris { get; }
/// <summary>
/// Post-logout redirect URIs.
/// </summary>
public IReadOnlyCollection<Uri> PostLogoutRedirectUris { get; }
/// <summary>
/// Additional metadata for the plugin.
/// </summary>
public IReadOnlyDictionary<string, string?> Properties { get; }
public IReadOnlyCollection<AuthorityClientCertificateBindingRegistration> CertificateBindings { get; }
/// <summary>
/// Creates a copy of the registration with the provided client secret.
/// </summary>
public AuthorityClientRegistration WithClientSecret(string? clientSecret)
=> new(ClientId, Confidential, DisplayName, clientSecret, AllowedGrantTypes, AllowedScopes, RedirectUris, PostLogoutRedirectUris, Properties);
=> new(ClientId, Confidential, DisplayName, clientSecret, AllowedGrantTypes, AllowedScopes, AllowedAudiences, RedirectUris, PostLogoutRedirectUris, Properties, CertificateBindings);
private static IReadOnlyCollection<string> Normalize(IReadOnlyCollection<string>? values)
=> values is null || values.Count == 0
? Array.Empty<string>()
: values
.Where(value => !string.IsNullOrWhiteSpace(value))
.Select(value => value.Trim())
.Distinct(StringComparer.Ordinal)
.ToArray();
private static string ValidateRequired(string value, string paramName)
=> string.IsNullOrWhiteSpace(value)

View File

@@ -62,6 +62,18 @@ public sealed class AuthorityTokenDocument
[BsonIgnoreIfNull]
public string? RevokedReasonDescription { get; set; }
[BsonElement("senderConstraint")]
[BsonIgnoreIfNull]
public string? SenderConstraint { get; set; }
[BsonElement("senderKeyThumbprint")]
[BsonIgnoreIfNull]
public string? SenderKeyThumbprint { get; set; }
[BsonElement("senderNonce")]
[BsonIgnoreIfNull]
public string? SenderNonce { get; set; }
[BsonElement("devices")]
[BsonIgnoreIfNull]

View File

@@ -27,7 +27,13 @@ internal sealed class AuthorityTokenCollectionInitializer : IAuthorityCollection
Builders<AuthorityTokenDocument>.IndexKeys
.Ascending(t => t.Status)
.Ascending(t => t.RevokedAt),
new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_status_revokedAt" })
new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_status_revokedAt" }),
new(
Builders<AuthorityTokenDocument>.IndexKeys.Ascending(t => t.SenderConstraint),
new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_sender_constraint", Sparse = true }),
new(
Builders<AuthorityTokenDocument>.IndexKeys.Ascending(t => t.SenderKeyThumbprint),
new CreateIndexOptions<AuthorityTokenDocument> { Name = "token_sender_thumbprint", Sparse = true })
};
var expirationFilter = Builders<AuthorityTokenDocument>.Filter.Exists(t => t.ExpiresAt, true);

View File

@@ -1,9 +1,21 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IdentityModel.Tokens.Jwt;
using System.Security.Claims;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Primitives;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Configuration;
using StellaOps.Authority.Security;
using StellaOps.Auth.Security.Dpop;
using OpenIddict.Abstractions;
using OpenIddict.Extensions;
using OpenIddict.Server;
@@ -44,6 +56,8 @@ public class ClientCredentialsHandlersTests
new TestAuthEventSink(),
new TestRateLimiterMetadataAccessor(),
TimeProvider.System,
new NoopCertificateValidator(),
new HttpContextAccessor(),
NullLogger<ValidateClientCredentialsHandler>.Instance);
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write");
@@ -72,6 +86,8 @@ public class ClientCredentialsHandlersTests
new TestAuthEventSink(),
new TestRateLimiterMetadataAccessor(),
TimeProvider.System,
new NoopCertificateValidator(),
new HttpContextAccessor(),
NullLogger<ValidateClientCredentialsHandler>.Instance);
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
@@ -104,6 +120,8 @@ public class ClientCredentialsHandlersTests
sink,
new TestRateLimiterMetadataAccessor(),
TimeProvider.System,
new NoopCertificateValidator(),
new HttpContextAccessor(),
NullLogger<ValidateClientCredentialsHandler>.Instance);
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
@@ -117,6 +135,315 @@ public class ClientCredentialsHandlersTests
string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase));
}
[Fact]
public async Task ValidateDpopProof_AllowsSenderConstrainedClient()
{
    // Arrange: authority with DPoP sender constraints enabled and nonce challenges disabled,
    // so a valid proof alone is sufficient.
    var options = new StellaOpsAuthorityOptions
    {
        Issuer = new Uri("https://authority.test")
    };
    options.Security.SenderConstraints.Dpop.Enabled = true;
    options.Security.SenderConstraints.Dpop.Nonce.Enabled = false;

    var clientDocument = CreateClient(
        secret: "s3cr3t!",
        allowedGrantTypes: "client_credentials",
        allowedScopes: "jobs:read");
    clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop;
    clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop;

    // Generate the client's EC key and compute the JWK thumbprint expected in the cnf claim.
    using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    var securityKey = new ECDsaSecurityKey(ecdsa)
    {
        KeyId = Guid.NewGuid().ToString("N")
    };
    var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey);
    var expectedThumbprint = ConvertThumbprintToString(jwk.ComputeJwkThumbprint());

    var clientStore = new TestClientStore(clientDocument);
    var auditSink = new TestAuthEventSink();
    var rateMetadata = new TestRateLimiterMetadataAccessor();
    var dpopValidator = new DpopProofValidator(
        Options.Create(new DpopValidationOptions()),
        new InMemoryDpopReplayCache(TimeProvider.System),
        TimeProvider.System,
        NullLogger<DpopProofValidator>.Instance);
    var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger<InMemoryDpopNonceStore>.Instance);
    var dpopHandler = new ValidateDpopProofHandler(
        options,
        clientStore,
        dpopValidator,
        nonceStore,
        rateMetadata,
        auditSink,
        TimeProvider.System,
        TestActivitySource,
        NullLogger<ValidateDpopProofHandler>.Instance);

    // Build a token request carrying a fresh DPoP proof bound to POST https://authority.test/token.
    var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
    transaction.Options = new OpenIddictServerOptions();

    var httpContext = new DefaultHttpContext();
    httpContext.Request.Method = "POST";
    httpContext.Request.Scheme = "https";
    httpContext.Request.Host = new HostString("authority.test");
    httpContext.Request.Path = "/token";

    var now = TimeProvider.System.GetUtcNow();
    var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds());
    httpContext.Request.Headers["DPoP"] = proof;
    transaction.Properties[typeof(HttpContext).FullName!] = httpContext;

    var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);

    // Act/Assert: the DPoP handler accepts the proof for the sender-constrained client.
    await dpopHandler.HandleAsync(validateContext);
    Assert.False(validateContext.IsRejected);

    // Client-credential validation also succeeds on the same transaction.
    var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
    var validateHandler = new ValidateClientCredentialsHandler(
        clientStore,
        registry,
        TestActivitySource,
        auditSink,
        rateMetadata,
        TimeProvider.System,
        new NoopCertificateValidator(),
        new HttpContextAccessor(),
        NullLogger<ValidateClientCredentialsHandler>.Instance);
    await validateHandler.HandleAsync(validateContext);
    Assert.False(validateContext.IsRejected);

    // Issue and persist the token so the stored sender-constraint metadata can be inspected.
    var tokenStore = new TestTokenStore();
    var sessionAccessor = new NullMongoSessionAccessor();
    var handleHandler = new HandleClientCredentialsHandler(
        registry,
        tokenStore,
        sessionAccessor,
        TimeProvider.System,
        TestActivitySource,
        NullLogger<HandleClientCredentialsHandler>.Instance);
    var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction);
    await handleHandler.HandleAsync(handleContext);
    Assert.True(handleContext.IsRequestHandled);

    var persistHandler = new PersistTokensHandler(
        tokenStore,
        sessionAccessor,
        TimeProvider.System,
        TestActivitySource,
        NullLogger<PersistTokensHandler>.Instance);
    var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
    {
        Principal = handleContext.Principal,
        AccessTokenPrincipal = handleContext.Principal
    };
    await persistHandler.HandleAsync(signInContext);

    // The cnf claim must carry the jkt thumbprint of the client's DPoP key...
    var confirmationClaim = handleContext.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType);
    Assert.False(string.IsNullOrWhiteSpace(confirmationClaim));
    using (var confirmationJson = JsonDocument.Parse(confirmationClaim!))
    {
        Assert.Equal(expectedThumbprint, confirmationJson.RootElement.GetProperty("jkt").GetString());
    }

    // ...and the persisted token records both the constraint kind and the key thumbprint.
    Assert.NotNull(tokenStore.Inserted);
    Assert.Equal(AuthoritySenderConstraintKinds.Dpop, tokenStore.Inserted!.SenderConstraint);
    Assert.Equal(expectedThumbprint, tokenStore.Inserted!.SenderKeyThumbprint);
}
[Fact]
public async Task ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing()
{
    // Arrange: DPoP nonce challenges enabled and required for the "signer" audience,
    // so a proof without a nonce claim must be rejected with a challenge.
    var options = new StellaOpsAuthorityOptions
    {
        Issuer = new Uri("https://authority.test")
    };
    options.Security.SenderConstraints.Dpop.Enabled = true;
    options.Security.SenderConstraints.Dpop.Nonce.Enabled = true;
    options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Clear();
    options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer");
    options.Signing.ActiveKeyId = "test-key";
    options.Signing.KeyPath = "/tmp/test-key.pem";
    options.Storage.ConnectionString = "mongodb://localhost/test";
    Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences);

    var clientDocument = CreateClient(
        secret: "s3cr3t!",
        allowedGrantTypes: "client_credentials",
        allowedScopes: "jobs:read",
        allowedAudiences: "signer");
    clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop;
    clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop;

    using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    var securityKey = new ECDsaSecurityKey(ecdsa)
    {
        KeyId = Guid.NewGuid().ToString("N")
    };

    var clientStore = new TestClientStore(clientDocument);
    var auditSink = new TestAuthEventSink();
    var rateMetadata = new TestRateLimiterMetadataAccessor();
    var dpopValidator = new DpopProofValidator(
        Options.Create(new DpopValidationOptions()),
        new InMemoryDpopReplayCache(TimeProvider.System),
        TimeProvider.System,
        NullLogger<DpopProofValidator>.Instance);
    var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger<InMemoryDpopNonceStore>.Instance);
    var dpopHandler = new ValidateDpopProofHandler(
        options,
        clientStore,
        dpopValidator,
        nonceStore,
        rateMetadata,
        auditSink,
        TimeProvider.System,
        TestActivitySource,
        NullLogger<ValidateDpopProofHandler>.Instance);

    var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
    transaction.Options = new OpenIddictServerOptions();

    var httpContext = new DefaultHttpContext();
    httpContext.Request.Method = "POST";
    httpContext.Request.Scheme = "https";
    httpContext.Request.Host = new HostString("authority.test");
    httpContext.Request.Path = "/token";

    // The proof is otherwise valid but deliberately omits the nonce claim.
    var now = TimeProvider.System.GetUtcNow();
    var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds());
    httpContext.Request.Headers["DPoP"] = proof;
    transaction.Properties[typeof(HttpContext).FullName!] = httpContext;

    var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);

    // Act.
    await dpopHandler.HandleAsync(validateContext);

    // Assert: rejected as invalid_client, with a use_dpop_nonce challenge on the single
    // WWW-Authenticate header, a DPoP-Nonce header carrying the fresh nonce, and a
    // challenge event recorded in the audit sink.
    Assert.True(validateContext.IsRejected);
    Assert.Equal(OpenIddictConstants.Errors.InvalidClient, validateContext.Error);
    var authenticateHeader = Assert.Single(httpContext.Response.Headers
        .Where(header => string.Equals(header.Key, "WWW-Authenticate", StringComparison.OrdinalIgnoreCase))).Value;
    Assert.Contains("use_dpop_nonce", authenticateHeader.ToString());
    Assert.True(httpContext.Response.Headers.TryGetValue("DPoP-Nonce", out var nonceValues));
    Assert.False(StringValues.IsNullOrEmpty(nonceValues));
    Assert.Contains(auditSink.Events, record => record.EventType == "authority.dpop.proof.challenge");
}
[Fact]
public async Task ValidateClientCredentials_AllowsMtlsClient_WithValidCertificate()
{
    // Arrange: mTLS sender constraints enabled with chain validation off, so a
    // self-signed certificate can satisfy the thumbprint binding.
    var options = new StellaOpsAuthorityOptions
    {
        Issuer = new Uri("https://authority.test")
    };
    options.Security.SenderConstraints.Mtls.Enabled = true;
    options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
    options.Signing.ActiveKeyId = "test-key";
    options.Signing.KeyPath = "/tmp/test-key.pem";
    options.Storage.ConnectionString = "mongodb://localhost/test";

    var clientDocument = CreateClient(
        secret: "s3cr3t!",
        allowedGrantTypes: "client_credentials",
        allowedScopes: "jobs:read");
    clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
    clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls;

    // Bind the client to a freshly generated certificate via its SHA-256 hex thumbprint.
    using var rsa = RSA.Create(2048);
    var certificateRequest = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
    using var certificate = certificateRequest.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1));
    var hexThumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256));
    clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding
    {
        Thumbprint = hexThumbprint
    });

    var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
    var auditSink = new TestAuthEventSink();
    var metadataAccessor = new TestRateLimiterMetadataAccessor();
    // Present the bound certificate on the connection, as TLS client auth would.
    var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
    httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate;
    var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger<AuthorityClientCertificateValidator>.Instance);
    var handler = new ValidateClientCredentialsHandler(
        new TestClientStore(clientDocument),
        registry,
        TestActivitySource,
        auditSink,
        metadataAccessor,
        TimeProvider.System,
        validator,
        httpContextAccessor,
        NullLogger<ValidateClientCredentialsHandler>.Instance);

    var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
    var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);

    // Act.
    await handler.HandleAsync(context);

    // Assert: accepted, and the transaction carries the mTLS constraint kind plus the
    // base64url-encoded SHA-256 certificate thumbprint (used later for the cnf claim).
    Assert.False(context.IsRejected);
    Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]);
    var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256));
    Assert.Equal(expectedBase64, context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty]);
}
[Fact]
public async Task ValidateClientCredentials_RejectsMtlsClient_WhenCertificateMissing()
{
    // Arrange: mTLS sender constraint required, but the connection presents no client
    // certificate, so validation must fail.
    var options = new StellaOpsAuthorityOptions
    {
        Issuer = new Uri("https://authority.test")
    };
    options.Security.SenderConstraints.Mtls.Enabled = true;
    options.Signing.ActiveKeyId = "test-key";
    options.Signing.KeyPath = "/tmp/test-key.pem";
    options.Storage.ConnectionString = "mongodb://localhost/test";

    var clientDocument = CreateClient(
        secret: "s3cr3t!",
        allowedGrantTypes: "client_credentials",
        allowedScopes: "jobs:read");
    clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
    clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls;

    var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
    // Note: no ClientCertificate assigned on the HttpContext connection.
    var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
    var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger<AuthorityClientCertificateValidator>.Instance);
    var handler = new ValidateClientCredentialsHandler(
        new TestClientStore(clientDocument),
        registry,
        TestActivitySource,
        new TestAuthEventSink(),
        new TestRateLimiterMetadataAccessor(),
        TimeProvider.System,
        validator,
        httpContextAccessor,
        NullLogger<ValidateClientCredentialsHandler>.Instance);

    var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
    var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);

    // Act.
    await handler.HandleAsync(context);

    // Assert: rejected with the standard invalid_client error.
    Assert.True(context.IsRejected);
    Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
}
[Fact]
public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims()
{
@@ -124,7 +451,8 @@ public class ClientCredentialsHandlersTests
secret: null,
clientType: "public",
allowedGrantTypes: "client_credentials",
allowedScopes: "jobs:trigger");
allowedScopes: "jobs:trigger",
allowedAudiences: "signer");
var descriptor = CreateDescriptor(clientDocument);
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor);
@@ -139,6 +467,8 @@ public class ClientCredentialsHandlersTests
authSink,
metadataAccessor,
TimeProvider.System,
new NoopCertificateValidator(),
new HttpContextAccessor(),
NullLogger<ValidateClientCredentialsHandler>.Instance);
var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger");
@@ -163,6 +493,7 @@ public class ClientCredentialsHandlersTests
Assert.True(context.IsRequestHandled);
Assert.NotNull(context.Principal);
Assert.Contains("signer", context.Principal!.GetAudiences());
Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success);
@@ -285,6 +616,62 @@ public class TokenValidationHandlersTests
Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true");
}
[Fact]
public async Task ValidateAccessTokenHandler_AddsConfirmationClaim_ForMtlsToken()
{
    // Arrange: a stored token that was issued under an mTLS sender constraint, with the
    // certificate thumbprint recorded at issuance time.
    var tokenDocument = new AuthorityTokenDocument
    {
        TokenId = "token-mtls",
        Status = "valid",
        ClientId = "mtls-client",
        SenderConstraint = AuthoritySenderConstraintKinds.Mtls,
        SenderKeyThumbprint = "thumb-print"
    };
    var tokenStore = new TestTokenStore
    {
        Inserted = tokenDocument
    };
    var clientDocument = CreateClient();
    var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null);
    var metadataAccessor = new TestRateLimiterMetadataAccessor();
    var auditSink = new TestAuthEventSink();
    var sessionAccessor = new NullMongoSessionAccessor();
    var handler = new ValidateAccessTokenHandler(
        tokenStore,
        sessionAccessor,
        new TestClientStore(clientDocument),
        registry,
        metadataAccessor,
        auditSink,
        TimeProvider.System,
        TestActivitySource,
        NullLogger<ValidateAccessTokenHandler>.Instance);

    // Validate the token through the introspection endpoint path.
    var transaction = new OpenIddictServerTransaction
    {
        Options = new OpenIddictServerOptions(),
        EndpointType = OpenIddictServerEndpointType.Introspection,
        Request = new OpenIddictRequest()
    };
    var principal = CreatePrincipal(clientDocument.ClientId, tokenDocument.TokenId, clientDocument.Plugin);
    var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
    {
        Principal = principal,
        TokenId = tokenDocument.TokenId
    };

    // Act.
    await handler.HandleAsync(context);

    // Assert: validation succeeds and the principal gains a cnf claim whose x5t#S256
    // member echoes the stored certificate thumbprint.
    Assert.False(context.IsRejected);
    var confirmation = context.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType);
    Assert.False(string.IsNullOrWhiteSpace(confirmation));
    using var json = JsonDocument.Parse(confirmation!);
    Assert.Equal(tokenDocument.SenderKeyThumbprint, json.RootElement.GetProperty("x5t#S256").GetString());
}
[Fact]
public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay()
{
@@ -358,6 +745,89 @@ public class TokenValidationHandlersTests
}
}
/// <summary>
/// Tests for <see cref="AuthorityClientCertificateValidator"/> covering SAN-type policy
/// enforcement and rotation-grace handling of certificate bindings.
/// </summary>
public class AuthorityClientCertificateValidatorTests
{
    [Fact]
    public async Task ValidateAsync_Rejects_WhenSanTypeNotAllowed()
    {
        // Arrange: policy only allows URI SANs, but the presented certificate carries
        // a DNS SAN, so validation must fail on SAN type.
        var options = new StellaOpsAuthorityOptions
        {
            Issuer = new Uri("https://authority.test")
        };
        options.Security.SenderConstraints.Mtls.Enabled = true;
        options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
        options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear();
        options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri");
        options.Signing.ActiveKeyId = "test-key";
        options.Signing.KeyPath = "/tmp/test-key.pem";
        options.Storage.ConnectionString = "mongodb://localhost/test";

        using var rsa = RSA.Create(2048);
        var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
        var sanBuilder = new SubjectAlternativeNameBuilder();
        sanBuilder.AddDnsName("client.mtls.test");
        request.CertificateExtensions.Add(sanBuilder.Build());
        using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5));

        // The binding itself matches the certificate's SHA-256 thumbprint; only the
        // SAN type is out of policy.
        var clientDocument = CreateClient();
        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
        clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding
        {
            Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256))
        });

        var httpContext = new DefaultHttpContext();
        httpContext.Connection.ClientCertificate = certificate;
        var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger<AuthorityClientCertificateValidator>.Instance);

        // Act.
        var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None);

        // Assert: rejected specifically for the disallowed SAN type.
        Assert.False(result.Succeeded);
        Assert.Equal("certificate_san_type", result.Error);
    }

    [Fact]
    public async Task ValidateAsync_AllowsBindingWithinRotationGrace()
    {
        // Arrange: the binding's NotBefore lies 2 minutes in the future, inside the
        // configured 5-minute rotation grace window, so validation should still pass.
        var options = new StellaOpsAuthorityOptions
        {
            Issuer = new Uri("https://authority.test")
        };
        options.Security.SenderConstraints.Mtls.Enabled = true;
        options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
        options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5);
        options.Signing.ActiveKeyId = "test-key";
        options.Signing.KeyPath = "/tmp/test-key.pem";
        options.Storage.ConnectionString = "mongodb://localhost/test";

        using var rsa = RSA.Create(2048);
        var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
        var sanBuilder = new SubjectAlternativeNameBuilder();
        sanBuilder.AddDnsName("client.mtls.test");
        request.CertificateExtensions.Add(sanBuilder.Build());
        using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(10));
        var thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256));

        var clientDocument = CreateClient();
        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
        clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding
        {
            Thumbprint = thumbprint,
            NotBefore = TimeProvider.System.GetUtcNow().AddMinutes(2)
        });

        var httpContext = new DefaultHttpContext();
        httpContext.Connection.ClientCertificate = certificate;
        var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger<AuthorityClientCertificateValidator>.Instance);

        // Act.
        var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None);

        // Assert: success, echoing the hex thumbprint of the matched binding.
        Assert.True(result.Succeeded);
        Assert.Equal(thumbprint, result.HexThumbprint);
    }
}
internal sealed class TestClientStore : IAuthorityClientStore
{
private readonly Dictionary<string, AuthorityClientDocument> clients = new(StringComparer.OrdinalIgnoreCase);
@@ -526,6 +996,19 @@ internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMet
public void SetTag(string name, string? value) => metadata.SetTag(name, value);
}
/// <summary>
/// Test double that unconditionally reports a successful certificate validation,
/// returning stub thumbprint values and a stub binding.
/// </summary>
internal sealed class NoopCertificateValidator : IAuthorityClientCertificateValidator
{
    public ValueTask<AuthorityClientCertificateValidationResult> ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken)
        => ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success(
            "stub",
            "stub",
            new AuthorityClientCertificateBinding { Thumbprint = "stub" }));
}
internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor
{
public ValueTask<IClientSessionHandle> GetSessionAsync(CancellationToken cancellationToken = default)
@@ -540,9 +1023,10 @@ internal static class TestHelpers
string? secret = "s3cr3t!",
string clientType = "confidential",
string allowedGrantTypes = "client_credentials",
string allowedScopes = "jobs:read")
string allowedScopes = "jobs:read",
string allowedAudiences = "")
{
return new AuthorityClientDocument
var document = new AuthorityClientDocument
{
ClientId = "concelier",
ClientType = clientType,
@@ -554,12 +1038,20 @@ internal static class TestHelpers
[AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes
}
};
if (!string.IsNullOrWhiteSpace(allowedAudiences))
{
document.Properties[AuthorityClientMetadataKeys.Audiences] = allowedAudiences;
}
return document;
}
public static AuthorityClientDescriptor CreateDescriptor(AuthorityClientDocument document)
{
var allowedGrantTypes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedGrantTypes, out var grants) ? grants?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty<string>();
var allowedScopes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedScopes, out var scopes) ? scopes?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty<string>();
var allowedAudiences = document.Properties.TryGetValue(AuthorityClientMetadataKeys.Audiences, out var audiences) ? audiences?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty<string>();
return new AuthorityClientDescriptor(
document.ClientId,
@@ -567,6 +1059,7 @@ internal static class TestHelpers
confidential: string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase),
allowedGrantTypes,
allowedScopes,
allowedAudiences,
redirectUris: Array.Empty<Uri>(),
postLogoutRedirectUris: Array.Empty<Uri>(),
properties: document.Properties);
@@ -638,6 +1131,57 @@ internal static class TestHelpers
};
}
public static string ConvertThumbprintToString(object thumbprint)
=> thumbprint switch
{
string value => value,
byte[] bytes => Base64UrlEncoder.Encode(bytes),
_ => throw new InvalidOperationException("Unsupported thumbprint representation.")
};
    /// <summary>
    /// Builds a signed DPoP proof JWT (header typ "dpop+jwt") for the given HTTP method and
    /// URL, embedding the EC public key as a "jwk" header so the server can bind the proof
    /// to the key (RFC 9449 shape).
    /// </summary>
    /// <param name="key">EC signing key; its public coordinates are copied into the header.</param>
    /// <param name="method">HTTP method the proof covers; upper-cased into the "htm" claim.</param>
    /// <param name="url">Target URI placed verbatim in the "htu" claim.</param>
    /// <param name="issuedAt">Unix timestamp (seconds) written to the "iat" claim.</param>
    /// <param name="nonce">Optional server-issued nonce echoed back in the payload.</param>
    /// <returns>The compact-serialized, ES256-signed proof token.</returns>
    public static string CreateDpopProof(ECDsaSecurityKey key, string method, string url, long issuedAt, string? nonce = null)
    {
        var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key);
        // Ensure a stable key id so the embedded JWK header is fully populated.
        jwk.KeyId ??= key.KeyId ?? Guid.NewGuid().ToString("N");
        var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256);
        var header = new JwtHeader(signingCredentials)
        {
            ["typ"] = "dpop+jwt",
            // Public-key portion only; the private key never leaves the SigningCredentials.
            ["jwk"] = new Dictionary<string, object?>
            {
                ["kty"] = jwk.Kty,
                ["crv"] = jwk.Crv,
                ["x"] = jwk.X,
                ["y"] = jwk.Y,
                ["kid"] = jwk.Kid ?? jwk.KeyId
            }
        };
        var payload = new JwtPayload
        {
            ["htm"] = method.ToUpperInvariant(),
            ["htu"] = url,
            ["iat"] = issuedAt,
            // Fresh jti per proof so replay detection can key on it.
            ["jti"] = Guid.NewGuid().ToString("N")
        };
        if (!string.IsNullOrWhiteSpace(nonce))
        {
            payload["nonce"] = nonce;
        }
        var token = new JwtSecurityToken(header, payload);
        return new JwtSecurityTokenHandler().WriteToken(token);
    }
public static X509Certificate2 CreateTestCertificate(string subjectName)
{
using var rsa = RSA.Create(2048);
var request = new CertificateRequest(subjectName, rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1));
}
public static ClaimsPrincipal CreatePrincipal(string clientId, string tokenId, string provider, string? subject = null)
{
var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme);

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.Security.Claims;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
@@ -20,6 +21,7 @@ using StellaOps.Authority.Storage.Mongo.Sessions;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Concelier.Testing;
using StellaOps.Authority.RateLimiting;
using StellaOps.Authority.Security;
using StellaOps.Cryptography.Audit;
using Xunit;
@@ -62,7 +64,7 @@ public sealed class TokenPersistenceIntegrationTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
await using var scope = provider.CreateAsyncScope();
var sessionAccessor = scope.ServiceProvider.GetRequiredService<IAuthorityMongoSessionAccessor>();
var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger<ValidateClientCredentialsHandler>.Instance);
var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, new NoopCertificateValidator(), new HttpContextAccessor(), NullLogger<ValidateClientCredentialsHandler>.Instance);
var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger<HandleClientCredentialsHandler>.Instance);
var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger<PersistTokensHandler>.Instance);

View File

@@ -8,6 +8,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Authority\StellaOps.Authority.csproj" />
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
<ProjectReference Include="..\..\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />

View File

@@ -44,11 +44,15 @@ internal sealed record BootstrapClientRequest
public IReadOnlyCollection<string>? AllowedScopes { get; init; }
public IReadOnlyCollection<string>? AllowedAudiences { get; init; }
public IReadOnlyCollection<string>? RedirectUris { get; init; }
public IReadOnlyCollection<string>? PostLogoutRedirectUris { get; init; }
public IReadOnlyDictionary<string, string?>? Properties { get; init; }
public IReadOnlyCollection<BootstrapClientCertificateBinding>? CertificateBindings { get; init; }
}
internal sealed record BootstrapInviteRequest
@@ -68,6 +72,25 @@ internal sealed record BootstrapInviteRequest
public IReadOnlyDictionary<string, string?>? Metadata { get; init; }
}
/// <summary>
/// Bootstrap payload describing an mTLS client-certificate binding. Only
/// <see cref="Thumbprint"/> is required; the remaining members carry optional
/// certificate metadata supplied at registration time.
/// </summary>
internal sealed record BootstrapClientCertificateBinding
{
    /// <summary>Certificate thumbprint identifying the binding (required; defaults to empty).</summary>
    public string Thumbprint { get; init; } = string.Empty;
    /// <summary>Certificate serial number, when captured.</summary>
    public string? SerialNumber { get; init; }
    /// <summary>Certificate subject distinguished name, when captured.</summary>
    public string? Subject { get; init; }
    /// <summary>Issuer distinguished name, when captured.</summary>
    public string? Issuer { get; init; }
    /// <summary>Subject alternative names associated with the certificate, if any.</summary>
    public IReadOnlyCollection<string>? SubjectAlternativeNames { get; init; }
    /// <summary>Start of the certificate validity window, when captured.</summary>
    public DateTimeOffset? NotBefore { get; init; }
    /// <summary>End of the certificate validity window, when captured.</summary>
    public DateTimeOffset? NotAfter { get; init; }
    /// <summary>Free-form operator label for the binding.</summary>
    public string? Label { get; init; }
}
internal static class BootstrapInviteTypes
{
public const string User = "user";

View File

@@ -15,4 +15,14 @@ internal static class AuthorityOpenIddictConstants
internal const string AuditRequestedScopesProperty = "authority:audit_requested_scopes";
internal const string AuditGrantedScopesProperty = "authority:audit_granted_scopes";
internal const string AuditInvalidScopeProperty = "authority:audit_invalid_scope";
internal const string ClientSenderConstraintProperty = "authority:client_sender_constraint";
internal const string SenderConstraintProperty = "authority:sender_constraint";
internal const string DpopKeyThumbprintProperty = "authority:dpop_thumbprint";
internal const string DpopProofJwtIdProperty = "authority:dpop_jti";
internal const string DpopIssuedAtProperty = "authority:dpop_iat";
internal const string DpopConsumedNonceProperty = "authority:dpop_nonce";
internal const string ConfirmationClaimType = "cnf";
internal const string SenderConstraintClaimType = "authority_sender_constraint";
internal const string MtlsCertificateThumbprintProperty = "authority:mtls_thumbprint";
internal const string MtlsCertificateHexProperty = "authority:mtls_thumbprint_hex";
}

View File

@@ -5,6 +5,8 @@ using System.Globalization;
using System.Linq;
using System.Security.Claims;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using OpenIddict.Abstractions;
using OpenIddict.Extensions;
@@ -18,6 +20,7 @@ using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Sessions;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.RateLimiting;
using StellaOps.Authority.Security;
using StellaOps.Cryptography.Audit;
namespace StellaOps.Authority.OpenIddict.Handlers;
@@ -30,6 +33,8 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
private readonly IAuthEventSink auditSink;
private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
private readonly TimeProvider timeProvider;
private readonly IAuthorityClientCertificateValidator certificateValidator;
private readonly IHttpContextAccessor httpContextAccessor;
private readonly ILogger<ValidateClientCredentialsHandler> logger;
public ValidateClientCredentialsHandler(
@@ -39,6 +44,8 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
IAuthEventSink auditSink,
IAuthorityRateLimiterMetadataAccessor metadataAccessor,
TimeProvider timeProvider,
IAuthorityClientCertificateValidator certificateValidator,
IHttpContextAccessor httpContextAccessor,
ILogger<ValidateClientCredentialsHandler> logger)
{
this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore));
@@ -47,6 +54,8 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
this.certificateValidator = certificateValidator ?? throw new ArgumentNullException(nameof(certificateValidator));
this.httpContextAccessor = httpContextAccessor ?? throw new ArgumentNullException(nameof(httpContextAccessor));
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -111,7 +120,44 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
return;
}
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditConfidentialProperty] = string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);
var existingSenderConstraint = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.SenderConstraintProperty, out var senderConstraintValue) && senderConstraintValue is string existingConstraint
? existingConstraint
: null;
var normalizedSenderConstraint = !string.IsNullOrWhiteSpace(existingSenderConstraint)
? existingSenderConstraint
: ClientCredentialHandlerHelpers.NormalizeSenderConstraint(document);
if (!string.IsNullOrWhiteSpace(normalizedSenderConstraint))
{
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientSenderConstraintProperty] = normalizedSenderConstraint;
}
if (string.Equals(normalizedSenderConstraint, AuthoritySenderConstraintKinds.Mtls, StringComparison.Ordinal))
{
var httpContext = httpContextAccessor.HttpContext;
if (httpContext is null)
{
context.Reject(OpenIddictConstants.Errors.ServerError, "HTTP context unavailable for mTLS validation.");
logger.LogWarning("Client credentials validation failed for {ClientId}: HTTP context unavailable for mTLS validation.", context.ClientId);
return;
}
var validation = await certificateValidator.ValidateAsync(httpContext, document, context.CancellationToken).ConfigureAwait(false);
if (!validation.Succeeded)
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, validation.Error ?? "Client certificate validation failed.");
logger.LogWarning("Client credentials validation failed for {ClientId}: {Reason}.", context.ClientId, validation.Error ?? "certificate_invalid");
return;
}
context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty] = AuthoritySenderConstraintKinds.Mtls;
context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty] = validation.ConfirmationThumbprint;
context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateHexProperty] = validation.HexThumbprint;
}
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditConfidentialProperty] =
string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);
IIdentityProviderPlugin? provider = null;
if (!string.IsNullOrWhiteSpace(document.Plugin))
@@ -278,6 +324,32 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
return;
}
var configuredAudiences = ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.Audiences);
if (configuredAudiences.Count > 0)
{
if (context.Request.Resources is ICollection<string> resources && configuredAudiences.Count > 0)
{
foreach (var audience in configuredAudiences)
{
if (!resources.Contains(audience))
{
resources.Add(audience);
}
}
}
if (context.Request.Audiences is ICollection<string> audiencesCollection)
{
foreach (var audience in configuredAudiences)
{
if (!audiencesCollection.Contains(audience))
{
audiencesCollection.Add(audience);
}
}
}
}
var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme);
identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, document.ClientId));
identity.AddClaim(new Claim(OpenIddictConstants.Claims.ClientId, document.ClientId));
@@ -322,6 +394,8 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
activity?.SetTag("authority.identity_provider", provider.Name);
}
ApplySenderConstraintClaims(context, identity, document);
var principal = new ClaimsPrincipal(identity);
var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var scopesValue) &&
@@ -338,6 +412,11 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
principal.SetScopes(Array.Empty<string>());
}
if (configuredAudiences.Count > 0)
{
principal.SetAudiences(configuredAudiences);
}
if (provider is not null && descriptor is not null)
{
var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user: null, descriptor);
@@ -420,10 +499,95 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
ExpiresAt = expiresAt
};
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.SenderConstraintProperty, out var constraintObj) &&
constraintObj is string senderConstraint &&
!string.IsNullOrWhiteSpace(senderConstraint))
{
record.SenderConstraint = senderConstraint;
}
string? senderThumbprint = null;
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.DpopKeyThumbprintProperty, out var dpopThumbprintObj) &&
dpopThumbprintObj is string dpopThumbprint &&
!string.IsNullOrWhiteSpace(dpopThumbprint))
{
senderThumbprint = dpopThumbprint;
}
else if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty, out var mtlsThumbprintObj) &&
mtlsThumbprintObj is string mtlsThumbprint &&
!string.IsNullOrWhiteSpace(mtlsThumbprint))
{
senderThumbprint = mtlsThumbprint;
}
if (senderThumbprint is not null)
{
record.SenderKeyThumbprint = senderThumbprint;
}
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.DpopConsumedNonceProperty, out var nonceObj) &&
nonceObj is string nonce &&
!string.IsNullOrWhiteSpace(nonce))
{
record.SenderNonce = nonce;
}
await tokenStore.InsertAsync(record, context.CancellationToken, session).ConfigureAwait(false);
context.Transaction.Properties[AuthorityOpenIddictConstants.TokenTransactionProperty] = record;
activity?.SetTag("authority.token_id", tokenId);
}
private static void ApplySenderConstraintClaims(
OpenIddictServerEvents.HandleTokenRequestContext context,
ClaimsIdentity identity,
AuthorityClientDocument document)
{
_ = document;
if (!context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.SenderConstraintProperty, out var constraintObj) ||
constraintObj is not string senderConstraint ||
string.IsNullOrWhiteSpace(senderConstraint))
{
return;
}
var normalized = senderConstraint.Trim().ToLowerInvariant();
context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty] = normalized;
identity.SetClaim(AuthorityOpenIddictConstants.SenderConstraintClaimType, normalized);
switch (normalized)
{
case AuthoritySenderConstraintKinds.Dpop:
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.DpopKeyThumbprintProperty, out var thumbprintObj) &&
thumbprintObj is string thumbprint &&
!string.IsNullOrWhiteSpace(thumbprint))
{
var confirmation = JsonSerializer.Serialize(new Dictionary<string, string>
{
["jkt"] = thumbprint
});
identity.SetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType, confirmation);
}
break;
case AuthoritySenderConstraintKinds.Mtls:
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty, out var mtlsThumbprintObj) &&
mtlsThumbprintObj is string mtlsThumbprint &&
!string.IsNullOrWhiteSpace(mtlsThumbprint))
{
var confirmation = JsonSerializer.Serialize(new Dictionary<string, string>
{
["x5t#S256"] = mtlsThumbprint
});
identity.SetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType, confirmation);
}
break;
}
}
}
internal static class ClientCredentialHandlerHelpers
@@ -491,4 +655,20 @@ internal static class ClientCredentialHandlerHelpers
return false;
}
}
public static string? NormalizeSenderConstraint(AuthorityClientDocument document)
{
if (!string.IsNullOrWhiteSpace(document.SenderConstraint))
{
return document.SenderConstraint.Trim().ToLowerInvariant();
}
if (document.Properties.TryGetValue(AuthorityClientMetadataKeys.SenderConstraint, out var value) &&
!string.IsNullOrWhiteSpace(value))
{
return value.Trim().ToLowerInvariant();
}
return null;
}
}

View File

@@ -0,0 +1,643 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;
using OpenIddict.Abstractions;
using OpenIddict.Extensions;
using OpenIddict.Server;
using OpenIddict.Server.AspNetCore;
using StellaOps.Auth.Security.Dpop;
using StellaOps.Authority.OpenIddict;
using StellaOps.Authority.RateLimiting;
using StellaOps.Authority.Security;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.Plugins.Abstractions;
using StellaOps.Configuration;
using StellaOps.Cryptography.Audit;
using Microsoft.IdentityModel.Tokens;
namespace StellaOps.Authority.OpenIddict.Handlers;
internal sealed class ValidateDpopProofHandler : IOpenIddictServerHandler<OpenIddictServerEvents.ValidateTokenRequestContext>
{
private readonly StellaOpsAuthorityOptions authorityOptions;
private readonly IAuthorityClientStore clientStore;
private readonly IDpopProofValidator proofValidator;
private readonly IDpopNonceStore nonceStore;
private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor;
private readonly IAuthEventSink auditSink;
private readonly TimeProvider clock;
private readonly ActivitySource activitySource;
private readonly ILogger<ValidateDpopProofHandler> logger;
    /// <summary>
    /// Wires the handler's collaborators (options, client store, proof validator, nonce
    /// store, rate-limit metadata, audit sink, clock, tracing). All dependencies are required.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when any dependency is null.</exception>
    public ValidateDpopProofHandler(
        StellaOpsAuthorityOptions authorityOptions,
        IAuthorityClientStore clientStore,
        IDpopProofValidator proofValidator,
        IDpopNonceStore nonceStore,
        IAuthorityRateLimiterMetadataAccessor metadataAccessor,
        IAuthEventSink auditSink,
        TimeProvider clock,
        ActivitySource activitySource,
        ILogger<ValidateDpopProofHandler> logger)
    {
        this.authorityOptions = authorityOptions ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore));
        this.proofValidator = proofValidator ?? throw new ArgumentNullException(nameof(proofValidator));
        this.nonceStore = nonceStore ?? throw new ArgumentNullException(nameof(nonceStore));
        this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor));
        this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }
    /// <summary>
    /// Validates the DPoP proof attached to a client-credentials token request. No-ops for
    /// other grants, unknown clients, or clients not configured for the dpop constraint.
    /// On failure the request is rejected and a nonce challenge may be issued; on success
    /// the key thumbprint, jti, iat, and consumed nonce are recorded on the transaction
    /// for the downstream token handlers.
    /// </summary>
    public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context)
    {
        ArgumentNullException.ThrowIfNull(context);
        // Only the client-credentials grant is sender-constrained here.
        if (!context.Request.IsClientCredentialsGrantType())
        {
            return;
        }
        using var activity = activitySource.StartActivity("authority.token.validate_dpop", ActivityKind.Internal);
        activity?.SetTag("authority.endpoint", "/token");
        activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.ClientCredentials);
        var clientId = context.ClientId ?? context.Request.ClientId;
        if (string.IsNullOrWhiteSpace(clientId))
        {
            return;
        }
        context.Transaction.Properties[AuthorityOpenIddictConstants.AuditClientIdProperty] = clientId;
        var senderConstraintOptions = authorityOptions.Security.SenderConstraints;
        AuthorityClientDocument? clientDocument = await ResolveClientAsync(context, clientId, activity, cancel: context.CancellationToken).ConfigureAwait(false);
        if (clientDocument is null)
        {
            return;
        }
        // Record the client's configured constraint; bail unless it is specifically dpop.
        var senderConstraint = NormalizeSenderConstraint(clientDocument);
        context.Transaction.Properties[AuthorityOpenIddictConstants.ClientSenderConstraintProperty] = senderConstraint;
        if (!string.Equals(senderConstraint, AuthoritySenderConstraintKinds.Dpop, StringComparison.Ordinal))
        {
            return;
        }
        // Push configured audiences onto the request so nonce-audience resolution sees them.
        var configuredAudiences = EnsureRequestAudiences(context.Request, clientDocument);
        // Client demands DPoP but the server has it switched off: configuration error.
        if (!senderConstraintOptions.Dpop.Enabled)
        {
            logger.LogError("Client {ClientId} requires DPoP but server-side configuration has DPoP disabled.", clientId);
            context.Reject(OpenIddictConstants.Errors.ServerError, "DPoP authentication is not enabled.");
            await WriteAuditAsync(context, clientDocument, AuthEventOutcome.Failure, "DPoP disabled server-side.", null, null, null, "authority.dpop.proof.disabled").ConfigureAwait(false);
            return;
        }
        metadataAccessor.SetTag("authority.sender_constraint", AuthoritySenderConstraintKinds.Dpop);
        activity?.SetTag("authority.sender_constraint", AuthoritySenderConstraintKinds.Dpop);
        // The HttpContext is stashed on the transaction by the ASP.NET Core integration.
        HttpRequest? httpRequest = null;
        HttpResponse? httpResponse = null;
        if (context.Transaction.Properties.TryGetValue(typeof(HttpContext).FullName!, out var httpContextProperty) &&
            httpContextProperty is HttpContext capturedContext)
        {
            httpRequest = capturedContext.Request;
            httpResponse = capturedContext.Response;
        }
        if (httpRequest is null)
        {
            context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to access HTTP context for DPoP validation.");
            logger.LogError("DPoP validation aborted for {ClientId}: HTTP request not available via transaction.", clientId);
            await WriteAuditAsync(context, clientDocument, AuthEventOutcome.Failure, "HTTP request unavailable for DPoP.", null, null, null, "authority.dpop.proof.error").ConfigureAwait(false);
            return;
        }
        // A dpop-constrained client must always present the DPoP header.
        if (!httpRequest.Headers.TryGetValue("DPoP", out StringValues proofHeader) || StringValues.IsNullOrEmpty(proofHeader))
        {
            logger.LogWarning("Missing DPoP header for client credentials request from {ClientId}.", clientId);
            await ChallengeNonceAsync(
                context,
                clientDocument,
                audience: null,
                thumbprint: null,
                reasonCode: "missing_proof",
                description: "DPoP proof is required.",
                senderConstraintOptions,
                httpResponse).ConfigureAwait(false);
            return;
        }
        var proof = proofHeader.ToString();
        var requestUri = BuildRequestUri(httpRequest);
        // Cryptographic/structural validation of the proof JWT (signature, htm, htu, iat).
        var validationResult = await proofValidator.ValidateAsync(
            proof,
            httpRequest.Method,
            requestUri,
            cancellationToken: context.CancellationToken).ConfigureAwait(false);
        if (!validationResult.IsValid)
        {
            var error = string.IsNullOrWhiteSpace(validationResult.ErrorDescription)
                ? "DPoP proof validation failed."
                : validationResult.ErrorDescription;
            logger.LogWarning("DPoP proof validation failed for client {ClientId}: {Reason}.", clientId, error);
            await ChallengeNonceAsync(
                context,
                clientDocument,
                audience: null,
                thumbprint: null,
                reasonCode: validationResult.ErrorCode ?? "invalid_proof",
                description: error,
                senderConstraintOptions,
                httpResponse).ConfigureAwait(false);
            return;
        }
        // The proof must carry the client's public key so we can bind the token to it.
        if (validationResult.PublicKey is not Microsoft.IdentityModel.Tokens.JsonWebKey jwk)
        {
            logger.LogWarning("DPoP proof for {ClientId} did not expose a JSON Web Key.", clientId);
            await ChallengeNonceAsync(
                context,
                clientDocument,
                audience: null,
                thumbprint: null,
                reasonCode: "invalid_key",
                description: "DPoP proof must embed a JSON Web Key.",
                senderConstraintOptions,
                httpResponse).ConfigureAwait(false);
            return;
        }
        // ComputeJwkThumbprint's return shape varies by library version; normalise to string.
        object rawThumbprint = jwk.ComputeJwkThumbprint();
        string thumbprint;
        if (rawThumbprint is string value && !string.IsNullOrWhiteSpace(value))
        {
            thumbprint = value;
        }
        else if (rawThumbprint is byte[] bytes)
        {
            thumbprint = Base64UrlEncoder.Encode(bytes);
        }
        else
        {
            throw new InvalidOperationException("DPoP JWK thumbprint could not be computed.");
        }
        // Persist proof facts on the transaction for the token-issuance handlers.
        context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty] = AuthoritySenderConstraintKinds.Dpop;
        context.Transaction.Properties[AuthorityOpenIddictConstants.DpopKeyThumbprintProperty] = thumbprint;
        if (!string.IsNullOrWhiteSpace(validationResult.JwtId))
        {
            context.Transaction.Properties[AuthorityOpenIddictConstants.DpopProofJwtIdProperty] = validationResult.JwtId;
        }
        if (validationResult.IssuedAt is { } issuedAt)
        {
            context.Transaction.Properties[AuthorityOpenIddictConstants.DpopIssuedAtProperty] = issuedAt;
        }
        // Nonce enforcement applies only when an audience on the request is configured
        // as nonce-required; otherwise the proof alone is sufficient.
        var nonceOptions = senderConstraintOptions.Dpop.Nonce;
        var requiredAudience = ResolveNonceAudience(context.Request, nonceOptions, configuredAudiences);
        if (nonceOptions.Enabled && requiredAudience is not null)
        {
            activity?.SetTag("authority.dpop_nonce_audience", requiredAudience);
            var suppliedNonce = validationResult.Nonce;
            if (string.IsNullOrWhiteSpace(suppliedNonce))
            {
                logger.LogInformation("DPoP nonce challenge issued to {ClientId} for audience {Audience}: nonce missing.", clientId, requiredAudience);
                await ChallengeNonceAsync(
                    context,
                    clientDocument,
                    requiredAudience,
                    thumbprint,
                    "nonce_missing",
                    "DPoP nonce is required for this audience.",
                    senderConstraintOptions,
                    httpResponse).ConfigureAwait(false);
                return;
            }
            // Single-use consumption: a nonce is bound to audience, client, and key.
            var consumeResult = await nonceStore.TryConsumeAsync(
                suppliedNonce,
                requiredAudience,
                clientDocument.ClientId,
                thumbprint,
                context.CancellationToken).ConfigureAwait(false);
            switch (consumeResult.Status)
            {
                case DpopNonceConsumeStatus.Success:
                    context.Transaction.Properties[AuthorityOpenIddictConstants.DpopConsumedNonceProperty] = suppliedNonce;
                    break;
                case DpopNonceConsumeStatus.Expired:
                    logger.LogInformation("DPoP nonce expired for {ClientId} and audience {Audience}.", clientId, requiredAudience);
                    await ChallengeNonceAsync(
                        context,
                        clientDocument,
                        requiredAudience,
                        thumbprint,
                        "nonce_expired",
                        "DPoP nonce has expired. Retry with a fresh nonce.",
                        senderConstraintOptions,
                        httpResponse).ConfigureAwait(false);
                    return;
                default:
                    logger.LogInformation("DPoP nonce invalid for {ClientId} and audience {Audience}.", clientId, requiredAudience);
                    await ChallengeNonceAsync(
                        context,
                        clientDocument,
                        requiredAudience,
                        thumbprint,
                        "nonce_invalid",
                        "DPoP nonce is invalid. Request a new nonce and retry.",
                        senderConstraintOptions,
                        httpResponse).ConfigureAwait(false);
                    return;
            }
        }
        await WriteAuditAsync(
            context,
            clientDocument,
            AuthEventOutcome.Success,
            "DPoP proof validated.",
            thumbprint,
            validationResult,
            requiredAudience,
            "authority.dpop.proof.valid")
            .ConfigureAwait(false);
        logger.LogInformation("DPoP proof validated for client {ClientId}.", clientId);
    }
private async ValueTask<AuthorityClientDocument?> ResolveClientAsync(
OpenIddictServerEvents.ValidateTokenRequestContext context,
string clientId,
Activity? activity,
CancellationToken cancel)
{
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientTransactionProperty, out var value) &&
value is AuthorityClientDocument cached)
{
activity?.SetTag("authority.client_id", cached.ClientId);
return cached;
}
var document = await clientStore.FindByClientIdAsync(clientId, cancel).ConfigureAwait(false);
if (document is not null)
{
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = document;
activity?.SetTag("authority.client_id", document.ClientId);
}
return document;
}
private static string? NormalizeSenderConstraint(AuthorityClientDocument document)
{
if (!string.IsNullOrWhiteSpace(document.SenderConstraint))
{
return document.SenderConstraint.Trim().ToLowerInvariant();
}
if (document.Properties.TryGetValue(AuthorityClientMetadataKeys.SenderConstraint, out var value) &&
!string.IsNullOrWhiteSpace(value))
{
return value.Trim().ToLowerInvariant();
}
return null;
}
private static IReadOnlyList<string> EnsureRequestAudiences(OpenIddictRequest? request, AuthorityClientDocument document)
{
if (request is null)
{
return Array.Empty<string>();
}
var configuredAudiences = ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.Audiences);
if (configuredAudiences.Count == 0)
{
return configuredAudiences;
}
if (request.Resources is ICollection<string> resources)
{
foreach (var audience in configuredAudiences)
{
if (!resources.Contains(audience))
{
resources.Add(audience);
}
}
}
if (request.Audiences is ICollection<string> audiencesCollection)
{
foreach (var audience in configuredAudiences)
{
if (!audiencesCollection.Contains(audience))
{
audiencesCollection.Add(audience);
}
}
}
return configuredAudiences;
}
private static Uri BuildRequestUri(HttpRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var url = request.GetDisplayUrl();
return new Uri(url, UriKind.Absolute);
}
private static string? ResolveNonceAudience(OpenIddictRequest request, AuthorityDpopNonceOptions nonceOptions, IReadOnlyList<string> configuredAudiences)
{
if (!nonceOptions.Enabled || request is null)
{
return null;
}
if (request.Resources is not null)
{
foreach (var resource in request.Resources)
{
if (string.IsNullOrWhiteSpace(resource))
{
continue;
}
var normalized = resource.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
if (request.Audiences is not null)
{
foreach (var audience in request.Audiences)
{
if (string.IsNullOrWhiteSpace(audience))
{
continue;
}
var normalized = audience.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
if (configuredAudiences is { Count: > 0 })
{
foreach (var audience in configuredAudiences)
{
if (string.IsNullOrWhiteSpace(audience))
{
continue;
}
var normalized = audience.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
return null;
}
    /// <summary>
    /// Rejects the token request, optionally issues a fresh DPoP nonce (when the failure is
    /// nonce-related and issuance is enabled), writes the challenge headers, and records a
    /// failure audit event.
    /// </summary>
    /// <param name="audience">Nonce audience in play, or null for non-nonce failures.</param>
    /// <param name="thumbprint">JWK thumbprint from the proof, when one was extracted.</param>
    /// <param name="reasonCode">Machine-readable failure code (e.g. "nonce_missing").</param>
    /// <param name="description">Human-readable error surfaced to the client.</param>
    private async ValueTask ChallengeNonceAsync(
        OpenIddictServerEvents.ValidateTokenRequestContext context,
        AuthorityClientDocument clientDocument,
        string? audience,
        string? thumbprint,
        string reasonCode,
        string description,
        AuthoritySenderConstraintOptions senderConstraintOptions,
        HttpResponse? httpResponse)
    {
        context.Reject(OpenIddictConstants.Errors.InvalidClient, description);
        metadataAccessor.SetTag("authority.dpop_result", reasonCode);
        string? issuedNonce = null;
        DateTimeOffset? expiresAt = null;
        // A new nonce can only be minted when we know both the audience and the proof key.
        if (audience is not null && thumbprint is not null && senderConstraintOptions.Dpop.Nonce.Enabled)
        {
            var issuance = await nonceStore.IssueAsync(
                audience,
                clientDocument.ClientId,
                thumbprint,
                senderConstraintOptions.Dpop.Nonce.Ttl,
                senderConstraintOptions.Dpop.Nonce.MaxIssuancePerMinute,
                context.CancellationToken).ConfigureAwait(false);
            if (issuance.Status == DpopNonceIssueStatus.Success)
            {
                issuedNonce = issuance.Nonce;
                expiresAt = issuance.ExpiresAt;
            }
            else
            {
                // Best-effort: rate limiting or store failure must not mask the rejection.
                logger.LogWarning("Unable to issue DPoP nonce for {ClientId} (audience {Audience}): {Status}.", clientDocument.ClientId, audience, issuance.Status);
            }
        }
        if (httpResponse is not null)
        {
            httpResponse.Headers["WWW-Authenticate"] = BuildAuthenticateHeader(reasonCode, description, issuedNonce);
            if (!string.IsNullOrWhiteSpace(issuedNonce))
            {
                httpResponse.Headers["DPoP-Nonce"] = issuedNonce;
            }
        }
        await WriteAuditAsync(
            context,
            clientDocument,
            AuthEventOutcome.Failure,
            description,
            thumbprint,
            validationResult: null,
            audience,
            "authority.dpop.proof.challenge",
            reasonCode,
            issuedNonce,
            expiresAt)
            .ConfigureAwait(false);
    }
private static string BuildAuthenticateHeader(string reasonCode, string description, string? nonce)
{
var parameters = new Dictionary<string, string?>
{
["error"] = string.Equals(reasonCode, "nonce_missing", StringComparison.OrdinalIgnoreCase)
? "use_dpop_nonce"
: "invalid_dpop_proof",
["error_description"] = description
};
if (!string.IsNullOrWhiteSpace(nonce))
{
parameters["dpop-nonce"] = nonce;
}
var segments = new List<string>();
foreach (var kvp in parameters)
{
if (kvp.Value is null)
{
continue;
}
segments.Add($"{kvp.Key}=\"{EscapeHeaderValue(kvp.Value)}\"");
}
return segments.Count > 0
? $"DPoP {string.Join(", ", segments)}"
: "DPoP";
static string EscapeHeaderValue(string value)
=> value
.Replace("\\", "\\\\", StringComparison.Ordinal)
.Replace("\"", "\\\"", StringComparison.Ordinal);
}
    /// <summary>
    /// Writes a client-credentials audit record capturing the outcome of DPoP sender-constraint
    /// processing (proof validation, nonce challenge/issuance) for the current token request.
    /// </summary>
    /// <param name="context">OpenIddict validation context supplying the transaction and cancellation token.</param>
    /// <param name="clientDocument">Client document the request was evaluated against.</param>
    /// <param name="outcome">Overall audit outcome (success or failure).</param>
    /// <param name="reason">Human-readable reason stored on the audit record.</param>
    /// <param name="thumbprint">DPoP key thumbprint (jkt), when known.</param>
    /// <param name="validationResult">Parsed proof details (jti, iat, nonce), when a proof was validated.</param>
    /// <param name="audience">Audience associated with the proof/nonce, if any.</param>
    /// <param name="eventType">Audit event type identifier (e.g. "authority.dpop.proof.challenge").</param>
    /// <param name="reasonCode">Machine-readable code for the failure/challenge, if any.</param>
    /// <param name="issuedNonce">Nonce issued to the client alongside a challenge, if any.</param>
    /// <param name="nonceExpiresAt">Expiry timestamp of the issued nonce, if any.</param>
    private async ValueTask WriteAuditAsync(
        OpenIddictServerEvents.ValidateTokenRequestContext context,
        AuthorityClientDocument clientDocument,
        AuthEventOutcome outcome,
        string reason,
        string? thumbprint,
        DpopValidationResult? validationResult,
        string? audience,
        string eventType,
        string? reasonCode = null,
        string? issuedNonce = null,
        DateTimeOffset? nonceExpiresAt = null)
    {
        var metadata = metadataAccessor.GetMetadata();

        // Every record is tagged with the sender-constraint kind; the properties below are
        // appended only when the corresponding value is present.
        var properties = new List<AuthEventProperty>
        {
            new()
            {
                Name = "sender.constraint",
                Value = ClassifiedString.Public(AuthoritySenderConstraintKinds.Dpop)
            }
        };

        if (!string.IsNullOrWhiteSpace(reasonCode))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.reason_code",
                Value = ClassifiedString.Public(reasonCode)
            });
        }

        if (!string.IsNullOrWhiteSpace(thumbprint))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.jkt",
                Value = ClassifiedString.Public(thumbprint)
            });
        }

        if (validationResult?.JwtId is not null)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.jti",
                Value = ClassifiedString.Public(validationResult.JwtId)
            });
        }

        if (validationResult?.IssuedAt is { } issuedAt)
        {
            // Round-trip ("O") format keeps timestamps culture-invariant and sortable.
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.issued_at",
                Value = ClassifiedString.Public(issuedAt.ToString("O", CultureInfo.InvariantCulture))
            });
        }

        if (audience is not null)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.audience",
                Value = ClassifiedString.Public(audience)
            });
        }

        // Nonce values (both presented and issued) are recorded with the Sensitive
        // classification rather than Public.
        if (!string.IsNullOrWhiteSpace(validationResult?.Nonce))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.nonce.presented",
                Value = ClassifiedString.Sensitive(validationResult.Nonce)
            });
        }

        if (!string.IsNullOrWhiteSpace(issuedNonce))
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.nonce.issued",
                Value = ClassifiedString.Sensitive(issuedNonce)
            });
        }

        if (nonceExpiresAt is { } expiresAt)
        {
            properties.Add(new AuthEventProperty
            {
                Name = "dpop.nonce.expires_at",
                Value = ClassifiedString.Public(expiresAt.ToString("O", CultureInfo.InvariantCulture))
            });
        }

        var confidential = string.Equals(clientDocument.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);

        // Scope collections are intentionally empty: this event describes sender-constraint
        // processing, not scope grants.
        var record = ClientCredentialsAuditHelper.CreateRecord(
            clock,
            context.Transaction,
            metadata,
            clientSecret: null,
            outcome,
            reason,
            clientDocument.ClientId,
            providerName: clientDocument.Plugin,
            confidential,
            requestedScopes: Array.Empty<string>(),
            grantedScopes: Array.Empty<string>(),
            invalidScope: null,
            extraProperties: properties,
            eventType: eventType);

        await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false);
    }
}

View File

@@ -4,6 +4,7 @@ using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Security.Claims;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -92,6 +93,33 @@ internal sealed class PersistTokensHandler : IOpenIddictServerHandler<OpenIddict
ExpiresAt = TryGetExpiration(principal)
};
var senderConstraint = principal.GetClaim(AuthorityOpenIddictConstants.SenderConstraintClaimType);
if (!string.IsNullOrWhiteSpace(senderConstraint))
{
document.SenderConstraint = senderConstraint;
}
var confirmation = principal.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType);
if (!string.IsNullOrWhiteSpace(confirmation))
{
try
{
using var json = JsonDocument.Parse(confirmation);
if (json.RootElement.TryGetProperty("jkt", out var thumbprintElement))
{
document.SenderKeyThumbprint = thumbprintElement.GetString();
}
else if (json.RootElement.TryGetProperty("x5t#S256", out var certificateThumbprintElement))
{
document.SenderKeyThumbprint = certificateThumbprintElement.GetString();
}
}
catch (JsonException)
{
// Ignore malformed confirmation claims in persistence layer.
}
}
try
{
await tokenStore.InsertAsync(document, cancellationToken, session).ConfigureAwait(false);

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Security.Claims;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using OpenIddict.Abstractions;
using OpenIddict.Extensions;
@@ -16,6 +17,7 @@ using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Storage.Mongo.Sessions;
using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Cryptography.Audit;
using StellaOps.Authority.Security;
namespace StellaOps.Authority.OpenIddict.Handlers;
@@ -106,6 +108,11 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
}
}
if (tokenDocument is not null)
{
EnsureSenderConstraintClaims(context.Principal, tokenDocument);
}
if (!context.IsRejected && tokenDocument is not null)
{
await TrackTokenUsageAsync(context, tokenDocument, context.Principal, session).ConfigureAwait(false);
@@ -272,4 +279,46 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
}
    /// <summary>
    /// Restores sender-constraint claims on a validated access-token principal from the persisted
    /// token document when the principal does not already carry them.
    /// </summary>
    /// <param name="principal">Principal produced by token validation; ignored unless it exposes a ClaimsIdentity.</param>
    /// <param name="tokenDocument">Stored token document holding the persisted sender-constraint metadata.</param>
    private static void EnsureSenderConstraintClaims(ClaimsPrincipal? principal, AuthorityTokenDocument tokenDocument)
    {
        if (principal?.Identity is not ClaimsIdentity identity)
        {
            return;
        }

        // Stamp the sender-constraint kind only when the principal does not already carry it.
        if (!string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) &&
            !identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.SenderConstraintClaimType))
        {
            identity.SetClaim(AuthorityOpenIddictConstants.SenderConstraintClaimType, tokenDocument.SenderConstraint);
        }

        // An existing confirmation claim always wins; never overwrite it.
        if (identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.ConfirmationClaimType))
        {
            return;
        }

        // Both the constraint kind and the stored key thumbprint are needed to rebuild cnf.
        if (string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) || string.IsNullOrWhiteSpace(tokenDocument.SenderKeyThumbprint))
        {
            return;
        }

        // Rebuild the confirmation claim JSON: "jkt" for DPoP-bound tokens, "x5t#S256" for
        // mTLS certificate-bound tokens; any other kind produces no confirmation claim.
        string confirmation = tokenDocument.SenderConstraint switch
        {
            AuthoritySenderConstraintKinds.Dpop => JsonSerializer.Serialize(new Dictionary<string, string>
            {
                ["jkt"] = tokenDocument.SenderKeyThumbprint
            }),
            AuthoritySenderConstraintKinds.Mtls => JsonSerializer.Serialize(new Dictionary<string, string>
            {
                ["x5t#S256"] = tokenDocument.SenderKeyThumbprint
            }),
            _ => string.Empty
        };

        if (!string.IsNullOrEmpty(confirmation))
        {
            identity.SetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType, confirmation);
        }
    }
}

View File

@@ -38,8 +38,10 @@ using StellaOps.Authority.Revocation;
using StellaOps.Authority.Signing;
using StellaOps.Cryptography;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Security;
#if STELLAOPS_AUTH_SECURITY
using StellaOps.Auth.Security.Dpop;
using StackExchange.Redis;
#endif
var builder = WebApplication.CreateBuilder(args);
@@ -98,6 +100,7 @@ builder.Services.AddHttpContextAccessor();
builder.Services.TryAddSingleton<TimeProvider>(_ => TimeProvider.System);
builder.Services.TryAddSingleton<IAuthorityRateLimiterMetadataAccessor, AuthorityRateLimiterMetadataAccessor>();
builder.Services.TryAddSingleton<IAuthorityRateLimiterPartitionKeyResolver, DefaultAuthorityRateLimiterPartitionKeyResolver>();
builder.Services.AddSingleton<IAuthorityClientCertificateValidator, AuthorityClientCertificateValidator>();
#if STELLAOPS_AUTH_SECURITY
var senderConstraints = authorityOptions.Security.SenderConstraints;
@@ -119,6 +122,29 @@ builder.Services.AddOptions<DpopValidationOptions>()
builder.Services.TryAddSingleton<IDpopReplayCache>(provider => new InMemoryDpopReplayCache(provider.GetService<TimeProvider>()));
builder.Services.TryAddSingleton<IDpopProofValidator, DpopProofValidator>();
if (string.Equals(senderConstraints.Dpop.Nonce.Store, "redis", StringComparison.OrdinalIgnoreCase))
{
builder.Services.TryAddSingleton<IConnectionMultiplexer>(_ =>
ConnectionMultiplexer.Connect(senderConstraints.Dpop.Nonce.RedisConnectionString!));
builder.Services.TryAddSingleton<IDpopNonceStore>(provider =>
{
var multiplexer = provider.GetRequiredService<IConnectionMultiplexer>();
var timeProvider = provider.GetService<TimeProvider>();
return new RedisDpopNonceStore(multiplexer, timeProvider);
});
}
else
{
builder.Services.TryAddSingleton<IDpopNonceStore>(provider =>
{
var timeProvider = provider.GetService<TimeProvider>();
var nonceLogger = provider.GetService<ILogger<InMemoryDpopNonceStore>>();
return new InMemoryDpopNonceStore(timeProvider, nonceLogger);
});
}
builder.Services.AddScoped<ValidateDpopProofHandler>();
#endif
builder.Services.AddRateLimiter(rateLimiterOptions =>
@@ -219,6 +245,13 @@ builder.Services.AddOpenIddict()
aspNetCoreBuilder.DisableTransportSecurityRequirement();
}
#if STELLAOPS_AUTH_SECURITY
options.AddEventHandler<OpenIddictServerEvents.ValidateTokenRequestContext>(descriptor =>
{
descriptor.UseScopedHandler<ValidateDpopProofHandler>();
});
#endif
options.AddEventHandler<OpenIddictServerEvents.ValidateTokenRequestContext>(descriptor =>
{
descriptor.UseScopedHandler<ValidatePasswordGrantHandler>();
@@ -723,6 +756,33 @@ if (authorityOptions.Bootstrap.Enabled)
? new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
: new Dictionary<string, string?>(request.Properties, StringComparer.OrdinalIgnoreCase);
IReadOnlyCollection<AuthorityClientCertificateBindingRegistration>? certificateBindings = null;
if (request.CertificateBindings is not null)
{
var bindingRegistrations = new List<AuthorityClientCertificateBindingRegistration>(request.CertificateBindings.Count);
foreach (var binding in request.CertificateBindings)
{
if (binding is null || string.IsNullOrWhiteSpace(binding.Thumbprint))
{
await ReleaseInviteAsync("Certificate binding thumbprint is required.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Certificate binding thumbprint is required.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = "Certificate binding thumbprint is required." });
}
bindingRegistrations.Add(new AuthorityClientCertificateBindingRegistration(
binding.Thumbprint,
binding.SerialNumber,
binding.Subject,
binding.Issuer,
binding.SubjectAlternativeNames,
binding.NotBefore,
binding.NotAfter,
binding.Label));
}
certificateBindings = bindingRegistrations;
}
var registration = new AuthorityClientRegistration(
request.ClientId,
request.Confidential,
@@ -730,9 +790,11 @@ if (authorityOptions.Bootstrap.Enabled)
request.ClientSecret,
request.AllowedGrantTypes ?? Array.Empty<string>(),
request.AllowedScopes ?? Array.Empty<string>(),
request.AllowedAudiences ?? Array.Empty<string>(),
redirectUris,
postLogoutUris,
properties);
properties,
certificateBindings);
var result = await provider.ClientProvisioning.CreateOrUpdateAsync(registration, cancellationToken).ConfigureAwait(false);
@@ -1149,7 +1211,7 @@ static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions option
{
BaseDirectory = basePath,
PluginsDirectory = string.IsNullOrWhiteSpace(pluginDirectory)
? Path.Combine("PluginBinaries", "Authority")
? "StellaOps.Authority.PluginBinaries"
: pluginDirectory,
PrimaryPrefix = "StellaOps.Authority"
};

View File

@@ -0,0 +1,32 @@
using System;
using StellaOps.Authority.Storage.Mongo.Documents;
namespace StellaOps.Authority.Security;
/// <summary>
/// Outcome of client-certificate validation: a success carries the matched binding plus both
/// thumbprint encodings; a failure carries only a machine-readable error code.
/// </summary>
internal sealed class AuthorityClientCertificateValidationResult
{
    private AuthorityClientCertificateValidationResult(bool ok, string? confirmation, string? hex, AuthorityClientCertificateBinding? matchedBinding, string? failureCode)
    {
        Succeeded = ok;
        ConfirmationThumbprint = confirmation;
        HexThumbprint = hex;
        Binding = matchedBinding;
        Error = failureCode;
    }

    /// <summary>True when the certificate passed policy checks and matched a registered binding.</summary>
    public bool Succeeded { get; }

    /// <summary>Thumbprint encoding used as the token confirmation value; null on failure.</summary>
    public string? ConfirmationThumbprint { get; }

    /// <summary>Hex-encoded certificate thumbprint; null on failure.</summary>
    public string? HexThumbprint { get; }

    /// <summary>The client certificate binding that matched; null on failure.</summary>
    public AuthorityClientCertificateBinding? Binding { get; }

    /// <summary>Error code describing why validation failed; null on success.</summary>
    public string? Error { get; }

    /// <summary>Creates a successful result for a certificate that matched <paramref name="binding"/>.</summary>
    public static AuthorityClientCertificateValidationResult Success(string confirmationThumbprint, string hexThumbprint, AuthorityClientCertificateBinding binding)
        => new(ok: true, confirmation: confirmationThumbprint, hex: hexThumbprint, matchedBinding: binding, failureCode: null);

    /// <summary>Creates a failed result carrying <paramref name="error"/>.</summary>
    public static AuthorityClientCertificateValidationResult Failure(string error)
        => new(ok: false, confirmation: null, hex: null, matchedBinding: null, failureCode: error);
}

View File

@@ -0,0 +1,283 @@
using System;
using System.Linq;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
using System.Formats.Asn1;
using System.Net;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Configuration;
using Microsoft.IdentityModel.Tokens;
namespace StellaOps.Authority.Security;
/// <summary>
/// Validates the TLS client certificate presented on the current connection against the
/// Authority mTLS sender-constraint options and the client's registered certificate bindings.
/// </summary>
internal sealed class AuthorityClientCertificateValidator : IAuthorityClientCertificateValidator
{
    private readonly StellaOpsAuthorityOptions authorityOptions;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<AuthorityClientCertificateValidator> logger;

    public AuthorityClientCertificateValidator(
        StellaOpsAuthorityOptions authorityOptions,
        TimeProvider timeProvider,
        ILogger<AuthorityClientCertificateValidator> logger)
    {
        this.authorityOptions = authorityOptions ?? throw new ArgumentNullException(nameof(authorityOptions));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs the validation pipeline: certificate presence, optional chain build, validity window,
    /// subject pattern policy, SAN type policy, CA allow-list, then the per-client thumbprint
    /// binding (binding windows are widened by the configured rotation grace).
    /// </summary>
    /// <returns>
    /// Success with the confirmation (base64url) and hex SHA-256 thumbprints plus the matched
    /// binding, or a failure whose <c>Error</c> is a stable machine-readable code.
    /// </returns>
    public ValueTask<AuthorityClientCertificateValidationResult> ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(httpContext);
        ArgumentNullException.ThrowIfNull(client);

        var certificate = httpContext.Connection.ClientCertificate;
        if (certificate is null)
        {
            logger.LogWarning("mTLS validation failed for {ClientId}: no client certificate present.", client.ClientId);
            return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("client_certificate_required"));
        }

        var mtlsOptions = authorityOptions.Security.SenderConstraints.Mtls;

        // The chain is built when chain validation is mandated, or when the CA allow-list needs
        // intermediate issuers to match against.
        var requiresChain = mtlsOptions.RequireChainValidation || mtlsOptions.AllowedCertificateAuthorities.Count > 0;
        X509Chain? chain = null;
        var chainBuilt = false;
        try
        {
            if (requiresChain)
            {
                chain = CreateChain();
                chainBuilt = TryBuildChain(chain, certificate);
                if (mtlsOptions.RequireChainValidation && !chainBuilt)
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate chain validation failed.", client.ClientId);
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_chain_invalid"));
                }
            }

            var now = timeProvider.GetUtcNow();
            if (now < certificate.NotBefore || now > certificate.NotAfter)
            {
                logger.LogWarning("mTLS validation failed for {ClientId}: certificate outside validity window (notBefore={NotBefore:o}, notAfter={NotAfter:o}).", client.ClientId, certificate.NotBefore, certificate.NotAfter);
                return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_expired"));
            }

            if (mtlsOptions.NormalizedSubjectPatterns.Count > 0 &&
                !mtlsOptions.NormalizedSubjectPatterns.Any(pattern => pattern.IsMatch(certificate.Subject)))
            {
                logger.LogWarning("mTLS validation failed for {ClientId}: subject {Subject} did not match allowed patterns.", client.ClientId, certificate.Subject);
                return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_subject_mismatch"));
            }

            var subjectAlternativeNames = GetSubjectAlternativeNames(certificate);
            if (mtlsOptions.AllowedSanTypes.Count > 0)
            {
                if (subjectAlternativeNames.Count == 0)
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate does not contain subject alternative names.", client.ClientId);
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_san_missing"));
                }

                // Every SAN must be of an allowed type. Once that holds for a non-empty SAN list,
                // at least one allowed-type SAN necessarily exists, so no separate "required type
                // present" probe is needed (the former extra check was unreachable).
                if (subjectAlternativeNames.Any(san => !mtlsOptions.AllowedSanTypes.Contains(san.Type)))
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate SAN types [{Types}] not allowed.", client.ClientId, string.Join(",", subjectAlternativeNames.Select(san => san.Type)));
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_san_type"));
                }
            }

            if (mtlsOptions.AllowedCertificateAuthorities.Count > 0)
            {
                var allowedCas = mtlsOptions.AllowedCertificateAuthorities
                    .Where(value => !string.IsNullOrWhiteSpace(value))
                    .Select(value => value.Trim())
                    .ToHashSet(StringComparer.OrdinalIgnoreCase);

                // Prefer matching an issuer subject from the built chain (skipping the leaf);
                // fall back to the leaf's declared issuer when the chain did not match.
                var matchedCa = false;
                if (chainBuilt && chain is not null)
                {
                    foreach (var element in chain.ChainElements.Cast<X509ChainElement>().Skip(1))
                    {
                        if (allowedCas.Contains(element.Certificate.Subject))
                        {
                            matchedCa = true;
                            break;
                        }
                    }
                }

                if (!matchedCa && allowedCas.Contains(certificate.Issuer))
                {
                    matchedCa = true;
                }

                if (!matchedCa)
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate issuer {Issuer} is not allow-listed.", client.ClientId, certificate.Issuer);
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_ca_untrusted"));
                }
            }

            if (client.CertificateBindings.Count == 0)
            {
                logger.LogWarning("mTLS validation failed for {ClientId}: no certificate bindings registered for client.", client.ClientId);
                return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_binding_missing"));
            }

            // SHA-256 thumbprint of the certificate: hex for binding lookup and logs, base64url
            // as the confirmation value returned to the caller.
            var certificateHash = certificate.GetCertHash(HashAlgorithmName.SHA256);
            var hexThumbprint = Convert.ToHexString(certificateHash);
            var base64Thumbprint = Base64UrlEncoder.Encode(certificateHash);

            var binding = client.CertificateBindings.FirstOrDefault(b => string.Equals(b.Thumbprint, hexThumbprint, StringComparison.OrdinalIgnoreCase));
            if (binding is null)
            {
                logger.LogWarning("mTLS validation failed for {ClientId}: certificate thumbprint {Thumbprint} not registered.", client.ClientId, hexThumbprint);
                return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_unbound"));
            }

            // Binding activation/expiry checks honour the rotation grace window so freshly
            // rotated certificates keep working during the overlap period.
            if (binding.NotBefore is { } bindingNotBefore)
            {
                var effectiveNotBefore = bindingNotBefore - mtlsOptions.RotationGrace;
                if (now < effectiveNotBefore)
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate binding not active until {NotBefore:o} (grace applied).", client.ClientId, bindingNotBefore);
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_binding_inactive"));
                }
            }

            if (binding.NotAfter is { } bindingNotAfter)
            {
                var effectiveNotAfter = bindingNotAfter + mtlsOptions.RotationGrace;
                if (now > effectiveNotAfter)
                {
                    logger.LogWarning("mTLS validation failed for {ClientId}: certificate binding expired at {NotAfter:o} (grace applied).", client.ClientId, bindingNotAfter);
                    return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("certificate_binding_expired"));
                }
            }

            return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success(base64Thumbprint, hexThumbprint, binding));
        }
        finally
        {
            chain?.Dispose();
        }
    }

    // NOTE(review): revocation is deliberately not checked here and wrong-usage flags are
    // ignored — confirm this matches the deployment's trust policy.
    private static X509Chain CreateChain()
        => new()
        {
            ChainPolicy =
            {
                RevocationMode = X509RevocationMode.NoCheck,
                RevocationFlag = X509RevocationFlag.ExcludeRoot,
                VerificationFlags = X509VerificationFlags.IgnoreWrongUsage
            }
        };

    // Returns false (instead of throwing) when the platform chain engine raises an exception.
    private bool TryBuildChain(X509Chain chain, X509Certificate2 certificate)
    {
        try
        {
            return chain.Build(certificate);
        }
        catch (Exception ex)
        {
            logger.LogWarning(ex, "mTLS chain validation threw an exception.");
            return false;
        }
    }

    /// <summary>
    /// Parses the subjectAltName extension (OID 2.5.29.17) into (type, value) pairs for
    /// context-specific tags 2 ("dns"), 6 ("uri"), and 7 ("ip"); other entries are skipped.
    /// A malformed extension yields an empty list rather than throwing.
    /// </summary>
    private static IReadOnlyList<(string Type, string Value)> GetSubjectAlternativeNames(X509Certificate2 certificate)
    {
        foreach (var extension in certificate.Extensions)
        {
            if (!string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal))
            {
                continue;
            }

            try
            {
                var reader = new AsnReader(extension.RawData, AsnEncodingRules.DER);
                var sequence = reader.ReadSequence();
                var results = new List<(string, string)>();

                while (sequence.HasData)
                {
                    var tag = sequence.PeekTag();
                    if (tag.TagClass != TagClass.ContextSpecific)
                    {
                        sequence.ReadEncodedValue();
                        continue;
                    }

                    switch (tag.TagValue)
                    {
                        case 2:
                        {
                            var dns = sequence.ReadCharacterString(UniversalTagNumber.IA5String, new Asn1Tag(TagClass.ContextSpecific, 2));
                            results.Add(("dns", dns));
                            break;
                        }
                        case 6:
                        {
                            var uri = sequence.ReadCharacterString(UniversalTagNumber.IA5String, new Asn1Tag(TagClass.ContextSpecific, 6));
                            results.Add(("uri", uri));
                            break;
                        }
                        case 7:
                        {
                            var bytes = sequence.ReadOctetString(new Asn1Tag(TagClass.ContextSpecific, 7));
                            var ip = new IPAddress(bytes).ToString();
                            results.Add(("ip", ip));
                            break;
                        }
                        default:
                            sequence.ReadEncodedValue();
                            break;
                    }
                }

                return results;
            }
            catch
            {
                return Array.Empty<(string, string)>();
            }
        }

        return Array.Empty<(string, string)>();
    }
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Authority.Security;
/// <summary>
/// Canonical string identifiers for Authority sender-constraint policies.
/// </summary>
internal static class AuthoritySenderConstraintKinds
{
    /// <summary>Sender constraint enforced via DPoP proof-of-possession.</summary>
    internal const string Dpop = "dpop";

    /// <summary>Sender constraint enforced via mutual-TLS certificate binding.</summary>
    internal const string Mtls = "mtls";
}

View File

@@ -0,0 +1,11 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using StellaOps.Authority.Storage.Mongo.Documents;
namespace StellaOps.Authority.Security;
/// <summary>
/// Validates the TLS client certificate presented with a request against the Authority mTLS
/// policy and a client's registered certificate bindings.
/// </summary>
internal interface IAuthorityClientCertificateValidator
{
    /// <summary>
    /// Validates the connection's client certificate for <paramref name="client"/> and returns a
    /// success/failure result describing the outcome.
    /// </summary>
    ValueTask<AuthorityClientCertificateValidationResult> ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken);
}

View File

@@ -17,6 +17,7 @@
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj" />
<ProjectReference Include="..\StellaOps.Authority.Storage.Mongo\StellaOps.Authority.Storage.Mongo.csproj" />

View File

@@ -20,10 +20,13 @@
| AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. |
| AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. |
| AUTHSTORAGE-MONGO-08-001 | DONE (2025-10-19) | Authority Core & Storage Guild | — | Harden Mongo session usage with causal consistency for mutations and follow-up reads. | • Scoped middleware/service creates `IClientSessionHandle` with causal consistency + majority read/write concerns<br>• Stores accept optional session parameter and reuse it for write + immediate reads<br>• GraphQL/HTTP pipelines updated to flow session through post-mutation queries<br>• Replica-set integration test exercises primary election and verifies read-your-write guarantees |
| AUTH-DPOP-11-001 | DOING (2025-10-19) | Authority Core & Security Guild | — | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | • DPoP proof validator verifies method/uri/hash, jwk thumbprint, and replay nonce per spec<br>• Nonce issuance endpoint integrated with audit + rate limits; high-value audiences enforce nonce requirement<br>• Integration tests cover success/failure paths (expired nonce, replay, invalid proof) and docs outline operator configuration |
| AUTH-MTLS-11-002 | DOING (2025-10-19) | Authority Core & Security Guild | — | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | • Client registration stores certificate bindings and enforces SAN/thumbprint validation during token issuance<br>• Token endpoint returns certificate-bound access tokens + PoP proof metadata; introspection reflects binding state<br>• End-to-end tests validate successful mTLS issuance, rejection of unbound certs, and docs capture configuration/rotation guidance |
> Remark (2025-10-19, AUTHSTORAGE-MONGO-08-001): Session accessor wired through Authority pipeline; stores accept optional sessions; added replica-set election regression test for read-your-write.
> Remark (2025-10-19, AUTH-DPOP-11-001): Prerequisites reviewed—none outstanding; status moved to DOING for Wave 0 kickoff. Design blueprint recorded in `docs/dev/authority-dpop-mtls-plan.md`.
> Remark (2025-10-19, AUTH-MTLS-11-002): Prerequisites reviewed—none outstanding; status moved to DOING for Wave 0 kickoff. mTLS flow design captured in `docs/dev/authority-dpop-mtls-plan.md`.
| AUTH-PLUGIN-COORD-08-002 | DOING (2025-10-19) | Authority Core, Plugin Platform Guild | PLUGIN-DI-08-001 | Coordinate scoped-service adoption for Authority plug-in registrars and background jobs ahead of PLUGIN-DI-08-002 implementation. | ✅ Workshop locked for 2025-10-20 15:00–16:00 UTC; ✅ Pre-read checklist in `docs/dev/authority-plugin-di-coordination.md`; ✅ Follow-up tasks captured in module backlogs before code changes begin. |
| AUTH-DPOP-11-001 | DOING (2025-10-19) | Authority Core & Security Guild | — | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | • Proof handler validates method/uri/hash + replay; nonce issuing/consumption implemented for in-memory + Redis stores<br>• Client credential path stamps `cnf.jkt` and persists sender metadata<br>• Remaining: finalize Redis configuration surface (docs/sample config), unskip nonce-challenge regression once HTTP pipeline emits high-value audiences, refresh operator docs |
> Remark (2025-10-19): DPoP handler now seeds request resources/audiences from client metadata; nonce challenge integration test re-enabled (still requires full suite once Concelier build restored).
| AUTH-MTLS-11-002 | DOING (2025-10-19) | Authority Core & Security Guild | — | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | • Certificate validator scaffold plus cnf stamping present; tokens persist sender thumbprints<br>• Remaining: provisioning/storage for certificate bindings, SAN/CA validation, introspection propagation, integration tests/docs before marking DONE |
> Remark (2025-10-19): Client provisioning accepts certificate bindings; validator enforces SAN types/CA allow-list with rotation grace; mtls integration tests updated (full suite still blocked by upstream build).
> Remark (2025-10-19, AUTHSTORAGE-MONGO-08-001): Prerequisites re-checked (none outstanding). Session accessor wired through Authority pipeline; stores accept optional sessions; added replica-set election regression test for read-your-write.
> Remark (2025-10-19, AUTH-DPOP-11-001): Handler, nonce store, and persistence hooks merged; Redis-backed configuration + end-to-end nonce enforcement still open. Full solution test blocked by `StellaOps.Concelier.Storage.Mongo` compile errors.
> Remark (2025-10-19, AUTH-MTLS-11-002): Certificate validator + cnf stamping delivered; binding storage, CA/SAN validation, integration suites outstanding before status can move to DONE.
> Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic.

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
@@ -9,6 +10,7 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using System.Globalization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
@@ -21,20 +23,22 @@ using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Telemetry;
using StellaOps.Cli.Tests.Testing;
using StellaOps.Cryptography;
using Spectre.Console;
using Spectre.Console.Testing;
namespace StellaOps.Cli.Tests.Commands;
public sealed class CommandHandlersTests
{
[Fact]
public async Task HandleExportJobAsync_SetsExitCodeZeroOnSuccess()
{
var original = Environment.ExitCode;
try
{
var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", "/jobs/export:json/1", null));
var provider = BuildServiceProvider(backend);
public sealed class CommandHandlersTests
{
[Fact]
public async Task HandleExportJobAsync_SetsExitCodeZeroOnSuccess()
{
var original = Environment.ExitCode;
try
{
var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", "/jobs/export:json/1", null));
var provider = BuildServiceProvider(backend);
await CommandHandlers.HandleExportJobAsync(
provider,
format: "json",
@@ -45,36 +49,36 @@ public sealed class CommandHandlersTests
includeDelta: null,
verbose: false,
cancellationToken: CancellationToken.None);
Assert.Equal(0, Environment.ExitCode);
Assert.Equal("export:json", backend.LastJobKind);
}
finally
{
Environment.ExitCode = original;
}
}
[Fact]
public async Task HandleMergeJobAsync_SetsExitCodeOnFailure()
{
var original = Environment.ExitCode;
try
{
var backend = new StubBackendClient(new JobTriggerResult(false, "Job already running", null, null));
var provider = BuildServiceProvider(backend);
await CommandHandlers.HandleMergeJobAsync(provider, verbose: false, CancellationToken.None);
Assert.Equal(1, Environment.ExitCode);
Assert.Equal("merge:reconcile", backend.LastJobKind);
}
finally
{
Environment.ExitCode = original;
}
}
Assert.Equal(0, Environment.ExitCode);
Assert.Equal("export:json", backend.LastJobKind);
}
finally
{
Environment.ExitCode = original;
}
}
[Fact]
public async Task HandleMergeJobAsync_SetsExitCodeOnFailure()
{
var original = Environment.ExitCode;
try
{
var backend = new StubBackendClient(new JobTriggerResult(false, "Job already running", null, null));
var provider = BuildServiceProvider(backend);
await CommandHandlers.HandleMergeJobAsync(provider, verbose: false, CancellationToken.None);
Assert.Equal(1, Environment.ExitCode);
Assert.Equal("merge:reconcile", backend.LastJobKind);
}
finally
{
Environment.ExitCode = original;
}
}
[Fact]
public async Task HandleScannerRunAsync_AutomaticallyUploadsResults()
{
@@ -83,34 +87,34 @@ public sealed class CommandHandlersTests
var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null));
var metadataFile = Path.Combine(tempDir.Path, "results", "scan-run.json");
var executor = new StubExecutor(new ScannerExecutionResult(0, resultsFile, metadataFile));
var options = new StellaOpsCliOptions
{
ResultsDirectory = Path.Combine(tempDir.Path, "results")
};
var provider = BuildServiceProvider(backend, executor, new StubInstaller(), options);
Directory.CreateDirectory(Path.Combine(tempDir.Path, "target"));
var original = Environment.ExitCode;
try
{
await CommandHandlers.HandleScannerRunAsync(
provider,
runner: "docker",
entry: "scanner-image",
targetDirectory: Path.Combine(tempDir.Path, "target"),
arguments: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
Assert.Equal(0, Environment.ExitCode);
var options = new StellaOpsCliOptions
{
ResultsDirectory = Path.Combine(tempDir.Path, "results")
};
var provider = BuildServiceProvider(backend, executor, new StubInstaller(), options);
Directory.CreateDirectory(Path.Combine(tempDir.Path, "target"));
var original = Environment.ExitCode;
try
{
await CommandHandlers.HandleScannerRunAsync(
provider,
runner: "docker",
entry: "scanner-image",
targetDirectory: Path.Combine(tempDir.Path, "target"),
arguments: Array.Empty<string>(),
verbose: false,
cancellationToken: CancellationToken.None);
Assert.Equal(0, Environment.ExitCode);
Assert.Equal(resultsFile, backend.LastUploadPath);
Assert.True(File.Exists(metadataFile));
}
finally
{
Environment.ExitCode = original;
}
finally
{
Environment.ExitCode = original;
}
}
@@ -554,7 +558,219 @@ public sealed class CommandHandlersTests
Environment.ExitCode = original;
}
}
[Fact]
public async Task HandleRuntimePolicyTestAsync_WritesInteractiveTable()
{
    // Snapshot global state (exit code + Spectre console) so it can be
    // restored in the finally block regardless of assertion outcome.
    var originalExit = Environment.ExitCode;
    var originalConsole = AnsiConsole.Console;
    // Redirect Spectre.Console output into an in-memory TestConsole so the
    // rendered table can be asserted on as plain text.
    var console = new TestConsole();
    console.Width(120);
    console.Interactive();
    console.EmitAnsiSequences();
    AnsiConsole.Console = console;
    var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null));
    // Three decisions covering the verdict spectrum: allow (verified rekor),
    // block (unverified rekor), and audit (no rekor, quieted finding).
    var decisions = new Dictionary<string, RuntimePolicyImageDecision>(StringComparer.Ordinal)
    {
        ["sha256:aaa"] = new RuntimePolicyImageDecision(
            "allow",
            true,
            true,
            Array.AsReadOnly(new[] { "trusted baseline" }),
            new RuntimePolicyRekorReference("uuid-allow", "https://rekor.example/entries/uuid-allow", true),
            new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["source"] = "baseline",
                ["quieted"] = false,
                ["confidence"] = 0.97,
                ["confidenceBand"] = "high"
            })),
        ["sha256:bbb"] = new RuntimePolicyImageDecision(
            "block",
            false,
            false,
            Array.AsReadOnly(new[] { "missing attestation" }),
            new RuntimePolicyRekorReference("uuid-block", "https://rekor.example/entries/uuid-block", false),
            new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["source"] = "policy",
                ["quieted"] = false,
                ["confidence"] = 0.12,
                ["confidenceBand"] = "low"
            })),
        ["sha256:ccc"] = new RuntimePolicyImageDecision(
            "audit",
            true,
            false,
            Array.AsReadOnly(new[] { "pending sbom sync" }),
            new RuntimePolicyRekorReference(null, null, null),
            new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["source"] = "mirror",
                ["quieted"] = true,
                ["quietedBy"] = "allow-temporary",
                ["confidence"] = 0.42,
                ["confidenceBand"] = "medium"
            }))
    };
    backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult(
        300,
        DateTimeOffset.Parse("2025-10-19T12:00:00Z", CultureInfo.InvariantCulture),
        "rev-42",
        new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(decisions));
    var provider = BuildServiceProvider(backend);
    try
    {
        // Only two digests are passed on the command line; sha256:ccc comes
        // solely from the stubbed backend result.
        await CommandHandlers.HandleRuntimePolicyTestAsync(
            provider,
            namespaceValue: "prod",
            imageArguments: new[] { "sha256:aaa", "sha256:bbb" },
            filePath: null,
            labelArguments: new[] { "app=frontend" },
            outputJson: false,
            verbose: false,
            cancellationToken: CancellationToken.None);
        var output = console.Output;
        Assert.Equal(0, Environment.ExitCode);
        // Table header columns rendered by the interactive view.
        Assert.Contains("Image", output, StringComparison.Ordinal);
        Assert.Contains("Verdict", output, StringComparison.Ordinal);
        Assert.Contains("SBOM Ref", output, StringComparison.Ordinal);
        Assert.Contains("Quieted", output, StringComparison.Ordinal);
        Assert.Contains("Confidence", output, StringComparison.Ordinal);
        // Allow row: rekor uuid shown with a "(verified)" marker and the
        // confidence value formatted together with its band.
        Assert.Contains("sha256:aaa", output, StringComparison.Ordinal);
        Assert.Contains("uuid-allow", output, StringComparison.Ordinal);
        Assert.Contains("(verified)", output, StringComparison.Ordinal);
        Assert.Contains("0.97 (high)", output, StringComparison.Ordinal);
        // Block row: rekor present but flagged "(unverified)".
        Assert.Contains("sha256:bbb", output, StringComparison.Ordinal);
        Assert.Contains("uuid-block", output, StringComparison.Ordinal);
        Assert.Contains("(unverified)", output, StringComparison.Ordinal);
        // Audit row: quieted flag rendered as "yes" plus the quietedBy label.
        Assert.Contains("sha256:ccc", output, StringComparison.Ordinal);
        Assert.Contains("yes", output, StringComparison.Ordinal);
        Assert.Contains("allow-temporary", output, StringComparison.Ordinal);
        // Rows must come out in deterministic (ordinal key) order.
        Assert.True(
            output.IndexOf("sha256:aaa", StringComparison.Ordinal) <
            output.IndexOf("sha256:ccc", StringComparison.Ordinal));
    }
    finally
    {
        Environment.ExitCode = originalExit;
        AnsiConsole.Console = originalConsole;
    }
}
[Fact]
public async Task HandleRuntimePolicyTestAsync_WritesDeterministicJson()
{
    // Snapshot process-wide state (exit code, Console.Out) for restoration.
    var originalExit = Environment.ExitCode;
    var originalOut = Console.Out;
    var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null));
    // Two decisions: one with a rekor reference + confidence metadata, one
    // quieted decision with no rekor reference at all.
    var decisions = new Dictionary<string, RuntimePolicyImageDecision>(StringComparer.Ordinal)
    {
        ["sha256:json-a"] = new RuntimePolicyImageDecision(
            "allow",
            true,
            true,
            Array.AsReadOnly(new[] { "baseline allow" }),
            new RuntimePolicyRekorReference("uuid-json-allow", "https://rekor.example/entries/uuid-json-allow", true),
            new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["source"] = "baseline",
                ["confidence"] = 0.66
            })),
        ["sha256:json-b"] = new RuntimePolicyImageDecision(
            "audit",
            true,
            false,
            Array.AsReadOnly(Array.Empty<string>()),
            new RuntimePolicyRekorReference(null, null, null),
            new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["source"] = "mirror",
                ["quieted"] = true,
                ["quietedBy"] = "risk-accepted"
            }))
    };
    backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult(
        600,
        DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture),
        "rev-json-7",
        new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(decisions));
    var provider = BuildServiceProvider(backend);
    // Capture stdout: with outputJson=true the handler writes the JSON
    // document to Console.Out rather than to the Spectre console.
    using var writer = new StringWriter();
    Console.SetOut(writer);
    try
    {
        await CommandHandlers.HandleRuntimePolicyTestAsync(
            provider,
            namespaceValue: "staging",
            imageArguments: new[] { "sha256:json-a", "sha256:json-b" },
            filePath: null,
            labelArguments: Array.Empty<string>(),
            outputJson: true,
            verbose: false,
            cancellationToken: CancellationToken.None);
        var output = writer.ToString().Trim();
        Assert.Equal(0, Environment.ExitCode);
        Assert.False(string.IsNullOrWhiteSpace(output));
        using var document = JsonDocument.Parse(output);
        var root = document.RootElement;
        // Top-level envelope fields echo the stubbed evaluation result.
        Assert.Equal(600, root.GetProperty("ttlSeconds").GetInt32());
        Assert.Equal("rev-json-7", root.GetProperty("policyRevision").GetString());
        var expiresAt = root.GetProperty("expiresAtUtc").GetString();
        Assert.NotNull(expiresAt);
        // Compare as DateTimeOffset so any ISO-8601 formatting variant of the
        // same instant is accepted.
        Assert.Equal(
            DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal),
            DateTimeOffset.Parse(expiresAt!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal));
        var results = root.GetProperty("results");
        // Determinism: result keys must appear in ordinal order.
        var keys = results.EnumerateObject().Select(p => p.Name).ToArray();
        Assert.Equal(new[] { "sha256:json-a", "sha256:json-b" }, keys);
        var first = results.GetProperty("sha256:json-a");
        Assert.Equal("allow", first.GetProperty("policyVerdict").GetString());
        Assert.True(first.GetProperty("signed").GetBoolean());
        Assert.True(first.GetProperty("hasSbomReferrers").GetBoolean());
        var rekor = first.GetProperty("rekor");
        Assert.Equal("uuid-json-allow", rekor.GetProperty("uuid").GetString());
        Assert.True(rekor.GetProperty("verified").GetBoolean());
        // Additional metadata properties are flattened onto the decision object.
        Assert.Equal("baseline", first.GetProperty("source").GetString());
        Assert.Equal(0.66, first.GetProperty("confidence").GetDouble(), 3);
        var second = results.GetProperty("sha256:json-b");
        Assert.Equal("audit", second.GetProperty("policyVerdict").GetString());
        Assert.True(second.GetProperty("signed").GetBoolean());
        Assert.False(second.GetProperty("hasSbomReferrers").GetBoolean());
        Assert.Equal("mirror", second.GetProperty("source").GetString());
        Assert.True(second.GetProperty("quieted").GetBoolean());
        Assert.Equal("risk-accepted", second.GetProperty("quietedBy").GetString());
        // A decision whose rekor reference is all-null must omit the "rekor"
        // property entirely rather than emit an empty object.
        Assert.False(second.TryGetProperty("rekor", out _));
    }
    finally
    {
        Console.SetOut(originalOut);
        Environment.ExitCode = originalExit;
    }
}
private static async Task<RevocationArtifactPaths> WriteRevocationArtifactsAsync(TempDirectory temp, string? providerHint)
{
var (bundleBytes, signature, keyPem) = await BuildRevocationArtifactsAsync(providerHint);
@@ -665,10 +881,17 @@ public sealed class CommandHandlersTests
$"{Path.GetFileNameWithoutExtension(tempResultsFile)}-run.json");
return new StubExecutor(new ScannerExecutionResult(0, tempResultsFile, tempMetadataFile));
}
private sealed class StubBackendClient : IBackendOperationsClient
{
private readonly JobTriggerResult _jobResult;
private static readonly RuntimePolicyEvaluationResult DefaultRuntimePolicyResult =
new RuntimePolicyEvaluationResult(
0,
null,
null,
new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(
new Dictionary<string, RuntimePolicyImageDecision>()));
public StubBackendClient(JobTriggerResult result)
{
@@ -683,6 +906,7 @@ public sealed class CommandHandlersTests
public List<(string ExportId, string DestinationPath, string? Algorithm, string? Digest)> ExportDownloads { get; } = new();
public ExcititorOperationResult? ExcititorResult { get; set; } = new ExcititorOperationResult(true, "ok", null, null);
public IReadOnlyList<ExcititorProviderSummary> ProviderSummaries { get; set; } = Array.Empty<ExcititorProviderSummary>();
public RuntimePolicyEvaluationResult RuntimePolicyResult { get; set; } = DefaultRuntimePolicyResult;
public Task<ScannerArtifactResult> DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken)
=> throw new NotImplementedException();
@@ -726,21 +950,18 @@ public sealed class CommandHandlersTests
=> Task.FromResult(ProviderSummaries);
public Task<RuntimePolicyEvaluationResult> EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
{
var empty = new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(new Dictionary<string, RuntimePolicyImageDecision>());
return Task.FromResult(new RuntimePolicyEvaluationResult(0, null, null, empty));
}
=> Task.FromResult(RuntimePolicyResult);
}
private sealed class StubExecutor : IScannerExecutor
{
private readonly ScannerExecutionResult _result;
public StubExecutor(ScannerExecutionResult result)
{
_result = result;
}
private sealed class StubExecutor : IScannerExecutor
{
private readonly ScannerExecutionResult _result;
public StubExecutor(ScannerExecutionResult result)
{
_result = result;
}
public Task<ScannerExecutionResult> RunAsync(string runner, string entry, string targetDirectory, string resultsDirectory, IReadOnlyList<string> arguments, bool verbose, CancellationToken cancellationToken)
{
Directory.CreateDirectory(Path.GetDirectoryName(_result.ResultsPath)!);
@@ -757,8 +978,8 @@ public sealed class CommandHandlersTests
return Task.FromResult(_result);
}
}
}
private sealed class StubInstaller : IScannerInstaller
{
public Task InstallAsync(string artifactPath, bool verbose, CancellationToken cancellationToken)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
@@ -6,163 +6,163 @@ using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.Transport;
using StellaOps.Cli.Tests.Testing;
namespace StellaOps.Cli.Tests.Services;
public sealed class BackendOperationsClientTests
{
[Fact]
public async Task DownloadScannerAsync_VerifiesDigestAndWritesMetadata()
{
    using var temp = new TempDirectory();
    var contentBytes = Encoding.UTF8.GetBytes("scanner-blob");
    // Advertise the correct SHA-256 of the payload so digest verification passes.
    var digestHex = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant();
    var handler = new StubHttpMessageHandler((request, _) =>
    {
        var response = new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new ByteArrayContent(contentBytes),
            RequestMessage = request
        };
        // Server-supplied digest header the client validates against.
        response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}");
        response.Content.Headers.LastModified = DateTimeOffset.UtcNow;
        response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
        return response;
    });
    var httpClient = new HttpClient(handler)
    {
        BaseAddress = new Uri("https://concelier.example")
    };
    var options = new StellaOpsCliOptions
    {
        BackendUrl = "https://concelier.example",
        ScannerCacheDirectory = temp.Path,
        ScannerDownloadAttempts = 1
    };
    var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
    var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
    var targetPath = Path.Combine(temp.Path, "scanner.tar.gz");
    var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: true, CancellationToken.None);
    // First download: must come from the network, not the cache.
    Assert.False(result.FromCache);
    Assert.True(File.Exists(targetPath));
    // A sidecar "<artifact>.metadata.json" records digest and channel for
    // later cache validation.
    var metadataPath = targetPath + ".metadata.json";
    Assert.True(File.Exists(metadataPath));
    using var document = JsonDocument.Parse(File.ReadAllText(metadataPath));
    Assert.Equal($"sha256:{digestHex}", document.RootElement.GetProperty("digest").GetString());
    Assert.Equal("stable", document.RootElement.GetProperty("channel").GetString());
}
[Fact]
public async Task DownloadScannerAsync_ThrowsOnDigestMismatch()
{
using var temp = new TempDirectory();
var contentBytes = Encoding.UTF8.GetBytes("scanner-data");
var handler = new StubHttpMessageHandler((request, _) =>
{
var response = new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(contentBytes),
RequestMessage = request
};
response.Headers.Add("X-StellaOps-Digest", "sha256:deadbeef");
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://concelier.example",
ScannerCacheDirectory = temp.Path,
ScannerDownloadAttempts = 1
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var targetPath = Path.Combine(temp.Path, "scanner.tar.gz");
await Assert.ThrowsAsync<InvalidOperationException>(() => client.DownloadScannerAsync("stable", targetPath, overwrite: true, verbose: false, CancellationToken.None));
Assert.False(File.Exists(targetPath));
}
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.Transport;
using StellaOps.Cli.Tests.Testing;
namespace StellaOps.Cli.Tests.Services;
public sealed class BackendOperationsClientTests
{
[Fact]
public async Task DownloadScannerAsync_VerifiesDigestAndWritesMetadata()
{
using var temp = new TempDirectory();
var contentBytes = Encoding.UTF8.GetBytes("scanner-blob");
var digestHex = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant();
var handler = new StubHttpMessageHandler((request, _) =>
{
var response = new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(contentBytes),
RequestMessage = request
};
response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}");
response.Content.Headers.LastModified = DateTimeOffset.UtcNow;
response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://concelier.example",
ScannerCacheDirectory = temp.Path,
ScannerDownloadAttempts = 1
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var targetPath = Path.Combine(temp.Path, "scanner.tar.gz");
var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: true, CancellationToken.None);
Assert.False(result.FromCache);
Assert.True(File.Exists(targetPath));
var metadataPath = targetPath + ".metadata.json";
Assert.True(File.Exists(metadataPath));
using var document = JsonDocument.Parse(File.ReadAllText(metadataPath));
Assert.Equal($"sha256:{digestHex}", document.RootElement.GetProperty("digest").GetString());
Assert.Equal("stable", document.RootElement.GetProperty("channel").GetString());
}
[Fact]
public async Task DownloadScannerAsync_ThrowsOnDigestMismatch()
{
    using var tempDir = new TempDirectory();
    var payload = Encoding.UTF8.GetBytes("scanner-data");

    // The stub advertises a digest that cannot match the payload, so the
    // client's integrity check must fail.
    var messageHandler = new StubHttpMessageHandler((request, _) =>
    {
        var response = new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new ByteArrayContent(payload),
            RequestMessage = request
        };
        response.Headers.Add("X-StellaOps-Digest", "sha256:deadbeef");
        return response;
    });

    var http = new HttpClient(messageHandler)
    {
        BaseAddress = new Uri("https://concelier.example")
    };
    var cliOptions = new StellaOpsCliOptions
    {
        BackendUrl = "https://concelier.example",
        ScannerCacheDirectory = tempDir.Path,
        ScannerDownloadAttempts = 1
    };
    var logging = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
    var backendClient = new BackendOperationsClient(http, cliOptions, logging.CreateLogger<BackendOperationsClient>());
    var destination = Path.Combine(tempDir.Path, "scanner.tar.gz");

    // Mismatch surfaces as InvalidOperationException, and no partially
    // written artifact may be left behind at the destination.
    await Assert.ThrowsAsync<InvalidOperationException>(() => backendClient.DownloadScannerAsync("stable", destination, overwrite: true, verbose: false, CancellationToken.None));
    Assert.False(File.Exists(destination));
}
[Fact]
public async Task DownloadScannerAsync_RetriesOnFailure()
{
using var temp = new TempDirectory();
var successBytes = Encoding.UTF8.GetBytes("success");
var digestHex = Convert.ToHexString(SHA256.HashData(successBytes)).ToLowerInvariant();
var attempts = 0;
var handler = new StubHttpMessageHandler(
(request, _) =>
{
attempts++;
return new HttpResponseMessage(HttpStatusCode.InternalServerError)
{
RequestMessage = request,
Content = new StringContent("error")
};
},
(request, _) =>
{
attempts++;
var response = new HttpResponseMessage(HttpStatusCode.OK)
{
RequestMessage = request,
Content = new ByteArrayContent(successBytes)
};
response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}");
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://concelier.example",
ScannerCacheDirectory = temp.Path,
ScannerDownloadAttempts = 3
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var targetPath = Path.Combine(temp.Path, "scanner.tar.gz");
var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: false, CancellationToken.None);
Assert.Equal(2, attempts);
var successBytes = Encoding.UTF8.GetBytes("success");
var digestHex = Convert.ToHexString(SHA256.HashData(successBytes)).ToLowerInvariant();
var attempts = 0;
var handler = new StubHttpMessageHandler(
(request, _) =>
{
attempts++;
return new HttpResponseMessage(HttpStatusCode.InternalServerError)
{
RequestMessage = request,
Content = new StringContent("error")
};
},
(request, _) =>
{
attempts++;
var response = new HttpResponseMessage(HttpStatusCode.OK)
{
RequestMessage = request,
Content = new ByteArrayContent(successBytes)
};
response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}");
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://concelier.example",
ScannerCacheDirectory = temp.Path,
ScannerDownloadAttempts = 3
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var targetPath = Path.Combine(temp.Path, "scanner.tar.gz");
var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: false, CancellationToken.None);
Assert.Equal(2, attempts);
Assert.False(result.FromCache);
Assert.True(File.Exists(targetPath));
}
@@ -251,73 +251,73 @@ public sealed class BackendOperationsClientTests
await Assert.ThrowsAsync<InvalidOperationException>(() => client.UploadScanResultsAsync(filePath, CancellationToken.None));
Assert.Equal(2, attempts);
}
[Fact]
public async Task TriggerJobAsync_ReturnsAcceptedResult()
{
    // Stub backend answers 202 Accepted with a queued job-run payload and a
    // Location header pointing at the new run resource.
    var stubHandler = new StubHttpMessageHandler((request, _) =>
    {
        var response = new HttpResponseMessage(HttpStatusCode.Accepted)
        {
            RequestMessage = request,
            Content = JsonContent.Create(new JobRunResponse
            {
                RunId = Guid.NewGuid(),
                Status = "queued",
                Kind = "export:json",
                Trigger = "cli",
                CreatedAt = DateTimeOffset.UtcNow
            })
        };
        response.Headers.Location = new Uri("/jobs/export:json/runs/123", UriKind.Relative);
        return response;
    });

    var http = new HttpClient(stubHandler)
    {
        BaseAddress = new Uri("https://concelier.example")
    };
    var cliOptions = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" };
    var logs = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
    var backend = new BackendOperationsClient(http, cliOptions, logs.CreateLogger<BackendOperationsClient>());

    var trigger = await backend.TriggerJobAsync("export:json", new Dictionary<string, object?>(), CancellationToken.None);

    // An accepted trigger reports success and echoes the run location.
    Assert.True(trigger.Success);
    Assert.Equal("Accepted", trigger.Message);
    Assert.Equal("/jobs/export:json/runs/123", trigger.Location);
}
[Fact]
[Fact]
public async Task TriggerJobAsync_ReturnsAcceptedResult()
{
var handler = new StubHttpMessageHandler((request, _) =>
{
var response = new HttpResponseMessage(HttpStatusCode.Accepted)
{
RequestMessage = request,
Content = JsonContent.Create(new JobRunResponse
{
RunId = Guid.NewGuid(),
Status = "queued",
Kind = "export:json",
Trigger = "cli",
CreatedAt = DateTimeOffset.UtcNow
})
};
response.Headers.Location = new Uri("/jobs/export:json/runs/123", UriKind.Relative);
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" };
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var result = await client.TriggerJobAsync("export:json", new Dictionary<string, object?>(), CancellationToken.None);
Assert.True(result.Success);
Assert.Equal("Accepted", result.Message);
Assert.Equal("/jobs/export:json/runs/123", result.Location);
}
[Fact]
public async Task TriggerJobAsync_ReturnsFailureMessage()
{
var handler = new StubHttpMessageHandler((request, _) =>
{
var problem = new
{
title = "Job already running",
detail = "export job active"
};
var response = new HttpResponseMessage(HttpStatusCode.Conflict)
{
RequestMessage = request,
Content = JsonContent.Create(problem)
};
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" };
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var result = await client.TriggerJobAsync("export:json", new Dictionary<string, object?>(), CancellationToken.None);
{
title = "Job already running",
detail = "export job active"
};
var response = new HttpResponseMessage(HttpStatusCode.Conflict)
{
RequestMessage = request,
Content = JsonContent.Create(problem)
};
return response;
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://concelier.example")
};
var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" };
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var result = await client.TriggerJobAsync("export:json", new Dictionary<string, object?>(), CancellationToken.None);
Assert.False(result.Success);
Assert.Contains("Job already running", result.Message);
@@ -403,18 +403,19 @@ public sealed class BackendOperationsClientTests
""ghcr.io/app@sha256:abc"": {
""policyVerdict"": ""pass"",
""signed"": true,
""hasSbom"": true,
""hasSbomReferrers"": true,
""reasons"": [],
""rekor"": { ""uuid"": ""uuid-1"", ""url"": ""https://rekor.example/uuid-1"" },
""rekor"": { ""uuid"": ""uuid-1"", ""url"": ""https://rekor.example/uuid-1"", ""verified"": true },
""confidence"": 0.87,
""quiet"": false,
""quieted"": false,
""metadata"": { ""note"": ""cached"" }
},
""ghcr.io/api@sha256:def"": {
""policyVerdict"": ""fail"",
""signed"": false,
""hasSbom"": false,
""reasons"": [""unsigned"", ""missing sbom""]
""hasSbomReferrers"": false,
""reasons"": [""unsigned"", ""missing sbom""],
""quietedBy"": ""manual-override""
}
}
}";
@@ -458,13 +459,14 @@ public sealed class BackendOperationsClientTests
var primary = result.Decisions["ghcr.io/app@sha256:abc"];
Assert.Equal("pass", primary.PolicyVerdict);
Assert.True(primary.Signed);
Assert.True(primary.HasSbom);
Assert.True(primary.HasSbomReferrers);
Assert.Empty(primary.Reasons);
Assert.NotNull(primary.Rekor);
Assert.Equal("uuid-1", primary.Rekor!.Uuid);
Assert.Equal("https://rekor.example/uuid-1", primary.Rekor.Url);
Assert.True(primary.Rekor.Verified);
Assert.Equal(0.87, Assert.IsType<double>(primary.AdditionalProperties["confidence"]), 3);
Assert.False(Assert.IsType<bool>(primary.AdditionalProperties["quiet"]));
Assert.False(Assert.IsType<bool>(primary.AdditionalProperties["quieted"]));
var metadataJson = Assert.IsType<string>(primary.AdditionalProperties["metadata"]);
using var metadataDocument = JsonDocument.Parse(metadataJson);
Assert.Equal("cached", metadataDocument.RootElement.GetProperty("note").GetString());
@@ -472,10 +474,11 @@ public sealed class BackendOperationsClientTests
var secondary = result.Decisions["ghcr.io/api@sha256:def"];
Assert.Equal("fail", secondary.PolicyVerdict);
Assert.False(secondary.Signed);
Assert.False(secondary.HasSbom);
Assert.False(secondary.HasSbomReferrers);
Assert.Collection(secondary.Reasons,
item => Assert.Equal("unsigned", item),
item => Assert.Equal("missing sbom", item));
Assert.Equal("manual-override", Assert.IsType<string>(secondary.AdditionalProperties["quietedBy"]));
}
private sealed class StubTokenClient : IStellaOpsTokenClient

View File

@@ -16,13 +16,14 @@
<!-- https://learn.microsoft.com/dotnet/core/testing/microsoft-testing-platform-extensions-code-coverage -->
</PropertyGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Cli\StellaOps.Cli.csproj" />
<ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" />
</ItemGroup>
</Project>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Spectre.Console.Testing" Version="0.48.0" />
<ProjectReference Include="..\StellaOps.Cli\StellaOps.Cli.csproj" />
<ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" />
</ItemGroup>
</Project>

View File

@@ -358,6 +358,48 @@ internal static class CommandFactory
return CommandHandlers.HandleExcititorExportAsync(services, format, delta, scope, since, provider, output, verbose, cancellationToken);
});
var backfill = new Command("backfill-statements", "Replay historical raw documents into Excititor statements.");
var backfillRetrievedSinceOption = new Option<DateTimeOffset?>("--retrieved-since")
{
Description = "Only process raw documents retrieved on or after the provided ISO-8601 timestamp."
};
var backfillForceOption = new Option<bool>("--force")
{
Description = "Reprocess documents even if statements already exist."
};
var backfillBatchSizeOption = new Option<int>("--batch-size")
{
Description = "Number of raw documents to fetch per batch (default 100)."
};
var backfillMaxDocumentsOption = new Option<int?>("--max-documents")
{
Description = "Optional maximum number of raw documents to process."
};
backfill.Add(backfillRetrievedSinceOption);
backfill.Add(backfillForceOption);
backfill.Add(backfillBatchSizeOption);
backfill.Add(backfillMaxDocumentsOption);
backfill.SetAction((parseResult, _) =>
{
var retrievedSince = parseResult.GetValue(backfillRetrievedSinceOption);
var force = parseResult.GetValue(backfillForceOption);
var batchSize = parseResult.GetValue(backfillBatchSizeOption);
if (batchSize <= 0)
{
batchSize = 100;
}
var maxDocuments = parseResult.GetValue(backfillMaxDocumentsOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleExcititorBackfillStatementsAsync(
services,
retrievedSince,
force,
batchSize,
maxDocuments,
verbose,
cancellationToken);
});
var verify = new Command("verify", "Verify Excititor exports or attestations.");
var exportIdOption = new Option<string?>("--export-id")
{
@@ -408,6 +450,7 @@ internal static class CommandFactory
excititor.Add(resume);
excititor.Add(list);
excititor.Add(export);
excititor.Add(backfill);
excititor.Add(verify);
excititor.Add(reconcile);
return excititor;

View File

@@ -25,103 +25,103 @@ using StellaOps.Cli.Telemetry;
using StellaOps.Cryptography;
namespace StellaOps.Cli.Commands;
internal static class CommandHandlers
{
/// <summary>
/// Handles the <c>scanner download</c> CLI command: fetches the scanner
/// bundle for <paramref name="channel"/> via the backend client and
/// optionally installs it.
/// </summary>
/// <param name="services">Root service provider; a scope is created per invocation.</param>
/// <param name="channel">Release channel of the scanner bundle to download.</param>
/// <param name="output">Target path for the bundle; empty/null lets the client pick a default.</param>
/// <param name="overwrite">Whether to overwrite an existing artifact at the target path.</param>
/// <param name="install">When true, runs the installer on the downloaded bundle.</param>
/// <param name="verbose">Raises log verbosity to Debug for the duration of the command.</param>
/// <param name="cancellationToken">Token cancelling the download/install.</param>
/// <remarks>
/// Sets <see cref="Environment.ExitCode"/> to 0 on success and 1 on any
/// failure instead of propagating the exception (CLI contract).
/// </remarks>
public static async Task HandleScannerDownloadAsync(
    IServiceProvider services,
    string channel,
    string? output,
    bool overwrite,
    bool install,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-download");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    // Temporarily raise verbosity; restored in the finally block.
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "scanner download");
    activity?.SetTag("stellaops.cli.channel", channel);
    using var duration = CliMetrics.MeasureCommandDuration("scanner download");
    try
    {
        var result = await client.DownloadScannerAsync(channel, output ?? string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false);
        if (result.FromCache)
        {
            logger.LogInformation("Using cached scanner at {Path}.", result.Path);
        }
        else
        {
            logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes);
        }
        CliMetrics.RecordScannerDownload(channel, result.FromCache);
        if (install)
        {
            var installer = scope.ServiceProvider.GetRequiredService<IScannerInstaller>();
            await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false);
            CliMetrics.RecordScannerInstall(channel);
        }
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        // CLI contract: report failure via exit code rather than an unhandled exception.
        logger.LogError(ex, "Failed to download scanner bundle.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}
public static async Task HandleScannerRunAsync(
IServiceProvider services,
string runner,
string entry,
string targetDirectory,
IReadOnlyList<string> arguments,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var executor = scope.ServiceProvider.GetRequiredService<IScannerExecutor>();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-run");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
var previousLevel = verbosity.MinimumLevel;
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal);
activity?.SetTag("stellaops.cli.command", "scan run");
activity?.SetTag("stellaops.cli.runner", runner);
activity?.SetTag("stellaops.cli.entry", entry);
activity?.SetTag("stellaops.cli.target", targetDirectory);
using var duration = CliMetrics.MeasureCommandDuration("scan run");
try
{
var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
var resultsDirectory = options.ResultsDirectory;
var executionResult = await executor.RunAsync(
runner,
entry,
targetDirectory,
resultsDirectory,
arguments,
verbose,
cancellationToken).ConfigureAwait(false);
Environment.ExitCode = executionResult.ExitCode;
CliMetrics.RecordScanRun(runner, executionResult.ExitCode);
internal static class CommandHandlers
{
/// <summary>
/// Handles the <c>scanner download</c> CLI command: fetches the scanner bundle for a
/// channel, optionally installs it, and reports success via <see cref="Environment.ExitCode"/>.
/// </summary>
public static async Task HandleScannerDownloadAsync(
    IServiceProvider services,
    string channel,
    string? output,
    bool overwrite,
    bool install,
    bool verbose,
    CancellationToken cancellationToken)
{
    // Resolve command-scoped services for this invocation.
    await using var scope = services.CreateAsyncScope();
    var provider = scope.ServiceProvider;
    var backend = provider.GetRequiredService<IBackendOperationsClient>();
    var log = provider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-download");
    var verbosityState = provider.GetRequiredService<VerbosityState>();
    var restoreLevel = verbosityState.MinimumLevel;
    verbosityState.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;

    using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "scanner download");
    activity?.SetTag("stellaops.cli.channel", channel);
    using var duration = CliMetrics.MeasureCommandDuration("scanner download");

    try
    {
        var result = await backend
            .DownloadScannerAsync(channel, output ?? string.Empty, overwrite, verbose, cancellationToken)
            .ConfigureAwait(false);

        // Report whether the artifact came from the local cache or a fresh download.
        if (result.FromCache)
        {
            log.LogInformation("Using cached scanner at {Path}.", result.Path);
        }
        else
        {
            log.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes);
        }

        CliMetrics.RecordScannerDownload(channel, result.FromCache);

        if (install)
        {
            var installer = provider.GetRequiredService<IScannerInstaller>();
            await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false);
            CliMetrics.RecordScannerInstall(channel);
        }

        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        log.LogError(ex, "Failed to download scanner bundle.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Always put back the verbosity level the command started with.
        verbosityState.MinimumLevel = restoreLevel;
    }
}
public static async Task HandleScannerRunAsync(
IServiceProvider services,
string runner,
string entry,
string targetDirectory,
IReadOnlyList<string> arguments,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var executor = scope.ServiceProvider.GetRequiredService<IScannerExecutor>();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-run");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
var previousLevel = verbosity.MinimumLevel;
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal);
activity?.SetTag("stellaops.cli.command", "scan run");
activity?.SetTag("stellaops.cli.runner", runner);
activity?.SetTag("stellaops.cli.entry", entry);
activity?.SetTag("stellaops.cli.target", targetDirectory);
using var duration = CliMetrics.MeasureCommandDuration("scan run");
try
{
var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
var resultsDirectory = options.ResultsDirectory;
var executionResult = await executor.RunAsync(
runner,
entry,
targetDirectory,
resultsDirectory,
arguments,
verbose,
cancellationToken).ConfigureAwait(false);
Environment.ExitCode = executionResult.ExitCode;
CliMetrics.RecordScanRun(runner, executionResult.ExitCode);
if (executionResult.ExitCode == 0)
{
var backend = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
@@ -138,128 +138,128 @@ internal static class CommandHandlers
logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath);
activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath);
}
catch (Exception ex)
{
logger.LogError(ex, "Scanner execution failed.");
Environment.ExitCode = 1;
}
finally
{
verbosity.MinimumLevel = previousLevel;
}
}
/// <summary>
/// Handles the <c>scan upload</c> CLI command: submits a local scan-results file to the backend.
/// Sets <see cref="Environment.ExitCode"/> to 0 on success and 1 on failure.
/// </summary>
/// <param name="services">Root service provider used to create a command-scoped container.</param>
/// <param name="file">Path (absolute or relative) of the scan-results file to upload.</param>
/// <param name="verbose">When true, lowers the minimum log level to Debug for this command.</param>
/// <param name="cancellationToken">Token that aborts the upload when signalled.</param>
public static async Task HandleScanUploadAsync(
    IServiceProvider services,
    string file,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-upload");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    // Trace tags mirror the CLI invocation for diagnostics.
    using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "scan upload");
    activity?.SetTag("stellaops.cli.file", file);
    using var duration = CliMetrics.MeasureCommandDuration("scan upload");
    try
    {
        // Normalize to an absolute path before handing off to the backend client.
        var path = Path.GetFullPath(file);
        await client.UploadScanResultsAsync(path, cancellationToken).ConfigureAwait(false);
        logger.LogInformation("Scan results uploaded successfully.");
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to upload scan results.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Restore the verbosity level captured at entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles the <c>db fetch</c> CLI command: triggers a connector job of the form
/// <c>source:{source}:{stage}</c> on the backend, forwarding an optional mode parameter.
/// On failure sets <see cref="Environment.ExitCode"/> to 1; on the success path the
/// exit code is set by <c>TriggerJobAsync</c>.
/// </summary>
/// <param name="services">Root service provider used to create a command-scoped container.</param>
/// <param name="source">Connector source identifier embedded in the job kind.</param>
/// <param name="stage">Connector stage identifier embedded in the job kind.</param>
/// <param name="mode">Optional mode forwarded as a job parameter when non-blank.</param>
/// <param name="verbose">When true, lowers the minimum log level to Debug for this command.</param>
/// <param name="cancellationToken">Token that aborts the trigger request when signalled.</param>
public static async Task HandleConnectorJobAsync(
    IServiceProvider services,
    string source,
    string stage,
    string? mode,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("db-connector");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "db fetch");
    activity?.SetTag("stellaops.cli.source", source);
    activity?.SetTag("stellaops.cli.stage", stage);
    // The mode tag is only attached when a mode was actually supplied.
    if (!string.IsNullOrWhiteSpace(mode))
    {
        activity?.SetTag("stellaops.cli.mode", mode);
    }
    using var duration = CliMetrics.MeasureCommandDuration("db fetch");
    try
    {
        var jobKind = $"source:{source}:{stage}";
        var parameters = new Dictionary<string, object?>(StringComparer.Ordinal);
        if (!string.IsNullOrWhiteSpace(mode))
        {
            parameters["mode"] = mode;
        }
        await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Connector job failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Restore the verbosity level captured at entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles the <c>db merge</c> CLI command by triggering the backend "merge:reconcile" job.
/// On failure sets <see cref="Environment.ExitCode"/> to 1; on the success path the exit
/// code is set by <c>TriggerJobAsync</c>.
/// </summary>
public static async Task HandleMergeJobAsync(
    IServiceProvider services,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var provider = scope.ServiceProvider;
    var backend = provider.GetRequiredService<IBackendOperationsClient>();
    var log = provider.GetRequiredService<ILoggerFactory>().CreateLogger("db-merge");
    var verbosityState = provider.GetRequiredService<VerbosityState>();
    var restoreLevel = verbosityState.MinimumLevel;
    verbosityState.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;

    using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "db merge");
    using var duration = CliMetrics.MeasureCommandDuration("db merge");

    try
    {
        // Reconcile takes no parameters; pass an empty ordinal-keyed map.
        var noParameters = new Dictionary<string, object?>(StringComparer.Ordinal);
        await TriggerJobAsync(backend, log, "merge:reconcile", noParameters, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        log.LogError(ex, "Merge job failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosityState.MinimumLevel = restoreLevel;
    }
}
catch (Exception ex)
{
logger.LogError(ex, "Scanner execution failed.");
Environment.ExitCode = 1;
}
finally
{
verbosity.MinimumLevel = previousLevel;
}
}
/// <summary>
/// Handles the <c>scan upload</c> CLI command: submits a local scan-results file to the backend.
/// Sets <see cref="Environment.ExitCode"/> to 0 on success and 1 on failure.
/// </summary>
/// <param name="services">Root service provider used to create a command-scoped container.</param>
/// <param name="file">Path (absolute or relative) of the scan-results file to upload.</param>
/// <param name="verbose">When true, lowers the minimum log level to Debug for this command.</param>
/// <param name="cancellationToken">Token that aborts the upload when signalled.</param>
public static async Task HandleScanUploadAsync(
    IServiceProvider services,
    string file,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("scanner-upload");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    // Trace tags mirror the CLI invocation for diagnostics.
    using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "scan upload");
    activity?.SetTag("stellaops.cli.file", file);
    using var duration = CliMetrics.MeasureCommandDuration("scan upload");
    try
    {
        // Normalize to an absolute path before handing off to the backend client.
        var path = Path.GetFullPath(file);
        await client.UploadScanResultsAsync(path, cancellationToken).ConfigureAwait(false);
        logger.LogInformation("Scan results uploaded successfully.");
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to upload scan results.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Restore the verbosity level captured at entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles the <c>db fetch</c> CLI command: triggers a connector job of the form
/// <c>source:{source}:{stage}</c> on the backend, forwarding an optional mode parameter.
/// On failure sets <see cref="Environment.ExitCode"/> to 1; on the success path the
/// exit code is set by <c>TriggerJobAsync</c>.
/// </summary>
/// <param name="services">Root service provider used to create a command-scoped container.</param>
/// <param name="source">Connector source identifier embedded in the job kind.</param>
/// <param name="stage">Connector stage identifier embedded in the job kind.</param>
/// <param name="mode">Optional mode forwarded as a job parameter when non-blank.</param>
/// <param name="verbose">When true, lowers the minimum log level to Debug for this command.</param>
/// <param name="cancellationToken">Token that aborts the trigger request when signalled.</param>
public static async Task HandleConnectorJobAsync(
    IServiceProvider services,
    string source,
    string stage,
    string? mode,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("db-connector");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "db fetch");
    activity?.SetTag("stellaops.cli.source", source);
    activity?.SetTag("stellaops.cli.stage", stage);
    // The mode tag is only attached when a mode was actually supplied.
    if (!string.IsNullOrWhiteSpace(mode))
    {
        activity?.SetTag("stellaops.cli.mode", mode);
    }
    using var duration = CliMetrics.MeasureCommandDuration("db fetch");
    try
    {
        var jobKind = $"source:{source}:{stage}";
        var parameters = new Dictionary<string, object?>(StringComparer.Ordinal);
        if (!string.IsNullOrWhiteSpace(mode))
        {
            parameters["mode"] = mode;
        }
        await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Connector job failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Restore the verbosity level captured at entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles the <c>db merge</c> CLI command by triggering the backend "merge:reconcile" job.
/// On failure sets <see cref="Environment.ExitCode"/> to 1; on the success path the exit
/// code is set by <c>TriggerJobAsync</c>.
/// </summary>
/// <param name="services">Root service provider used to create a command-scoped container.</param>
/// <param name="verbose">When true, lowers the minimum log level to Debug for this command.</param>
/// <param name="cancellationToken">Token that aborts the trigger request when signalled.</param>
public static async Task HandleMergeJobAsync(
    IServiceProvider services,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("db-merge");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "db merge");
    using var duration = CliMetrics.MeasureCommandDuration("db merge");
    try
    {
        // Reconcile takes no parameters; an empty ordinal-keyed map is passed through.
        await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary<string, object?>(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Merge job failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        // Restore the verbosity level captured at entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
public static async Task HandleExportJobAsync(
IServiceProvider services,
string format,
@@ -271,16 +271,16 @@ internal static class CommandHandlers
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("db-export");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
var previousLevel = verbosity.MinimumLevel;
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client);
activity?.SetTag("stellaops.cli.command", "db export");
activity?.SetTag("stellaops.cli.format", format);
activity?.SetTag("stellaops.cli.delta", delta);
await using var scope = services.CreateAsyncScope();
var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("db-export");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
var previousLevel = verbosity.MinimumLevel;
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client);
activity?.SetTag("stellaops.cli.command", "db export");
activity?.SetTag("stellaops.cli.format", format);
activity?.SetTag("stellaops.cli.delta", delta);
using var duration = CliMetrics.MeasureCommandDuration("db export");
activity?.SetTag("stellaops.cli.publish_full", publishFull);
activity?.SetTag("stellaops.cli.publish_delta", publishDelta);
@@ -330,16 +330,16 @@ internal static class CommandHandlers
{
parameters["includeDelta"] = includeDelta.Value;
}
await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
logger.LogError(ex, "Export job failed.");
Environment.ExitCode = 1;
}
finally
{
await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
logger.LogError(ex, "Export job failed.");
Environment.ExitCode = 1;
}
finally
{
verbosity.MinimumLevel = previousLevel;
}
}
@@ -723,6 +723,62 @@ internal static class CommandHandlers
}
}
/// <summary>
/// Handles the <c>excititor backfill-statements</c> CLI command by invoking the
/// Excititor <c>admin/backfill-statements</c> operation on the backend.
/// </summary>
/// <param name="services">Root service provider forwarded to the Excititor command executor.</param>
/// <param name="retrievedSince">Optional lower bound on document retrieval time; serialized as ISO-8601 UTC.</param>
/// <param name="force">Forwarded to the backend as the <c>force</c> payload flag.</param>
/// <param name="batchSize">Documents per batch; must be greater than zero.</param>
/// <param name="maxDocuments">Optional overall document cap; must be greater than zero when specified.</param>
/// <param name="verbose">When true, lowers the minimum log level for this command.</param>
/// <param name="cancellationToken">Token that aborts the backend operation when signalled.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown synchronously when <paramref name="batchSize"/> or <paramref name="maxDocuments"/> is out of range.
/// </exception>
public static Task HandleExcititorBackfillStatementsAsync(
    IServiceProvider services,
    DateTimeOffset? retrievedSince,
    bool force,
    int batchSize,
    int? maxDocuments,
    bool verbose,
    CancellationToken cancellationToken)
{
    if (batchSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero.");
    }

    if (maxDocuments.HasValue && maxDocuments.Value <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified.");
    }

    // Format once: the same round-trip ("O") UTC value feeds both the payload and the trace tags.
    var retrievedSinceIso = retrievedSince?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);

    var payload = new Dictionary<string, object?>(StringComparer.Ordinal)
    {
        ["force"] = force,
        ["batchSize"] = batchSize,
        // May be null; RemoveNullValues strips absent entries before the request is sent.
        ["maxDocuments"] = maxDocuments
    };

    if (retrievedSinceIso is not null)
    {
        payload["retrievedSince"] = retrievedSinceIso;
    }

    var activityTags = new Dictionary<string, object?>(StringComparer.Ordinal)
    {
        ["stellaops.cli.force"] = force,
        ["stellaops.cli.batch_size"] = batchSize,
        ["stellaops.cli.max_documents"] = maxDocuments
    };

    if (retrievedSinceIso is not null)
    {
        activityTags["stellaops.cli.retrieved_since"] = retrievedSinceIso;
    }

    return ExecuteExcititorCommandAsync(
        services,
        commandName: "excititor backfill-statements",
        verbose,
        activityTags,
        client => client.ExecuteExcititorOperationAsync(
            "admin/backfill-statements",
            HttpMethod.Post,
            RemoveNullValues(payload),
            cancellationToken),
        cancellationToken);
}
public static Task HandleExcititorVerifyAsync(
IServiceProvider services,
string? exportId,
@@ -2208,7 +2264,7 @@ internal static class CommandHandlers
{
["policyVerdict"] = decision.PolicyVerdict,
["signed"] = decision.Signed,
["hasSbom"] = decision.HasSbom
["hasSbomReferrers"] = decision.HasSbomReferrers
};
if (decision.Reasons.Count > 0)
@@ -2218,11 +2274,26 @@ internal static class CommandHandlers
if (decision.Rekor is not null)
{
map["rekor"] = new Dictionary<string, object?>(StringComparer.Ordinal)
var rekorMap = new Dictionary<string, object?>(StringComparer.Ordinal);
if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid))
{
["uuid"] = decision.Rekor.Uuid,
["url"] = decision.Rekor.Url
};
rekorMap["uuid"] = decision.Rekor.Uuid;
}
if (!string.IsNullOrWhiteSpace(decision.Rekor.Url))
{
rekorMap["url"] = decision.Rekor.Url;
}
if (decision.Rekor.Verified.HasValue)
{
rekorMap["verified"] = decision.Rekor.Verified;
}
if (rekorMap.Count > 0)
{
map["rekor"] = rekorMap;
}
}
foreach (var kvp in decision.AdditionalProperties)
@@ -2240,7 +2311,8 @@ internal static class CommandHandlers
if (AnsiConsole.Profile.Capabilities.Interactive)
{
var table = new Table().Border(TableBorder.Rounded).AddColumns("Image", "Verdict", "Signed", "SBOM", "Reasons", "Attestation");
var table = new Table().Border(TableBorder.Rounded)
.AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation");
foreach (var image in orderedImages)
{
@@ -2250,9 +2322,11 @@ internal static class CommandHandlers
image,
decision.PolicyVerdict,
FormatBoolean(decision.Signed),
FormatBoolean(decision.HasSbom),
FormatBoolean(decision.HasSbomReferrers),
FormatQuietedDisplay(decision.AdditionalProperties),
FormatConfidenceDisplay(decision.AdditionalProperties),
decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-",
string.IsNullOrWhiteSpace(decision.Rekor?.Uuid) ? "-" : decision.Rekor!.Uuid!);
FormatAttestation(decision.Rekor));
summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? count + 1 : 1;
@@ -2264,7 +2338,7 @@ internal static class CommandHandlers
}
else
{
table.AddRow(image, "<missing>", "-", "-", "-", "-");
table.AddRow(image, "<missing>", "-", "-", "-", "-", "-", "-");
}
}
@@ -2278,12 +2352,14 @@ internal static class CommandHandlers
{
var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none";
logger.LogInformation(
"{Image} -> verdict={Verdict} signed={Signed} sbom={Sbom} attestation={Attestation} reasons={Reasons}",
"{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}",
image,
decision.PolicyVerdict,
FormatBoolean(decision.Signed),
FormatBoolean(decision.HasSbom),
string.IsNullOrWhiteSpace(decision.Rekor?.Uuid) ? "-" : decision.Rekor!.Uuid!,
FormatBoolean(decision.HasSbomReferrers),
FormatQuietedDisplay(decision.AdditionalProperties),
FormatConfidenceDisplay(decision.AdditionalProperties),
FormatAttestation(decision.Rekor),
reasons);
summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? count + 1 : 1;
@@ -2346,6 +2422,144 @@ internal static class CommandHandlers
// Renders a tri-state flag for table cells: "yes"/"no" for known values, "unknown" when absent.
private static string FormatBoolean(bool? value)
{
    switch (value)
    {
        case true:
            return "yes";
        case false:
            return "no";
        default:
            return "unknown";
    }
}
// Summarizes quiet/suppression metadata as a short display cell:
// "yes"/"yes (source)", "no", "? (source)" when only the source is known, or "-" when nothing is set.
private static string FormatQuietedDisplay(IReadOnlyDictionary<string, object?> metadata)
{
    var quieted = GetMetadataBoolean(metadata, "quieted", "quiet");
    var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason");
    var hasSource = !string.IsNullOrWhiteSpace(quietedBy);

    return quieted switch
    {
        true => hasSource ? $"yes ({quietedBy})" : "yes",
        false => "no",
        null => hasSource ? $"? ({quietedBy})" : "-",
    };
}
// Renders confidence metadata for display: "<score> (<band>)" when both are known,
// the score alone (3 decimal places, invariant culture), the band alone, or "-" when absent.
private static string FormatConfidenceDisplay(IReadOnlyDictionary<string, object?> metadata)
{
    var confidence = GetMetadataDouble(metadata, "confidence");
    var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier");
    // Both pieces present: combine, e.g. "0.875 (high)".
    if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand))
    {
        return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand);
    }
    if (confidence.HasValue)
    {
        return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture);
    }
    if (!string.IsNullOrWhiteSpace(confidenceBand))
    {
        return confidenceBand!;
    }
    return "-";
}
// Formats a Rekor transparency-log reference for display: prefers the UUID, then the URL,
// annotated with "(verified)"/"(unverified)" when the state is known; falls back to the bare
// verification state, and finally to "-" when there is nothing to show.
private static string FormatAttestation(RuntimePolicyRekorReference? rekor)
{
    if (rekor is null)
    {
        return "-";
    }

    var identifier = !string.IsNullOrWhiteSpace(rekor.Uuid)
        ? rekor.Uuid
        : !string.IsNullOrWhiteSpace(rekor.Url) ? rekor.Url : null;

    if (identifier is not null)
    {
        return rekor.Verified switch
        {
            true => identifier + " (verified)",
            false => identifier + " (unverified)",
            null => identifier,
        };
    }

    return rekor.Verified switch
    {
        true => "verified",
        false => "unverified",
        null => "-",
    };
}
// Probes metadata under each key in order and returns the first boolean found, accepting
// either a native bool or a bool-parseable string; null when no key yields one.
private static bool? GetMetadataBoolean(IReadOnlyDictionary<string, object?> metadata, params string[] keys)
{
    foreach (var key in keys)
    {
        if (!metadata.TryGetValue(key, out var raw) || raw is null)
        {
            continue;
        }

        if (raw is bool flag)
        {
            return flag;
        }

        if (raw is string text && bool.TryParse(text, out var parsed))
        {
            return parsed;
        }
    }

    return null;
}
// Probes metadata under each key in order and returns the first string value found.
// The first string hit decides the outcome: a blank string normalizes to null and stops
// the scan; non-string values are skipped. Returns null when no key yields a string.
private static string? GetMetadataString(IReadOnlyDictionary<string, object?> metadata, params string[] keys)
{
    foreach (var key in keys)
    {
        if (metadata.TryGetValue(key, out var raw) && raw is string text)
        {
            return string.IsNullOrWhiteSpace(text) ? null : text;
        }
    }

    return null;
}
// Probes metadata under each key in order and returns the first numeric value found,
// widening double/float/decimal/long/int and parsing invariant-culture numeric strings
// (thousands separators allowed). Returns null when no key yields a number.
private static double? GetMetadataDouble(IReadOnlyDictionary<string, object?> metadata, params string[] keys)
{
    foreach (var key in keys)
    {
        if (!metadata.TryGetValue(key, out var raw) || raw is null)
        {
            continue;
        }

        if (raw is double asDouble)
        {
            return asDouble;
        }

        if (raw is float asFloat)
        {
            return asFloat;
        }

        if (raw is decimal asDecimal)
        {
            return (double)asDecimal;
        }

        if (raw is long asLong)
        {
            return asLong;
        }

        if (raw is int asInt)
        {
            return asInt;
        }

        if (raw is string text &&
            double.TryParse(text, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
    }

    return null;
}
// Shared immutable empty selector map; keys compare case-insensitively (OrdinalIgnoreCase).
private static readonly IReadOnlyDictionary<string, string> EmptyLabelSelectors =
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.OrdinalIgnoreCase));
private static string FormatAdditionalValue(object? value)
{
return value switch
@@ -2359,8 +2573,6 @@ internal static class CommandHandlers
};
}
private static readonly IReadOnlyDictionary<string, string> EmptyLabelSelectors =
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(0, StringComparer.OrdinalIgnoreCase));
private static IReadOnlyList<string> NormalizeProviders(IReadOnlyList<string> providers)
{
@@ -2397,29 +2609,29 @@ internal static class CommandHandlers
string jobKind,
IDictionary<string, object?> parameters,
CancellationToken cancellationToken)
{
JobTriggerResult result = await client.TriggerJobAsync(jobKind, parameters, cancellationToken).ConfigureAwait(false);
if (result.Success)
{
if (!string.IsNullOrWhiteSpace(result.Location))
{
logger.LogInformation("Job accepted. Track status at {Location}.", result.Location);
}
else if (result.Run is not null)
{
logger.LogInformation("Job accepted. RunId: {RunId} Status: {Status}", result.Run.RunId, result.Run.Status);
}
else
{
logger.LogInformation("Job accepted.");
}
Environment.ExitCode = 0;
}
else
{
logger.LogError("Job '{JobKind}' failed: {Message}", jobKind, result.Message);
Environment.ExitCode = 1;
{
JobTriggerResult result = await client.TriggerJobAsync(jobKind, parameters, cancellationToken).ConfigureAwait(false);
if (result.Success)
{
if (!string.IsNullOrWhiteSpace(result.Location))
{
logger.LogInformation("Job accepted. Track status at {Location}.", result.Location);
}
else if (result.Run is not null)
{
logger.LogInformation("Job accepted. RunId: {RunId} Status: {Status}", result.Run.RunId, result.Run.Status);
}
else
{
logger.LogInformation("Job accepted.");
}
Environment.ExitCode = 0;
}
else
{
logger.LogError("Job '{JobKind}' failed: {Message}", jobKind, result.Message);
Environment.ExitCode = 1;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
@@ -19,9 +19,9 @@ using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.Transport;
namespace StellaOps.Cli.Services;
namespace StellaOps.Cli.Services;
internal sealed class BackendOperationsClient : IBackendOperationsClient
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
@@ -48,34 +48,34 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
{
if (Uri.TryCreate(_options.BackendUrl, UriKind.Absolute, out var baseUri))
{
httpClient.BaseAddress = baseUri;
}
}
}
public async Task<ScannerArtifactResult> DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
channel = string.IsNullOrWhiteSpace(channel) ? "stable" : channel.Trim();
outputPath = ResolveArtifactPath(outputPath, channel);
Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!);
if (!overwrite && File.Exists(outputPath))
{
var existing = new FileInfo(outputPath);
_logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length);
return new ScannerArtifactResult(outputPath, existing.Length, true);
}
var attempt = 0;
var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts);
while (true)
{
attempt++;
try
{
httpClient.BaseAddress = baseUri;
}
}
}
public async Task<ScannerArtifactResult> DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
channel = string.IsNullOrWhiteSpace(channel) ? "stable" : channel.Trim();
outputPath = ResolveArtifactPath(outputPath, channel);
Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!);
if (!overwrite && File.Exists(outputPath))
{
var existing = new FileInfo(outputPath);
_logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length);
return new ScannerArtifactResult(outputPath, existing.Length, true);
}
var attempt = 0;
var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts);
while (true)
{
attempt++;
try
{
using var request = CreateRequest(HttpMethod.Get, $"api/scanner/artifacts/{channel}");
await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
@@ -83,55 +83,55 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
{
var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
throw new InvalidOperationException(failure);
}
return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (attempt < maxAttempts)
{
var backoffSeconds = Math.Pow(2, attempt);
_logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds);
await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false);
}
}
}
/// <summary>
/// Streams the scanner artifact response to a temp file, validates its digest and
/// (when present) signature, then atomically promotes it to <paramref name="outputPath"/>
/// and persists download metadata alongside it.
/// </summary>
/// <returns>A result describing the final path and size; FromCache is always false here.</returns>
private async Task<ScannerArtifactResult> ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken)
{
    // Download to a sibling ".tmp" file first so a failed/partial download never clobbers a good artifact.
    var tempFile = outputPath + ".tmp";
    await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false))
    await using (var fileStream = File.Create(tempFile))
    {
        await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
    }
    // Validate integrity before the file is promoted; validation failures surface as exceptions
    // from the helpers, leaving only the temp file behind.
    var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest");
    var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature");
    var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false);
    await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false);
    if (verbose)
    {
        var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? "no signature" : "signature validated";
        _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote);
    }
    // Promote: remove any stale artifact, then move the validated temp file into place.
    if (File.Exists(outputPath))
    {
        File.Delete(outputPath);
    }
    File.Move(tempFile, outputPath);
    PersistMetadata(outputPath, channel, digestHex, signatureHeader, response);
    var downloaded = new FileInfo(outputPath);
    _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length);
    return new ScannerArtifactResult(outputPath, downloaded.Length, false);
}
}
return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (attempt < maxAttempts)
{
var backoffSeconds = Math.Pow(2, attempt);
_logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds);
await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false);
}
}
}
/// <summary>
/// Streams the scanner artifact response to a temp file, validates its digest and
/// (when present) signature, then atomically promotes it to <paramref name="outputPath"/>
/// and persists download metadata alongside it.
/// </summary>
/// <returns>A result describing the final path and size; FromCache is always false here.</returns>
private async Task<ScannerArtifactResult> ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken)
{
    // Download to a sibling ".tmp" file first so a failed/partial download never clobbers a good artifact.
    var tempFile = outputPath + ".tmp";
    await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false))
    await using (var fileStream = File.Create(tempFile))
    {
        await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
    }
    // Validate integrity before the file is promoted; validation failures surface as exceptions
    // from the helpers, leaving only the temp file behind.
    var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest");
    var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature");
    var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false);
    await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false);
    if (verbose)
    {
        var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? "no signature" : "signature validated";
        _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote);
    }
    // Promote: remove any stale artifact, then move the validated temp file into place.
    if (File.Exists(outputPath))
    {
        File.Delete(outputPath);
    }
    File.Move(tempFile, outputPath);
    PersistMetadata(outputPath, channel, digestHex, signatureHeader, response);
    var downloaded = new FileInfo(outputPath);
    _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length);
    return new ScannerArtifactResult(outputPath, downloaded.Length, false);
}
public async Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
@@ -194,46 +194,46 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
}
}
}
public async Task<JobTriggerResult> TriggerJobAsync(string jobKind, IDictionary<string, object?> parameters, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
if (string.IsNullOrWhiteSpace(jobKind))
{
throw new ArgumentException("Job kind must be provided.", nameof(jobKind));
}
var requestBody = new JobTriggerRequest
{
Trigger = "cli",
Parameters = parameters is null ? new Dictionary<string, object?>(StringComparer.Ordinal) : new Dictionary<string, object?>(parameters, StringComparer.Ordinal)
};
public async Task<JobTriggerResult> TriggerJobAsync(string jobKind, IDictionary<string, object?> parameters, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
if (string.IsNullOrWhiteSpace(jobKind))
{
throw new ArgumentException("Job kind must be provided.", nameof(jobKind));
}
var requestBody = new JobTriggerRequest
{
Trigger = "cli",
Parameters = parameters is null ? new Dictionary<string, object?>(StringComparer.Ordinal) : new Dictionary<string, object?>(parameters, StringComparer.Ordinal)
};
var request = CreateRequest(HttpMethod.Post, $"jobs/{jobKind}");
await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
request.Content = JsonContent.Create(requestBody, options: SerializerOptions);
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.Accepted)
{
JobRunResponse? run = null;
if (response.Content.Headers.ContentLength is > 0)
{
try
{
run = await response.Content.ReadFromJsonAsync<JobRunResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
}
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind);
}
}
var location = response.Headers.Location?.ToString();
return new JobTriggerResult(true, "Accepted", location, run);
}
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.Accepted)
{
JobRunResponse? run = null;
if (response.Content.Headers.ContentLength is > 0)
{
try
{
run = await response.Content.ReadFromJsonAsync<JobRunResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
}
catch (JsonException ex)
{
_logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind);
}
}
var location = response.Headers.Location?.ToString();
return new JobTriggerResult(true, "Accepted", location, run);
}
var failureMessage = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
return new JobTriggerResult(false, failureMessage, null, null);
}
@@ -443,19 +443,24 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
var reasons = ExtractReasons(decision.Reasons);
var metadata = ExtractExtensionMetadata(decision.ExtensionData);
var hasSbom = decision.HasSbomReferrers ?? decision.HasSbomLegacy;
RuntimePolicyRekorReference? rekor = null;
if (decision.Rekor is not null &&
(!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) || !string.IsNullOrWhiteSpace(decision.Rekor.Url)))
(!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) ||
!string.IsNullOrWhiteSpace(decision.Rekor.Url) ||
decision.Rekor.Verified.HasValue))
{
rekor = new RuntimePolicyRekorReference(
NormalizeOptionalString(decision.Rekor.Uuid),
NormalizeOptionalString(decision.Rekor.Url));
NormalizeOptionalString(decision.Rekor.Url),
decision.Rekor.Verified);
}
decisions[image] = new RuntimePolicyImageDecision(
verdict,
decision.Signed,
decision.HasSbom,
hasSbom,
reasons,
rekor,
metadata);
@@ -624,15 +629,15 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri))
{
throw new InvalidOperationException($"Invalid request URI '{relativeUri}'.");
}
if (requestUri.IsAbsoluteUri)
{
// Nothing to normalize.
}
else
{
requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative);
}
if (requestUri.IsAbsoluteUri)
{
// Nothing to normalize.
}
else
{
requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative);
}
return new HttpRequestMessage(method, requestUri);
@@ -820,76 +825,76 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
{
if (_httpClient.BaseAddress is null)
{
throw new InvalidOperationException("Backend URL is not configured. Provide STELLAOPS_BACKEND_URL or configure appsettings.");
}
}
private string ResolveArtifactPath(string outputPath, string channel)
{
if (!string.IsNullOrWhiteSpace(outputPath))
{
return Path.GetFullPath(outputPath);
}
var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory)
? Directory.GetCurrentDirectory()
: Path.GetFullPath(_options.ScannerCacheDirectory);
Directory.CreateDirectory(directory);
var fileName = $"stellaops-scanner-{channel}.tar.gz";
return Path.Combine(directory, fileName);
}
private async Task<string> CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken)
{
var statusCode = (int)response.StatusCode;
var builder = new StringBuilder();
builder.Append("Backend request failed with status ");
builder.Append(statusCode);
builder.Append(' ');
builder.Append(response.ReasonPhrase ?? "Unknown");
if (response.Content.Headers.ContentLength is > 0)
{
try
{
var problem = await response.Content.ReadFromJsonAsync<ProblemDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
if (problem is not null)
{
if (!string.IsNullOrWhiteSpace(problem.Title))
{
builder.AppendLine().Append(problem.Title);
}
if (!string.IsNullOrWhiteSpace(problem.Detail))
{
builder.AppendLine().Append(problem.Detail);
}
}
}
catch (JsonException)
{
var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
if (!string.IsNullOrWhiteSpace(raw))
{
builder.AppendLine().Append(raw);
}
}
}
return builder.ToString();
}
private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name)
{
if (headers.TryGetValues(name, out var values))
{
return values.FirstOrDefault();
}
return null;
}
throw new InvalidOperationException("Backend URL is not configured. Provide STELLAOPS_BACKEND_URL or configure appsettings.");
}
}
private string ResolveArtifactPath(string outputPath, string channel)
{
if (!string.IsNullOrWhiteSpace(outputPath))
{
return Path.GetFullPath(outputPath);
}
var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory)
? Directory.GetCurrentDirectory()
: Path.GetFullPath(_options.ScannerCacheDirectory);
Directory.CreateDirectory(directory);
var fileName = $"stellaops-scanner-{channel}.tar.gz";
return Path.Combine(directory, fileName);
}
private async Task<string> CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken)
{
var statusCode = (int)response.StatusCode;
var builder = new StringBuilder();
builder.Append("Backend request failed with status ");
builder.Append(statusCode);
builder.Append(' ');
builder.Append(response.ReasonPhrase ?? "Unknown");
if (response.Content.Headers.ContentLength is > 0)
{
try
{
var problem = await response.Content.ReadFromJsonAsync<ProblemDocument>(SerializerOptions, cancellationToken).ConfigureAwait(false);
if (problem is not null)
{
if (!string.IsNullOrWhiteSpace(problem.Title))
{
builder.AppendLine().Append(problem.Title);
}
if (!string.IsNullOrWhiteSpace(problem.Detail))
{
builder.AppendLine().Append(problem.Detail);
}
}
}
catch (JsonException)
{
var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
if (!string.IsNullOrWhiteSpace(raw))
{
builder.AppendLine().Append(raw);
}
}
}
return builder.ToString();
}
private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name)
{
if (headers.TryGetValues(name, out var values))
{
return values.FirstOrDefault();
}
return null;
}
private static string? NormalizeExpectedDigest(string? digest)
{
if (string.IsNullOrWhiteSpace(digest))
@@ -909,23 +914,23 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
await using (var stream = File.OpenRead(filePath))
{
var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
digestHex = Convert.ToHexString(hash).ToLowerInvariant();
}
if (!string.IsNullOrWhiteSpace(expectedDigest))
{
var normalized = NormalizeDigest(expectedDigest);
if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase))
{
File.Delete(filePath);
throw new InvalidOperationException($"Scanner digest mismatch. Expected sha256:{normalized}, calculated sha256:{digestHex}.");
}
}
else
{
_logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only.");
}
digestHex = Convert.ToHexString(hash).ToLowerInvariant();
}
if (!string.IsNullOrWhiteSpace(expectedDigest))
{
var normalized = NormalizeDigest(expectedDigest);
if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase))
{
File.Delete(filePath);
throw new InvalidOperationException($"Scanner digest mismatch. Expected sha256:{normalized}, calculated sha256:{digestHex}.");
}
}
else
{
_logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only.");
}
return digestHex;
}
@@ -945,71 +950,71 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private async Task ValidateSignatureAsync(string? signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath))
{
if (!string.IsNullOrWhiteSpace(signatureHeader))
{
_logger.LogDebug("Signature header present but no public key configured; skipping validation.");
}
return;
}
if (string.IsNullOrWhiteSpace(signatureHeader))
{
throw new InvalidOperationException("Scanner signature missing while a public key is configured.");
}
var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath);
if (!File.Exists(publicKeyPath))
{
throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath);
}
var signatureBytes = Convert.FromBase64String(signatureHeader);
var digestBytes = Convert.FromHexString(digestHex);
var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false);
using var rsa = RSA.Create();
rsa.ImportFromPem(pem);
var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
if (!valid)
{
throw new InvalidOperationException("Scanner signature validation failed.");
}
if (verbose)
{
_logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath);
}
}
private async Task ValidateSignatureAsync(string? signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath))
{
if (!string.IsNullOrWhiteSpace(signatureHeader))
{
_logger.LogDebug("Signature header present but no public key configured; skipping validation.");
}
return;
}
if (string.IsNullOrWhiteSpace(signatureHeader))
{
throw new InvalidOperationException("Scanner signature missing while a public key is configured.");
}
var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath);
if (!File.Exists(publicKeyPath))
{
throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath);
}
var signatureBytes = Convert.FromBase64String(signatureHeader);
var digestBytes = Convert.FromHexString(digestHex);
var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false);
using var rsa = RSA.Create();
rsa.ImportFromPem(pem);
var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
if (!valid)
{
throw new InvalidOperationException("Scanner signature validation failed.");
}
if (verbose)
{
_logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath);
}
}
private void PersistMetadata(string outputPath, string channel, string digestHex, string? signatureHeader, HttpResponseMessage response)
{
var metadata = new
{
channel,
digest = $"sha256:{digestHex}",
signature = signatureHeader,
downloadedAt = DateTimeOffset.UtcNow,
source = response.RequestMessage?.RequestUri?.ToString(),
sizeBytes = new FileInfo(outputPath).Length,
headers = new
{
etag = response.Headers.ETag?.Tag,
lastModified = response.Content.Headers.LastModified,
contentType = response.Content.Headers.ContentType?.ToString()
}
};
var metadataPath = outputPath + ".metadata.json";
var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions
{
WriteIndented = true
});
digest = $"sha256:{digestHex}",
signature = signatureHeader,
downloadedAt = DateTimeOffset.UtcNow,
source = response.RequestMessage?.RequestUri?.ToString(),
sizeBytes = new FileInfo(outputPath).Length,
headers = new
{
etag = response.Headers.ETag?.Tag,
lastModified = response.Content.Headers.LastModified,
contentType = response.Content.Headers.ContentType?.ToString()
}
};
var metadataPath = outputPath + ".metadata.json";
var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions
{
WriteIndented = true
});
File.WriteAllText(metadataPath, json);
}

View File

@@ -17,9 +17,9 @@ internal sealed record RuntimePolicyEvaluationResult(
internal sealed record RuntimePolicyImageDecision(
string PolicyVerdict,
bool? Signed,
bool? HasSbom,
bool? HasSbomReferrers,
IReadOnlyList<string> Reasons,
RuntimePolicyRekorReference? Rekor,
IReadOnlyDictionary<string, object?> AdditionalProperties);
internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url);
internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url, bool? Verified);

View File

@@ -42,8 +42,12 @@ internal sealed class RuntimePolicyEvaluationImageDocument
[JsonPropertyName("signed")]
public bool? Signed { get; set; }
[JsonPropertyName("hasSbomReferrers")]
public bool? HasSbomReferrers { get; set; }
// Legacy field kept for pre-contract-sync services.
[JsonPropertyName("hasSbom")]
public bool? HasSbom { get; set; }
public bool? HasSbomLegacy { get; set; }
[JsonPropertyName("reasons")]
public List<string>? Reasons { get; set; }
@@ -62,4 +66,7 @@ internal sealed class RuntimePolicyRekorDocument
[JsonPropertyName("url")]
public string? Url { get; set; }
[JsonPropertyName("verified")]
public bool? Verified { get; set; }
}

View File

@@ -20,5 +20,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md
|CLI-RUNTIME-13-005 Runtime policy test verbs|DevEx/CLI|SCANNER-RUNTIME-12-302, ZASTAVA-WEBHOOK-12-102|**DONE (2025-10-19)** Added `runtime policy test` command (stdin/file support, JSON output), backend client method + typed models, verdict table output, docs/tests updated (`dotnet test src/StellaOps.Cli.Tests`).|
|CLI-OFFLINE-13-006 Offline kit workflows|DevEx/CLI|DEVOPS-OFFLINE-14-002|TODO Implement `offline kit pull/import/status` commands with integrity checks, resumable downloads, and doc updates.|
|CLI-PLUGIN-13-007 Plugin packaging|DevEx/CLI|CLI-RUNTIME-13-005, CLI-OFFLINE-13-006|TODO Package non-core verbs as restart-time plug-ins (manifest + loader updates, tests ensuring no hot reload).|
|CLI-RUNTIME-13-008 Runtime policy contract sync|DevEx/CLI, Scanner WebService Guild|SCANNER-RUNTIME-12-302|TODO Once `/api/v1/scanner/policy/runtime` exits TODO, verify CLI output against final schema (field names, metadata) and update formatter/tests if the contract moves. Capture joint review notes in docs/09 and link Scanner task sign-off.|
|CLI-RUNTIME-13-009 Runtime policy smoke fixture|DevEx/CLI, QA Guild|CLI-RUNTIME-13-005|TODO Build Spectre test harness exercising `runtime policy test` against a stubbed backend to lock output shape (table + `--json`) and guard regressions. Integrate into `dotnet test` suite.|
|CLI-RUNTIME-13-008 Runtime policy contract sync|DevEx/CLI, Scanner WebService Guild|SCANNER-RUNTIME-12-302|**DONE (2025-10-19)** CLI runtime table/JSON now align with SCANNER-RUNTIME-12-302 (SBOM referrers, quieted provenance, confidence, verified Rekor); docs/09 updated with joint sign-off note.|
|CLI-RUNTIME-13-009 Runtime policy smoke fixture|DevEx/CLI, QA Guild|CLI-RUNTIME-13-005|**DONE (2025-10-19)** Spectre console harness + regression tests cover table and `--json` output paths for `runtime policy test`, using stubbed backend and integrated into `dotnet test` suite.|

View File

@@ -9,3 +9,4 @@
|FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**DONE (2025-10-15)** Added `CccsDiagnostics` meter (fetch/parse/map counters), enriched connector logs with document counts, and published `docs/ops/concelier-cccs-operations.md` covering config, telemetry, and sanitiser guidance.|
|FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**DONE (2025-10-15)** Measured `/api/cccs/threats/v1/get` inventory (~5.1k rows/lang; earliest 2018-06-08), documented backfill workflow + language split strategy, and linked the runbook for Offline Kit execution.|
|FEEDCONN-CCCS-02-008 Raw DOM parsing refinement|BE-Conn-CCCS|Source.Common|**DONE (2025-10-15)** Parser now walks unsanitised DOM (heading + nested list coverage), sanitizer keeps `<h#>`/`section` nodes, and regression fixtures/tests assert EN/FR list handling + preserved HTML structure.|
|FEEDCONN-CCCS-02-009 Normalized versions rollout (Oct 2025)|BE-Conn-CCCS|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Implement trailing-version split helper per Merge guidance (see `../Merge/RANGE_PRIMITIVES_COORDINATION.md` “Helper snippets”) to emit `NormalizedVersions` via `SemVerRangeRuleBuilder`; refresh mapper tests/fixtures to assert provenance notes (`cccs:{serial}:{index}`) and confirm merge counters drop.|

View File

@@ -10,3 +10,4 @@
|FEEDCONN-CERTBUND-02-007 Feed history & locale assessment|BE-Conn-CERTBUND|Research|**DONE (2025-10-15)** Measured RSS retention (~6 days / ≈250 items), captured connector-driven backfill guidance in the runbook, and aligned locale guidance (preserve `language=de`, Docs glossary follow-up). **Next:** coordinate with Tools to land the state-seeding helper so scripted backfills replace manual Mongo tweaks.|
|FEEDCONN-CERTBUND-02-008 Session bootstrap & cookie strategy|BE-Conn-CERTBUND|Source.Common|**DONE (2025-10-14)** Feed client primes the portal session (cookie container via `SocketsHttpHandler`), shares cookies across detail requests, and documents bootstrap behaviour in options (`PortalBootstrapUri`).|
|FEEDCONN-CERTBUND-02-009 Offline Kit export packaging|BE-Conn-CERTBUND, Docs|Offline Kit|**DONE (2025-10-17)** Added `tools/certbund_offline_snapshot.py` to capture search/export JSON, emit deterministic manifests + SHA files, and refreshed docs (`docs/ops/concelier-certbund-operations.md`, `docs/24_OFFLINE_KIT.md`) with offline-kit instructions and manifest layout guidance. Seed data README/ignore rules cover local snapshot hygiene.|
|FEEDCONN-CERTBUND-02-010 Normalized range translator|BE-Conn-CERTBUND|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-22)** Translate `product.Versions` phrases (e.g., `2023.1 bis 2024.2`, `alle`) into comparator strings for `SemVerRangeRuleBuilder`, emit `NormalizedVersions` with `certbund:{advisoryId}:{vendor}` provenance, and extend tests/README with localisation notes.|

View File

@@ -12,3 +12,4 @@
|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** GovDelivery onboarding runbook captured in `docs/ops/concelier-icscisa-operations.md`; secret vault path and Offline Kit handling documented.|
|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).|
|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.|
|FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.|

View File

@@ -8,3 +8,4 @@
|FEEDCONN-KISA-02-005 Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-14)** Added `StellaOps.Concelier.Connector.Kisa.Tests` with Korean fixtures and fetch→parse→map regression; fixtures regenerate via `UPDATE_KISA_FIXTURES=1`.|
|FEEDCONN-KISA-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** Added diagnostics-backed telemetry, structured logs, regression coverage, and published localisation notes in `docs/dev/kisa_connector_notes.md` + fixture guidance for Docs/QA.|
|FEEDCONN-KISA-02-007 RSS contract & localisation brief|BE-Conn-KISA|Research|**DONE (2025-10-11)** Documented RSS URLs, confirmed UTF-8 payload (no additional cookies required), and drafted localisation plan (Hangul glossary + optional MT plugin). Remaining open item: capture SPA detail API contract for full-text translations.|
|FEEDCONN-KISA-02-008 Firmware scheme proposal|BE-Conn-KISA, Models|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-24)** Define transformation for Hangul-labelled firmware ranges (`XFU 1.0.1.0084 ~ 2.0.1.0034`), propose `kisa.build` (or equivalent) scheme to Models, implement normalized rule emission/tests once scheme approved, and update localisation notes.|

View File

@@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Connector.StellaOpsMirror.Security;
using StellaOps.Cryptography;
using Xunit;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
/// <summary>
/// Unit tests for <see cref="MirrorSignatureVerifier"/> covering detached-JWS
/// (RFC 7797, <c>b64=false</c>) signature validation over mirror bundle payloads.
/// </summary>
public sealed class MirrorSignatureVerifierTests
{
    [Fact]
    public async Task VerifyAsync_ValidSignaturePasses()
    {
        // Arrange: in-memory ES256 key registered with the default crypto provider.
        var provider = new DefaultCryptoProvider();
        var key = CreateSigningKey("mirror-key");
        provider.UpsertSigningKey(key);
        var registry = new CryptoProviderRegistry(new[] { provider });
        var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);

        // Fix: the payload literal previously embedded a stray "u8" suffix
        // (leftover from converting a UTF-8 string literal), yielding malformed JSON.
        var payload = "{\"advisories\":[]}".ToUtf8Bytes();
        var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload);

        // Act/Assert: an untampered detached JWS must verify without throwing.
        await verifier.VerifyAsync(payload, signature, CancellationToken.None);
    }

    [Fact]
    public async Task VerifyAsync_InvalidSignatureThrows()
    {
        // Arrange: same key setup as the happy path.
        var provider = new DefaultCryptoProvider();
        var key = CreateSigningKey("mirror-key");
        provider.UpsertSigningKey(key);
        var registry = new CryptoProviderRegistry(new[] { provider });
        var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);

        // Fix: stray "u8" suffix removed from the payload literal (see valid-signature test).
        var payload = "{\"advisories\":[]}".ToUtf8Bytes();
        var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload);

        // Corrupt the compact serialization so verification must fail.
        var tampered = signature.Replace("a", "b", StringComparison.Ordinal);

        await Assert.ThrowsAsync<InvalidOperationException>(
            () => verifier.VerifyAsync(payload, tampered, CancellationToken.None));
    }

    /// <summary>Creates an ephemeral NIST P-256 signing key wrapped with the supplied key id.</summary>
    private static CryptoSigningKey CreateSigningKey(string keyId)
    {
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);
        return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow);
    }

    /// <summary>
    /// Produces a detached JWS over <paramref name="payload"/>: the result has the shape
    /// <c>protectedHeader..signature</c> with an empty middle segment because the payload
    /// is transported out of band (<c>b64=false</c>, <c>crit=["b64"]</c>).
    /// </summary>
    private static async Task<(string Signature, DateTimeOffset SignedAt)> CreateDetachedJwsAsync(
        DefaultCryptoProvider provider,
        string keyId,
        ReadOnlyMemory<byte> payload)
    {
        var signer = provider.GetSigner(SignatureAlgorithms.Es256, new CryptoKeyReference(keyId));
        var header = new Dictionary<string, object?>
        {
            ["alg"] = SignatureAlgorithms.Es256,
            ["kid"] = keyId,
            ["provider"] = provider.Name,
            ["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws",
            // b64=false signs the raw payload bytes instead of their base64url form.
            ["b64"] = false,
            ["crit"] = new[] { "b64" }
        };

        var headerJson = System.Text.Json.JsonSerializer.Serialize(header);
        var protectedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson);
        var signingInput = BuildSigningInput(protectedHeader, payload.Span);
        var signatureBytes = await signer.SignAsync(signingInput, CancellationToken.None).ConfigureAwait(false);
        var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes);
        return (string.Concat(protectedHeader, "..", encodedSignature), DateTimeOffset.UtcNow);
    }

    /// <summary>Builds the JWS signing input: ASCII(protectedHeader) || '.' || payload bytes.</summary>
    private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload)
    {
        var headerBytes = System.Text.Encoding.ASCII.GetBytes(encodedHeader);
        var buffer = new byte[headerBytes.Length + 1 + payload.Length];
        headerBytes.CopyTo(buffer.AsSpan());
        buffer[headerBytes.Length] = (byte)'.';
        payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1));
        return buffer;
    }
}
/// <summary>Test-local helper for turning string literals into UTF-8 payload bytes.</summary>
file static class Utf8Extensions
{
    /// <summary>Encodes <paramref name="value"/> as UTF-8 and exposes it as read-only memory.</summary>
    public static ReadOnlyMemory<byte> ToUtf8Bytes(this string value)
    {
        var encoded = System.Text.Encoding.UTF8.GetBytes(value);
        return encoded;
    }
}

View File

@@ -0,0 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj" />
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,319 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Testing;
using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.SourceState;
using StellaOps.Concelier.Testing;
using StellaOps.Cryptography;
using Xunit;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
[Collection("mongo-fixture")]
public sealed class StellaOpsMirrorConnectorTests : IAsyncLifetime
{
private readonly MongoIntegrationFixture _fixture;
private readonly CannedHttpMessageHandler _handler;
/// <summary>
/// Binds the shared Mongo integration fixture and creates a fresh canned HTTP
/// handler so each test class instance controls its own mirror responses.
/// </summary>
public StellaOpsMirrorConnectorTests(MongoIntegrationFixture fixture)
{
    _fixture = fixture;
    _handler = new CannedHttpMessageHandler();
}
[Fact]
public async Task FetchAsync_PersistsMirrorArtifacts()
{
    // Arrange: minimal manifest/bundle payloads plus an index that references
    // both by digest; no detached signature for this happy-path run.
    var manifestContent = "{\"domain\":\"primary\",\"files\":[]}";
    var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0001\"}]}";
    var manifestDigest = ComputeDigest(manifestContent);
    var bundleDigest = ComputeDigest(bundleContent);
    var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false);
    await using var provider = await BuildServiceProviderAsync();
    SeedResponses(index, manifestContent, bundleContent, signature: null);
    var connector = provider.GetRequiredService<StellaOpsMirrorConnector>();

    // Act: a single fetch cycle against the canned mirror endpoints.
    await connector.FetchAsync(provider, CancellationToken.None);

    // Assert: manifest document is stored as already mapped, with matching digest.
    var documentStore = provider.GetRequiredService<IDocumentStore>();
    var manifestUri = "https://mirror.test/mirror/primary/manifest.json";
    var bundleUri = "https://mirror.test/mirror/primary/bundle.json";
    var manifestDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, manifestUri, CancellationToken.None);
    Assert.NotNull(manifestDocument);
    Assert.Equal(DocumentStatuses.Mapped, manifestDocument!.Status);
    Assert.Equal(NormalizeDigest(manifestDigest), manifestDocument.Sha256);

    // The bundle document awaits parsing and also carries the expected digest.
    var bundleDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, bundleUri, CancellationToken.None);
    Assert.NotNull(bundleDocument);
    Assert.Equal(DocumentStatuses.PendingParse, bundleDocument!.Status);
    Assert.Equal(NormalizeDigest(bundleDigest), bundleDocument.Sha256);

    // Raw payloads are persisted to GridFS byte-for-byte.
    var rawStorage = provider.GetRequiredService<RawDocumentStorage>();
    Assert.NotNull(manifestDocument.GridFsId);
    Assert.NotNull(bundleDocument.GridFsId);
    var manifestBytes = await rawStorage.DownloadAsync(manifestDocument.GridFsId!.Value, CancellationToken.None);
    var bundleBytes = await rawStorage.DownloadAsync(bundleDocument.GridFsId!.Value, CancellationToken.None);
    Assert.Equal(manifestContent, Encoding.UTF8.GetString(manifestBytes));
    Assert.Equal(bundleContent, Encoding.UTF8.GetString(bundleBytes));

    // Source-state cursor records the bundle digest and queues exactly the
    // bundle document for parsing, with no pending mappings yet.
    var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
    var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None);
    Assert.NotNull(state);
    var cursorDocument = state!.Cursor ?? new BsonDocument();
    var digestValue = cursorDocument.TryGetValue("bundleDigest", out var digestBson) ? digestBson.AsString : string.Empty;
    Assert.Equal(NormalizeDigest(bundleDigest), NormalizeDigest(digestValue));
    var pendingDocumentsArray = cursorDocument.TryGetValue("pendingDocuments", out var pendingDocsBson) && pendingDocsBson is BsonArray pendingArray
        ? pendingArray
        : new BsonArray();
    Assert.Single(pendingDocumentsArray);
    var pendingDocumentId = Guid.Parse(pendingDocumentsArray[0].AsString);
    Assert.Equal(bundleDocument.Id, pendingDocumentId);
    var pendingMappingsArray = cursorDocument.TryGetValue("pendingMappings", out var pendingMappingsBson) && pendingMappingsBson is BsonArray mappingsArray
        ? mappingsArray
        : new BsonArray();
    Assert.Empty(pendingMappingsArray);
}
[Fact]
public async Task FetchAsync_TamperedSignatureThrows()
{
    // Arrange: index advertises a detached signature, and connector options
    // enable signature verification against the "mirror-key" identity.
    var manifestContent = "{\"domain\":\"primary\"}";
    var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0002\"}]}";
    var manifestDigest = ComputeDigest(manifestContent);
    var bundleDigest = ComputeDigest(bundleContent);
    var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true);
    await using var provider = await BuildServiceProviderAsync(options =>
    {
        options.Signature.Enabled = true;
        options.Signature.KeyId = "mirror-key";
        options.Signature.Provider = "default";
    });
    var defaultProvider = provider.GetRequiredService<DefaultCryptoProvider>();
    var signingKey = CreateSigningKey("mirror-key");
    defaultProvider.UpsertSigningKey(signingKey);
    var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent);

    // Tamper with signature so verification fails.
    var tamperedSignature = signatureValue.Replace('a', 'b');
    SeedResponses(index, manifestContent, bundleContent, tamperedSignature);
    var connector = provider.GetRequiredService<StellaOpsMirrorConnector>();

    // Act/Assert: the fetch must surface the verification failure.
    await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None));

    // The failure is recorded in source state, and no bundle digest is
    // committed to the cursor for the rejected fetch.
    var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
    var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None);
    Assert.NotNull(state);
    Assert.True(state!.FailCount >= 1);
    Assert.False(state.Cursor.TryGetValue("bundleDigest", out _));
}
// xUnit IAsyncLifetime setup hook: nothing to initialize asynchronously here.
public Task InitializeAsync() => Task.CompletedTask;

// xUnit IAsyncLifetime teardown: clear canned HTTP responses so the next test
// starts with an empty handler.
public Task DisposeAsync()
{
    _handler.Clear();
    return Task.CompletedTask;
}
/// <summary>
/// Builds a fully wired service provider for one test run: drops the fixture
/// database, registers Mongo storage and crypto providers, configures the
/// mirror connector from in-memory configuration, and routes the connector's
/// named HTTP client through the canned handler. Callers own disposal
/// (tests use <c>await using</c>).
/// </summary>
/// <param name="configureOptions">Optional post-configuration of connector options (e.g. enabling signature checks).</param>
private async Task<ServiceProvider> BuildServiceProviderAsync(Action<StellaOpsMirrorConnectorOptions>? configureOptions = null)
{
    // Reset shared state so runs are isolated: fresh database, no canned responses.
    await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName);
    _handler.Clear();

    var services = new ServiceCollection();
    services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance));
    services.AddSingleton(_handler);
    services.AddSingleton(TimeProvider.System);
    services.AddMongoStorage(options =>
    {
        options.ConnectionString = _fixture.Runner.ConnectionString;
        options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName;
        options.CommandTimeout = TimeSpan.FromSeconds(5);
    });

    // Crypto: one concrete provider exposed both directly and via the
    // ICryptoProvider/ICryptoProviderRegistry abstractions the connector uses.
    services.AddSingleton<DefaultCryptoProvider>();
    services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<DefaultCryptoProvider>());
    services.AddSingleton<ICryptoProviderRegistry>(sp => new CryptoProviderRegistry(sp.GetServices<ICryptoProvider>()));

    // Connector configuration pointing at the canned mirror host.
    var configuration = new ConfigurationBuilder()
        .AddInMemoryCollection(new Dictionary<string, string?>
        {
            ["concelier:sources:stellaopsMirror:baseAddress"] = "https://mirror.test/",
            ["concelier:sources:stellaopsMirror:domainId"] = "primary",
            ["concelier:sources:stellaopsMirror:indexPath"] = "/concelier/exports/index.json",
        })
        .Build();
    var routine = new StellaOpsMirrorDependencyInjectionRoutine();
    routine.Register(services, configuration);

    // PostConfigure runs after the routine's own binding, so tests can override.
    if (configureOptions is not null)
    {
        services.PostConfigure(configureOptions);
    }

    // Swap the named client's primary handler for the canned one so no real
    // network traffic occurs.
    services.Configure<HttpClientFactoryOptions>("stellaops-mirror", builder =>
    {
        builder.HttpMessageHandlerBuilderActions.Add(options =>
        {
            options.PrimaryHandler = _handler;
        });
    });

    var provider = services.BuildServiceProvider();

    // Ensure collections/indexes exist before the connector runs.
    var bootstrapper = provider.GetRequiredService<MongoBootstrapper>();
    await bootstrapper.InitializeAsync(CancellationToken.None);
    return provider;
}
// Registers canned HTTP responses for the mirror index, manifest, and bundle
// (and, when provided, the detached JWS signature) so the connector's fetch
// path can run entirely offline against _handler.
private void SeedResponses(string indexJson, string manifestContent, string bundleContent, string? signature)
{
var baseUri = new Uri("https://mirror.test");
_handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "/concelier/exports/index.json"), () => CreateJsonResponse(indexJson));
_handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/manifest.json"), () => CreateJsonResponse(manifestContent));
_handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json"), () => CreateJsonResponse(bundleContent));
if (signature is not null)
{
_handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json.jws"), () => new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(signature, Encoding.UTF8, "application/jose+json"),
});
}
}
// Wraps the supplied JSON payload in a 200 OK response with a UTF-8
// "application/json" body.
private static HttpResponseMessage CreateJsonResponse(string content)
{
    var response = new HttpResponseMessage(HttpStatusCode.OK);
    response.Content = new StringContent(content, Encoding.UTF8, "application/json");
    return response;
}
// Serializes a minimal mirror index document (schema v1) describing a single
// "primary" domain with the supplied manifest/bundle sizes and digests. When
// includeSignature is true the bundle entry advertises a detached JWS produced
// by key "mirror-key" via the "default" provider; the manifest never carries one.
private static string BuildIndex(string manifestDigest, int manifestBytes, string bundleDigest, int bundleBytes, bool includeSignature)
{
var index = new
{
schemaVersion = 1,
generatedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero),
targetRepository = "repo",
domains = new[]
{
new
{
domainId = "primary",
displayName = "Primary",
advisoryCount = 1,
manifest = new
{
path = "mirror/primary/manifest.json",
sizeBytes = manifestBytes,
digest = manifestDigest,
signature = (object?)null,
},
bundle = new
{
path = "mirror/primary/bundle.json",
sizeBytes = bundleBytes,
digest = bundleDigest,
signature = includeSignature
? new
{
path = "mirror/primary/bundle.json.jws",
algorithm = "ES256",
keyId = "mirror-key",
provider = "default",
signedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero),
}
: null,
},
sources = Array.Empty<object>(),
}
}
};
// camelCase to match the wire format expected by MirrorIndexDocument's JsonPropertyName attributes.
return JsonSerializer.Serialize(index, new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
});
}
// Computes the canonical "sha256:<lowercase-hex>" digest of the UTF-8
// encoding of the given content.
private static string ComputeDigest(string content)
    => $"sha256:{Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(content))).ToLowerInvariant()}";
// Strips a leading "sha256:" prefix (case-insensitive) when present;
// any other digest string is returned unchanged.
private static string NormalizeDigest(string digest)
{
    const string prefix = "sha256:";
    return digest.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)
        ? digest.Substring(prefix.Length)
        : digest;
}
// Creates an ephemeral ES256 (NIST P-256) signing key with the given key id,
// exporting the private parameters so DefaultCryptoProvider can sign with it.
private static CryptoSigningKey CreateSigningKey(string keyId)
{
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var parameters = ecdsa.ExportParameters(includePrivateParameters: true);
return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow);
}
// Builds a detached JWS (b64=false, "crit":["b64"]) over the UTF-8 payload with
// the supplied signing key and returns the compact serialization
// "<header>..<signature>" (payload segment omitted) plus a signing timestamp.
// Blocking on SignAsync is tolerated here because this is test-only helper code.
private static (string Signature, DateTimeOffset SignedAt) CreateDetachedJws(CryptoSigningKey signingKey, string payload)
{
using var provider = new DefaultCryptoProvider();
provider.UpsertSigningKey(signingKey);
var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference);
// Protected header advertises ES256, the key id and provider, and marks "b64"
// critical with b64=false — the shape MirrorSignatureVerifier expects.
var header = new Dictionary<string, object?>
{
["alg"] = SignatureAlgorithms.Es256,
["kid"] = signingKey.Reference.KeyId,
["provider"] = provider.Name,
["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws",
["b64"] = false,
["crit"] = new[] { "b64" }
};
var headerJson = JsonSerializer.Serialize(header);
var encodedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson);
var payloadBytes = Encoding.UTF8.GetBytes(payload);
var signingInput = BuildSigningInput(encodedHeader, payloadBytes);
var signatureBytes = signer.SignAsync(signingInput, CancellationToken.None).GetAwaiter().GetResult();
var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes);
return (string.Concat(encodedHeader, "..", encodedSignature), DateTimeOffset.UtcNow);
}
// Assembles the JWS signing input for an unencoded payload:
// ASCII(encodedHeader) || '.' || raw payload bytes.
private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload)
{
    var header = Encoding.ASCII.GetBytes(encodedHeader);
    var result = new byte[header.Length + 1 + payload.Length];
    var span = result.AsSpan();
    header.CopyTo(span);
    span[header.Length] = (byte)'.';
    payload.CopyTo(span[(header.Length + 1)..]);
    return result;
}
}

View File

@@ -0,0 +1,89 @@
using System;
using System.Net;
using System.Net.Http.Headers;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Client;
/// <summary>
/// Lightweight HTTP client for retrieving mirror index and domain artefacts.
/// </summary>
public sealed class MirrorManifestClient
{
    // Web defaults (camelCase) with case-insensitive properties and comment
    // tolerance so hand-edited index documents still deserialize.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip
    };

    private readonly HttpClient _httpClient;
    private readonly ILogger<MirrorManifestClient> _logger;

    public MirrorManifestClient(HttpClient httpClient, ILogger<MirrorManifestClient> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Downloads and deserializes the mirror index document at <paramref name="indexPath"/>.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="indexPath"/> is null or whitespace.</exception>
    /// <exception cref="HttpRequestException">Thrown when the server returns a non-success status.</exception>
    /// <exception cref="InvalidOperationException">Thrown when the payload deserializes to null.</exception>
    public async Task<MirrorIndexDocument> GetIndexAsync(string indexPath, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(indexPath))
        {
            throw new ArgumentException("Index path must be provided.", nameof(indexPath));
        }

        using var request = new HttpRequestMessage(HttpMethod.Get, indexPath);
        using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        await EnsureSuccessAsync(response, indexPath, cancellationToken).ConfigureAwait(false);

        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
        var document = await JsonSerializer.DeserializeAsync<MirrorIndexDocument>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        if (document is null)
        {
            throw new InvalidOperationException("Mirror index payload was empty.");
        }

        return document;
    }

    /// <summary>
    /// Downloads the artefact at <paramref name="path"/> and returns its raw bytes.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is null or whitespace.</exception>
    /// <exception cref="HttpRequestException">Thrown when the server returns a non-success status.</exception>
    public async Task<byte[]> DownloadAsync(string path, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Path must be provided.", nameof(path));
        }

        using var request = new HttpRequestMessage(HttpMethod.Get, path);
        using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        await EnsureSuccessAsync(response, path, cancellationToken).ConfigureAwait(false);
        return await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
    }

    // Logs and throws for non-success responses. The error body is read
    // unconditionally: error responses delivered with chunked transfer encoding
    // advertise no Content-Length, and gating the read on that header silently
    // dropped their diagnostic payload from the log.
    private async Task EnsureSuccessAsync(HttpResponseMessage response, string path, CancellationToken cancellationToken)
    {
        if (response.IsSuccessStatusCode)
        {
            return;
        }

        var status = (int)response.StatusCode;
        string body;
        try
        {
            body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is not OperationCanceledException)
        {
            // Best effort only — failing to read the error body must not mask the status failure.
            body = string.Empty;
        }

        _logger.LogWarning(
            "Mirror request to {Path} failed with {StatusCode}. Body: {Body}",
            path,
            status,
            string.IsNullOrEmpty(body) ? "<empty>" : body);

        throw new HttpRequestException($"Mirror request to '{path}' failed with status {(HttpStatusCode)status} ({status}).", null, response.StatusCode);
    }
}

View File

@@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
/// <summary>Top-level mirror index document listing the domains a mirror publishes.</summary>
public sealed record MirrorIndexDocument(
[property: JsonPropertyName("schemaVersion")] int SchemaVersion,
[property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
[property: JsonPropertyName("targetRepository")] string? TargetRepository,
[property: JsonPropertyName("domains")] IReadOnlyList<MirrorIndexDomainEntry> Domains);
/// <summary>Per-domain entry: advisory count plus manifest and bundle artefact descriptors.</summary>
public sealed record MirrorIndexDomainEntry(
[property: JsonPropertyName("domainId")] string DomainId,
[property: JsonPropertyName("displayName")] string DisplayName,
[property: JsonPropertyName("advisoryCount")] int AdvisoryCount,
[property: JsonPropertyName("manifest")] MirrorFileDescriptor Manifest,
[property: JsonPropertyName("bundle")] MirrorFileDescriptor Bundle,
[property: JsonPropertyName("sources")] IReadOnlyList<MirrorSourceSummary> Sources);
/// <summary>Downloadable artefact descriptor: path, size, digest, and optional detached signature.</summary>
public sealed record MirrorFileDescriptor(
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sizeBytes")] long SizeBytes,
[property: JsonPropertyName("digest")] string Digest,
[property: JsonPropertyName("signature")] MirrorSignatureDescriptor? Signature);
/// <summary>Metadata for an artefact's detached JWS signature.</summary>
public sealed record MirrorSignatureDescriptor(
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("algorithm")] string Algorithm,
[property: JsonPropertyName("keyId")] string KeyId,
[property: JsonPropertyName("provider")] string Provider,
[property: JsonPropertyName("signedAt")] DateTimeOffset SignedAt);
/// <summary>Aggregated statistics for an upstream source contributing to a domain.</summary>
public sealed record MirrorSourceSummary(
[property: JsonPropertyName("source")] string Source,
[property: JsonPropertyName("firstRecordedAt")] DateTimeOffset? FirstRecordedAt,
[property: JsonPropertyName("lastRecordedAt")] DateTimeOffset? LastRecordedAt,
[property: JsonPropertyName("advisoryCount")] int AdvisoryCount);

View File

@@ -0,0 +1,111 @@
using System.Linq;
using MongoDB.Bson;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
/// <summary>
/// Resume cursor for the mirror connector, persisted as BSON in source state:
/// tracks the last ingested bundle (export id, digest, generation time) and the
/// document/mapping ids still awaiting downstream processing.
/// </summary>
internal sealed record StellaOpsMirrorCursor(
string? ExportId,
string? BundleDigest,
DateTimeOffset? GeneratedAt,
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuids = Array.Empty<Guid>();
// Initial cursor used when no source state exists yet.
public static StellaOpsMirrorCursor Empty { get; } = new(
ExportId: null,
BundleDigest: null,
GeneratedAt: null,
PendingDocuments: EmptyGuids,
PendingMappings: EmptyGuids);
// Serializes the cursor; optional scalar fields are omitted when unset so the
// stored document stays minimal. Guids are stored as strings.
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
};
if (!string.IsNullOrWhiteSpace(ExportId))
{
document["exportId"] = ExportId;
}
if (!string.IsNullOrWhiteSpace(BundleDigest))
{
document["bundleDigest"] = BundleDigest;
}
if (GeneratedAt.HasValue)
{
document["generatedAt"] = GeneratedAt.Value.UtcDateTime;
}
return document;
}
// Tolerant deserialization: missing or unparseable fields fall back to
// Empty's defaults rather than throwing.
public static StellaOpsMirrorCursor FromBson(BsonDocument? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var exportId = document.TryGetValue("exportId", out var exportValue) && exportValue.IsString ? exportValue.AsString : null;
var digest = document.TryGetValue("bundleDigest", out var digestValue) && digestValue.IsString ? digestValue.AsString : null;
DateTimeOffset? generatedAt = null;
if (document.TryGetValue("generatedAt", out var generatedValue))
{
// Accept either a native BSON date or an ISO-8601 string; anything else is ignored.
generatedAt = generatedValue.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(generatedValue.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(generatedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
var pendingMappings = ReadGuidArray(document, "pendingMappings");
return new StellaOpsMirrorCursor(exportId, digest, generatedAt, pendingDocuments, pendingMappings);
}
// With-style helpers: duplicates are collapsed; a null sequence resets to empty.
public StellaOpsMirrorCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = documents?.Distinct().ToArray() ?? EmptyGuids };
public StellaOpsMirrorCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = mappings?.Distinct().ToArray() ?? EmptyGuids };
// Records a newly ingested bundle: a blank exportId preserves the previous one,
// while the digest is always overwritten (null clears it).
public StellaOpsMirrorCursor WithBundleSnapshot(string? exportId, string? digest, DateTimeOffset generatedAt)
=> this with
{
ExportId = string.IsNullOrWhiteSpace(exportId) ? ExportId : exportId,
BundleDigest = digest,
GeneratedAt = generatedAt,
};
// Reads a BSON array of GUID strings, skipping entries that fail to parse.
private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuids;
}
var results = new List<Guid>(array.Count);
foreach (var element in array)
{
if (element is null)
{
continue;
}
if (Guid.TryParse(element.ToString(), out var guid))
{
results.Add(guid);
}
}
return results;
}
}

View File

@@ -0,0 +1,43 @@
using StellaOps.Concelier.Core.Jobs;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
/// <summary>Job kind identifiers registered for the StellaOps mirror connector.</summary>
internal static class StellaOpsMirrorJobKinds
{
public const string Fetch = "source:stellaops-mirror:fetch";
public const string Parse = "source:stellaops-mirror:parse";
public const string Map = "source:stellaops-mirror:map";
}
/// <summary>Scheduler job delegating to <see cref="StellaOpsMirrorConnector.FetchAsync"/>.</summary>
internal sealed class StellaOpsMirrorFetchJob : IJob
{
private readonly StellaOpsMirrorConnector _connector;
public StellaOpsMirrorFetchJob(StellaOpsMirrorConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
/// <summary>Scheduler job delegating to <see cref="StellaOpsMirrorConnector.ParseAsync"/>.</summary>
internal sealed class StellaOpsMirrorParseJob : IJob
{
private readonly StellaOpsMirrorConnector _connector;
public StellaOpsMirrorParseJob(StellaOpsMirrorConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
/// <summary>Scheduler job delegating to <see cref="StellaOpsMirrorConnector.MapAsync"/>.</summary>
internal sealed class StellaOpsMirrorMapJob : IJob
{
private readonly StellaOpsMirrorConnector _connector;
public StellaOpsMirrorMapJob(StellaOpsMirrorConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}

View File

@@ -0,0 +1,121 @@
using System;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Security;
/// <summary>
/// Validates detached JWS signatures emitted by mirror bundles.
/// </summary>
public sealed class MirrorSignatureVerifier
{
    private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    private readonly ICryptoProviderRegistry _providerRegistry;
    private readonly ILogger<MirrorSignatureVerifier> _logger;

    public MirrorSignatureVerifier(ICryptoProviderRegistry providerRegistry, ILogger<MirrorSignatureVerifier> logger)
    {
        _providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Verifies a detached JWS (unencoded payload, b64=false) over <paramref name="payload"/>.
    /// </summary>
    /// <param name="payload">Raw bundle bytes the signature covers.</param>
    /// <param name="signatureValue">Compact detached serialization of the form "&lt;header&gt;..&lt;signature&gt;".</param>
    /// <exception cref="ArgumentException">Thrown when the payload is empty or the signature value is blank.</exception>
    /// <exception cref="InvalidOperationException">Thrown when the signature is malformed, the header violates the detached-payload contract, or verification fails.</exception>
    public async Task VerifyAsync(ReadOnlyMemory<byte> payload, string signatureValue, CancellationToken cancellationToken)
    {
        if (payload.IsEmpty)
        {
            throw new ArgumentException("Payload must not be empty.", nameof(payload));
        }
        if (string.IsNullOrWhiteSpace(signatureValue))
        {
            throw new ArgumentException("Signature value must be provided.", nameof(signatureValue));
        }
        if (!TryParseDetachedJws(signatureValue, out var encodedHeader, out var encodedSignature))
        {
            throw new InvalidOperationException("Detached JWS signature is malformed.");
        }

        var headerJson = Encoding.UTF8.GetString(Base64UrlEncoder.DecodeBytes(encodedHeader));
        var header = JsonSerializer.Deserialize<MirrorSignatureHeader>(headerJson, HeaderSerializerOptions)
            ?? throw new InvalidOperationException("Detached JWS header could not be parsed.");

        // Guard against a missing 'crit' array: dereferencing it unchecked produced a
        // NullReferenceException instead of the intended validation error.
        if (header.Critical is null || !header.Critical.Contains("b64", StringComparer.Ordinal))
        {
            throw new InvalidOperationException("Detached JWS header is missing required 'b64' critical parameter.");
        }

        // RFC 7797: an absent 'b64' defaults to true (encoded payload). Require an
        // explicit b64=false — a missing field previously deserialized to false and
        // was silently accepted.
        if (header.Base64Payload is not false)
        {
            throw new InvalidOperationException("Detached JWS header must set b64=false; expected unencoded payload.");
        }
        if (string.IsNullOrWhiteSpace(header.KeyId))
        {
            throw new InvalidOperationException("Detached JWS header missing key identifier.");
        }
        if (string.IsNullOrWhiteSpace(header.Algorithm))
        {
            throw new InvalidOperationException("Detached JWS header missing algorithm identifier.");
        }

        var signingInput = BuildSigningInput(encodedHeader, payload.Span);
        var signatureBytes = Base64UrlEncoder.DecodeBytes(encodedSignature);
        var keyReference = new CryptoKeyReference(header.KeyId, header.Provider);
        var resolution = _providerRegistry.ResolveSigner(
            CryptoCapability.Verification,
            header.Algorithm,
            keyReference,
            header.Provider);

        var verified = await resolution.Signer.VerifyAsync(signingInput, signatureBytes, cancellationToken).ConfigureAwait(false);
        if (!verified)
        {
            _logger.LogWarning("Detached JWS verification failed for key {KeyId} via provider {Provider}.", header.KeyId, resolution.ProviderName);
            throw new InvalidOperationException("Detached JWS signature verification failed.");
        }
    }

    // Splits "<header>..<signature>"; both segments must be non-empty and the
    // payload segment between the two dots must be absent (detached form).
    private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature)
    {
        var parts = value.Split("..", StringSplitOptions.None);
        if (parts.Length != 2 || string.IsNullOrEmpty(parts[0]) || string.IsNullOrEmpty(parts[1]))
        {
            encodedHeader = string.Empty;
            encodedSignature = string.Empty;
            return false;
        }
        encodedHeader = parts[0];
        encodedSignature = parts[1];
        return true;
    }

    // JWS signing input for b64=false: ASCII(protected header) || '.' || raw payload.
    private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload)
    {
        var headerBytes = Encoding.ASCII.GetBytes(encodedHeader);
        var buffer = new byte[headerBytes.Length + 1 + payload.Length];
        headerBytes.CopyTo(buffer.AsSpan());
        buffer[headerBytes.Length] = (byte)'.';
        payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1));
        return buffer;
    }

    // b64 and crit are nullable so absent fields are distinguishable from
    // explicit values during validation above.
    private sealed record MirrorSignatureHeader(
        [property: JsonPropertyName("alg")] string Algorithm,
        [property: JsonPropertyName("kid")] string KeyId,
        [property: JsonPropertyName("provider")] string? Provider,
        [property: JsonPropertyName("typ")] string? Type,
        [property: JsonPropertyName("b64")] bool? Base64Payload,
        [property: JsonPropertyName("crit")] string[]? Critical);
}

View File

@@ -0,0 +1,61 @@
using System;
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
/// <summary>
/// Configuration for the StellaOps mirror connector HTTP client.
/// </summary>
public sealed class StellaOpsMirrorConnectorOptions
{
/// <summary>
/// Base address of the mirror distribution endpoint (e.g., https://mirror.stella-ops.org).
/// Must be an absolute URI.
/// </summary>
[Required]
public Uri BaseAddress { get; set; } = new("https://mirror.stella-ops.org", UriKind.Absolute);
/// <summary>
/// Relative path to the mirror index document. Defaults to <c>/concelier/exports/index.json</c>.
/// </summary>
[Required]
public string IndexPath { get; set; } = "/concelier/exports/index.json";
/// <summary>
/// Preferred mirror domain identifier when multiple domains are published in the index.
/// Defaults to <c>primary</c>.
/// </summary>
[Required]
public string DomainId { get; set; } = "primary";
/// <summary>
/// Maximum duration to wait on HTTP requests. Defaults to 30 seconds.
/// </summary>
public TimeSpan HttpTimeout { get; set; } = TimeSpan.FromSeconds(30);
/// <summary>
/// Signature verification configuration for downloaded bundles.
/// </summary>
public SignatureOptions Signature { get; set; } = new();
public sealed class SignatureOptions
{
/// <summary>
/// When <c>true</c>, downloaded bundles must include a detached JWS that validates successfully.
/// Disabled by default.
/// </summary>
public bool Enabled { get; set; } = false;
/// <summary>
/// Expected signing key identifier (kid) emitted in the detached JWS header.
/// </summary>
public string KeyId { get; set; } = string.Empty;
/// <summary>
/// Optional crypto provider hint used to resolve verification keys.
/// </summary>
public string? Provider { get; set; }
/// <summary>
/// Optional path to a PEM-encoded EC public key used to verify signatures when registry resolution fails.
/// NOTE(review): not referenced by the connector code visible in this change — confirm it is consumed elsewhere.
/// </summary>
public string? PublicKeyPath { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<!-- StellaOps mirror connector: targets net10.0 with nullable reference types and implicit usings enabled. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,288 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.StellaOpsMirror.Client;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
using StellaOps.Concelier.Connector.StellaOpsMirror.Security;
using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
/// <summary>
/// Feed connector ingesting advisory bundles from a StellaOps mirror endpoint.
/// <see cref="FetchAsync"/> downloads the mirror index, selects the configured
/// domain, validates artefact digests (and, when enabled, a detached JWS
/// signature), persists manifest/bundle documents, and advances a resume cursor
/// so unchanged bundles are skipped. <see cref="ParseAsync"/> and
/// <see cref="MapAsync"/> are currently no-ops.
/// </summary>
public sealed class StellaOpsMirrorConnector : IFeedConnector
{
// Source identifier used for document records and source-state persistence.
public const string Source = "stellaops-mirror";
private readonly MirrorManifestClient _client;
private readonly MirrorSignatureVerifier _signatureVerifier;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly ISourceStateRepository _stateRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<StellaOpsMirrorConnector> _logger;
private readonly StellaOpsMirrorConnectorOptions _options;
public StellaOpsMirrorConnector(
MirrorManifestClient client,
MirrorSignatureVerifier signatureVerifier,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
ISourceStateRepository stateRepository,
IOptions<StellaOpsMirrorConnectorOptions> options,
TimeProvider? timeProvider,
ILogger<StellaOpsMirrorConnector> logger)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
_signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
ValidateOptions(_options);
}
public string SourceName => Source;
// Fetch pipeline: failures at each stage are recorded in source state with a
// backoff hint before the exception propagates to the job scheduler.
// NOTE(review): the success path only updates the cursor — there is no
// MarkSuccessAsync call here; confirm FailCount is reset elsewhere.
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
_ = services ?? throw new ArgumentNullException(nameof(services));
var now = _timeProvider.GetUtcNow();
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var pendingDocuments = cursor.PendingDocuments.ToHashSet();
var pendingMappings = cursor.PendingMappings.ToHashSet();
MirrorIndexDocument index;
try
{
index = await _client.GetIndexAsync(_options.IndexPath, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(15), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
var domain = index.Domains.FirstOrDefault(entry =>
string.Equals(entry.DomainId, _options.DomainId, StringComparison.OrdinalIgnoreCase));
if (domain is null)
{
var message = $"Mirror domain '{_options.DomainId}' not present in index.";
await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(30), message, cancellationToken).ConfigureAwait(false);
throw new InvalidOperationException(message);
}
// Unchanged bundle digest means nothing new to ingest.
if (string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase))
{
_logger.LogInformation("Mirror bundle digest {Digest} unchanged; skipping fetch.", domain.Bundle.Digest);
return;
}
try
{
await ProcessDomainAsync(index, domain, pendingDocuments, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(10), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
// NOTE(review): the bundle *path* is passed as WithBundleSnapshot's exportId
// argument — confirm the path is the intended export identifier.
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithBundleSnapshot(domain.Bundle.Path, domain.Bundle.Digest, index.GeneratedAt);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
// Parse/map stages are not implemented yet for this connector.
public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
=> Task.CompletedTask;
public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
=> Task.CompletedTask;
// Downloads manifest + bundle, verifies their sha256 digests (and the detached
// JWS when signature verification is enabled), then stores the manifest as
// already-mapped and the bundle as pending-parse.
private async Task ProcessDomainAsync(
MirrorIndexDocument index,
MirrorIndexDomainEntry domain,
HashSet<Guid> pendingDocuments,
CancellationToken cancellationToken)
{
var manifestBytes = await _client.DownloadAsync(domain.Manifest.Path, cancellationToken).ConfigureAwait(false);
var bundleBytes = await _client.DownloadAsync(domain.Bundle.Path, cancellationToken).ConfigureAwait(false);
VerifyDigest(domain.Manifest.Digest, manifestBytes, domain.Manifest.Path);
VerifyDigest(domain.Bundle.Digest, bundleBytes, domain.Bundle.Path);
if (_options.Signature.Enabled)
{
if (domain.Bundle.Signature is null)
{
throw new InvalidOperationException("Mirror bundle did not include a signature descriptor while verification is enabled.");
}
var signatureBytes = await _client.DownloadAsync(domain.Bundle.Signature.Path, cancellationToken).ConfigureAwait(false);
var signatureValue = Encoding.UTF8.GetString(signatureBytes);
await _signatureVerifier.VerifyAsync(bundleBytes, signatureValue, cancellationToken).ConfigureAwait(false);
}
await StoreAsync(domain, index.GeneratedAt, domain.Manifest, manifestBytes, "application/json", DocumentStatuses.Mapped, addToPending: false, pendingDocuments, cancellationToken).ConfigureAwait(false);
var bundleRecord = await StoreAsync(domain, index.GeneratedAt, domain.Bundle, bundleBytes, "application/json", DocumentStatuses.PendingParse, addToPending: true, pendingDocuments, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Stored mirror bundle {Uri} as document {DocumentId} with digest {Digest}.",
bundleRecord.Uri,
bundleRecord.Id,
bundleRecord.Sha256);
}
// Upserts one artefact document. When an existing record already carries the
// descriptor's digest, the stored copy is reused instead of re-uploading to GridFS.
private async Task<DocumentRecord> StoreAsync(
MirrorIndexDomainEntry domain,
DateTimeOffset generatedAt,
MirrorFileDescriptor descriptor,
byte[] payload,
string contentType,
string status,
bool addToPending,
HashSet<Guid> pendingDocuments,
CancellationToken cancellationToken)
{
var absolute = ResolveAbsolutePath(descriptor.Path);
var existing = await _documentStore.FindBySourceAndUriAsync(Source, absolute, cancellationToken).ConfigureAwait(false);
if (existing is not null && string.Equals(existing.Sha256, NormalizeDigest(descriptor.Digest), StringComparison.OrdinalIgnoreCase))
{
if (addToPending)
{
pendingDocuments.Add(existing.Id);
}
return existing;
}
var gridFsId = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, cancellationToken).ConfigureAwait(false);
var now = _timeProvider.GetUtcNow();
var sha = ComputeSha256(payload);
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["mirror.domainId"] = domain.DomainId,
["mirror.displayName"] = domain.DisplayName,
["mirror.path"] = descriptor.Path,
["mirror.digest"] = NormalizeDigest(descriptor.Digest),
// Reference comparison is sufficient: descriptor is either domain.Bundle or domain.Manifest.
["mirror.type"] = ReferenceEquals(descriptor, domain.Bundle) ? "bundle" : "manifest",
};
var record = new DocumentRecord(
existing?.Id ?? Guid.NewGuid(),
Source,
absolute,
now,
sha,
status,
contentType,
Headers: null,
Metadata: metadata,
Etag: null,
LastModified: generatedAt,
GridFsId: gridFsId,
ExpiresAt: null);
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
if (addToPending)
{
pendingDocuments.Add(upserted.Id);
}
return upserted;
}
// Resolves a mirror-relative path against the configured base address.
private string ResolveAbsolutePath(string path)
{
var uri = new Uri(_options.BaseAddress, path);
return uri.ToString();
}
// Loads the persisted cursor, or Empty when no source state exists yet.
private async Task<StellaOpsMirrorCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(Source, cancellationToken).ConfigureAwait(false);
return state is null ? StellaOpsMirrorCursor.Empty : StellaOpsMirrorCursor.FromBson(state.Cursor);
}
private async Task UpdateCursorAsync(StellaOpsMirrorCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBsonDocument();
var now = _timeProvider.GetUtcNow();
await _stateRepository.UpdateCursorAsync(Source, document, now, cancellationToken).ConfigureAwait(false);
}
// Validates a "sha256:<hex>" digest against the payload; a blank expected
// digest is treated as "no expectation" and accepted.
private static void VerifyDigest(string expected, ReadOnlySpan<byte> payload, string path)
{
if (string.IsNullOrWhiteSpace(expected))
{
return;
}
if (!expected.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"Unsupported digest '{expected}' for '{path}'.");
}
var actualHash = SHA256.HashData(payload);
var actual = "sha256:" + Convert.ToHexString(actualHash).ToLowerInvariant();
if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"Digest mismatch for '{path}'. Expected {expected}, computed {actual}.");
}
}
// Lowercase hex SHA-256 of the payload, without a "sha256:" prefix.
private static string ComputeSha256(ReadOnlySpan<byte> payload)
{
var hash = SHA256.HashData(payload);
return Convert.ToHexString(hash).ToLowerInvariant();
}
// Strips a "sha256:" prefix so digests compare against DocumentRecord.Sha256,
// which is stored without the prefix.
private static string NormalizeDigest(string digest)
{
if (string.IsNullOrWhiteSpace(digest))
{
return string.Empty;
}
return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
? digest[7..]
: digest.ToLowerInvariant();
}
// Fails fast on configuration the connector cannot operate with.
private static void ValidateOptions(StellaOpsMirrorConnectorOptions options)
{
if (options.BaseAddress is null || !options.BaseAddress.IsAbsoluteUri)
{
throw new InvalidOperationException("Mirror connector requires an absolute baseAddress.");
}
if (string.IsNullOrWhiteSpace(options.DomainId))
{
throw new InvalidOperationException("Mirror connector requires domainId to be specified.");
}
}
}
// NOTE(review): this file-scoped extension is not referenced anywhere in this
// file — confirm it is needed or remove it.
file static class UriExtensions
{
// Resolves a relative path against a base URI via the Uri(Uri, string) constructor.
public static Uri Combine(this Uri baseUri, string relative)
=> new(baseUri, relative);
}

View File

@@ -0,0 +1,19 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
/// <summary>
/// Plugin entry point exposing the StellaOps mirror connector to the host.
/// </summary>
public sealed class StellaOpsMirrorConnectorPlugin : IConnectorPlugin
{
public const string SourceName = StellaOpsMirrorConnector.Source;
public string Name => SourceName;
// Available whenever a service provider exists; the connector resolves its own dependencies from it.
public bool IsAvailable(IServiceProvider services) => services is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return ActivatorUtilities.CreateInstance<StellaOpsMirrorConnector>(services);
}
}

View File

@@ -0,0 +1,79 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.StellaOpsMirror.Client;
using StellaOps.Concelier.Connector.StellaOpsMirror.Security;
using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.DependencyInjection;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
/// <summary>
/// DI routine that wires options, HTTP plumbing, and scheduler jobs for the StellaOps mirror source.
/// </summary>
public sealed class StellaOpsMirrorDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    // Configuration path bound to StellaOpsMirrorConnectorOptions.
    private const string ConfigurationSection = "concelier:sources:stellaopsMirror";
    // Named HttpClient used by the mirror manifest client.
    private const string HttpClientName = "stellaops-mirror";
    private static readonly TimeSpan FetchTimeout = TimeSpan.FromMinutes(5);
    private static readonly TimeSpan LeaseDuration = TimeSpan.FromMinutes(4);

    /// <summary>
    /// Registers connector services; parse/map jobs are registered but disabled (enabled: false).
    /// </summary>
    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        services.AddOptions<StellaOpsMirrorConnectorOptions>()
            .Bind(configuration.GetSection(ConfigurationSection))
            .PostConfigure(options =>
            {
                // Combined with ValidateOnStart, this fails at startup instead of on first fetch.
                if (options.BaseAddress is null)
                {
                    throw new InvalidOperationException("stellaopsMirror.baseAddress must be configured.");
                }
            })
            .ValidateOnStart();
        services.AddSourceCommon();
        services.AddHttpClient(HttpClientName, (sp, client) =>
        {
            var options = sp.GetRequiredService<IOptions<StellaOpsMirrorConnectorOptions>>().Value;
            client.BaseAddress = options.BaseAddress;
            client.Timeout = options.HttpTimeout;
            client.DefaultRequestHeaders.Accept.Clear();
            client.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));
        });
        services.AddTransient<MirrorManifestClient>(sp =>
        {
            // Resolve the named client explicitly so the manifest client gets the configured base address.
            var factory = sp.GetRequiredService<IHttpClientFactory>();
            var httpClient = factory.CreateClient(HttpClientName);
            return ActivatorUtilities.CreateInstance<MirrorManifestClient>(sp, httpClient);
        });
        services.TryAddSingleton<MirrorSignatureVerifier>();
        services.AddTransient<StellaOpsMirrorConnector>();
        var scheduler = new JobSchedulerBuilder(services);
        // Fetch runs every 15 minutes under a 4-minute lease.
        scheduler.AddJob<StellaOpsMirrorFetchJob>(
            StellaOpsMirrorJobKinds.Fetch,
            cronExpression: "*/15 * * * *",
            timeout: FetchTimeout,
            leaseDuration: LeaseDuration);
        scheduler.AddJob<StellaOpsMirrorParseJob>(
            StellaOpsMirrorJobKinds.Parse,
            cronExpression: null,
            timeout: TimeSpan.FromMinutes(5),
            leaseDuration: LeaseDuration,
            enabled: false);
        scheduler.AddJob<StellaOpsMirrorMapJob>(
            StellaOpsMirrorJobKinds.Map,
            cronExpression: null,
            timeout: TimeSpan.FromMinutes(5),
            leaseDuration: LeaseDuration,
            enabled: false);
        return services;
    }
}

View File

@@ -2,6 +2,6 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| FEEDCONN-STELLA-08-001 | TODO | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. |
| FEEDCONN-STELLA-08-001 | DOING (2025-10-19) | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. *(In progress: HTTP client + detached JWS verifier scaffolding landed.)* |
| FEEDCONN-STELLA-08-002 | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | Mapper produces advisories/aliases/affected with mirror provenance; fixtures assert canonical parity with upstream JSON exporters. |
| FEEDCONN-STELLA-08-003 | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | Connector resumes from last export, handles deletion/delta cases, docs updated with config sample; integration test covers resume + new export scenario. |

View File

@@ -9,3 +9,4 @@
|FEEDCONN-CISCO-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** Cisco diagnostics counters exposed and ops runbook updated with telemetry guidance (`docs/ops/concelier-cisco-operations.md`).|
|FEEDCONN-CISCO-02-007 API selection decision memo|BE-Conn-Cisco|Research|**DONE (2025-10-11)** Drafted decision matrix: openVuln (structured/delta filters, OAuth throttle) vs RSS (delayed/minimal metadata). Pending OAuth onboarding (`FEEDCONN-CISCO-02-008`) before final recommendation circulated.|
|FEEDCONN-CISCO-02-008 OAuth client provisioning|Ops, BE-Conn-Cisco|Ops|**DONE (2025-10-14)** `docs/ops/concelier-cisco-operations.md` documents OAuth provisioning/rotation, quotas, and Offline Kit distribution guidance.|
|FEEDCONN-CISCO-02-009 Normalized SemVer promotion|BE-Conn-Cisco|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Use helper from `../Merge/RANGE_PRIMITIVES_COORDINATION.md` to convert `SemVerPrimitive` outputs into `NormalizedVersionRule` with provenance (`cisco:{productId}`), update mapper/tests, and confirm merge normalized-rule counters drop.|

View File

@@ -0,0 +1,198 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Events;
/// <summary>
/// Unit tests for <see cref="AdvisoryEventLog"/>: append-time normalization/hashing and replay ordering.
/// </summary>
public sealed class AdvisoryEventLogTests
{
    [Fact]
    public async Task AppendAsync_PersistsCanonicalStatementEntries()
    {
        var repository = new FakeRepository();
        var timeProvider = new FixedTimeProvider(DateTimeOffset.UtcNow);
        var log = new AdvisoryEventLog(repository, timeProvider);
        var advisory = new Advisory(
            "adv-1",
            "Test Advisory",
            summary: "Summary",
            language: "en",
            published: DateTimeOffset.Parse("2025-10-01T00:00:00Z"),
            modified: DateTimeOffset.Parse("2025-10-02T00:00:00Z"),
            severity: "high",
            exploitKnown: true,
            aliases: new[] { "CVE-2025-0001" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: Array.Empty<AdvisoryProvenance>());
        var statementInput = new AdvisoryStatementInput(
            VulnerabilityKey: "CVE-2025-0001",
            Advisory: advisory,
            AsOf: DateTimeOffset.Parse("2025-10-03T00:00:00Z"),
            InputDocumentIds: new[] { Guid.Parse("11111111-1111-1111-1111-111111111111") });
        await log.AppendAsync(new AdvisoryEventAppendRequest(new[] { statementInput }), CancellationToken.None);
        Assert.Single(repository.InsertedStatements);
        var entry = repository.InsertedStatements.Single();
        // Append lower-cases the vulnerability key.
        Assert.Equal("cve-2025-0001", entry.VulnerabilityKey);
        Assert.Equal("adv-1", entry.AdvisoryKey);
        Assert.Equal(DateTimeOffset.Parse("2025-10-03T00:00:00Z"), entry.AsOf);
        Assert.Contains("\"advisoryKey\":\"adv-1\"", entry.CanonicalJson);
        Assert.NotEqual(ImmutableArray<byte>.Empty, entry.StatementHash);
    }

    [Fact]
    public async Task AppendAsync_PersistsConflictsWithCanonicalizedJson()
    {
        var repository = new FakeRepository();
        var timeProvider = new FixedTimeProvider(DateTimeOffset.Parse("2025-10-19T12:00:00Z"));
        var log = new AdvisoryEventLog(repository, timeProvider);
        using var conflictJson = JsonDocument.Parse("{\"reason\":\"tie\",\"details\":{\"b\":2,\"a\":1}}");
        var conflictInput = new AdvisoryConflictInput(
            VulnerabilityKey: "CVE-2025-0001",
            Details: conflictJson,
            AsOf: DateTimeOffset.Parse("2025-10-04T00:00:00Z"),
            StatementIds: new[] { Guid.Parse("22222222-2222-2222-2222-222222222222") });
        await log.AppendAsync(new AdvisoryEventAppendRequest(Array.Empty<AdvisoryStatementInput>(), new[] { conflictInput }), CancellationToken.None);
        Assert.Single(repository.InsertedConflicts);
        var entry = repository.InsertedConflicts.Single();
        Assert.Equal("cve-2025-0001", entry.VulnerabilityKey);
        // Canonicalization sorts object keys ordinally ("details" before "reason", "a" before "b").
        Assert.Equal("{\"details\":{\"a\":1,\"b\":2},\"reason\":\"tie\"}", entry.CanonicalJson);
        Assert.NotEqual(ImmutableArray<byte>.Empty, entry.ConflictHash);
        Assert.Equal(DateTimeOffset.Parse("2025-10-04T00:00:00Z"), entry.AsOf);
    }

    [Fact]
    public async Task ReplayAsync_ReturnsSortedSnapshots()
    {
        var repository = new FakeRepository();
        var timeProvider = new FixedTimeProvider(DateTimeOffset.Parse("2025-10-05T00:00:00Z"));
        var log = new AdvisoryEventLog(repository, timeProvider);
        // Two stored statements with different AsOf values; replay must order newest-first.
        repository.StoredStatements.AddRange(new[]
        {
            new AdvisoryStatementEntry(
                Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"),
                "cve-2025-0001",
                "adv-2",
                CanonicalJsonSerializer.Serialize(new Advisory(
                    "adv-2",
                    "B title",
                    null,
                    null,
                    null,
                    DateTimeOffset.Parse("2025-10-02T00:00:00Z"),
                    null,
                    false,
                    Array.Empty<string>(),
                    Array.Empty<AdvisoryReference>(),
                    Array.Empty<AffectedPackage>(),
                    Array.Empty<CvssMetric>(),
                    Array.Empty<AdvisoryProvenance>())),
                ImmutableArray.Create(new byte[] { 0x01, 0x02 }),
                DateTimeOffset.Parse("2025-10-04T00:00:00Z"),
                DateTimeOffset.Parse("2025-10-04T01:00:00Z"),
                ImmutableArray<Guid>.Empty),
            new AdvisoryStatementEntry(
                Guid.Parse("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"),
                "cve-2025-0001",
                "adv-1",
                CanonicalJsonSerializer.Serialize(new Advisory(
                    "adv-1",
                    "A title",
                    null,
                    null,
                    null,
                    DateTimeOffset.Parse("2025-10-01T00:00:00Z"),
                    null,
                    false,
                    Array.Empty<string>(),
                    Array.Empty<AdvisoryReference>(),
                    Array.Empty<AffectedPackage>(),
                    Array.Empty<CvssMetric>(),
                    Array.Empty<AdvisoryProvenance>())),
                ImmutableArray.Create(new byte[] { 0x03, 0x04 }),
                DateTimeOffset.Parse("2025-10-03T00:00:00Z"),
                DateTimeOffset.Parse("2025-10-04T02:00:00Z"),
                ImmutableArray<Guid>.Empty),
        });
        repository.StoredConflicts.Add(new AdvisoryConflictEntry(
            Guid.Parse("cccccccc-cccc-cccc-cccc-cccccccccccc"),
            "cve-2025-0001",
            CanonicalJson: "{\"reason\":\"conflict\"}",
            ConflictHash: ImmutableArray.Create(new byte[] { 0x10 }),
            AsOf: DateTimeOffset.Parse("2025-10-04T00:00:00Z"),
            RecordedAt: DateTimeOffset.Parse("2025-10-04T03:00:00Z"),
            StatementIds: ImmutableArray<Guid>.Empty));
        var replay = await log.ReplayAsync("CVE-2025-0001", asOf: null, CancellationToken.None);
        Assert.Equal("cve-2025-0001", replay.VulnerabilityKey);
        // adv-2 has the later AsOf, so it comes first.
        Assert.Collection(
            replay.Statements,
            first => Assert.Equal("adv-2", first.AdvisoryKey),
            second => Assert.Equal("adv-1", second.AdvisoryKey));
        Assert.Single(replay.Conflicts);
        Assert.Equal("{\"reason\":\"conflict\"}", replay.Conflicts[0].CanonicalJson);
    }

    // In-memory repository double recording inserted entries and serving stored ones.
    private sealed class FakeRepository : IAdvisoryEventRepository
    {
        public List<AdvisoryStatementEntry> InsertedStatements { get; } = new();
        public List<AdvisoryConflictEntry> InsertedConflicts { get; } = new();
        public List<AdvisoryStatementEntry> StoredStatements { get; } = new();
        public List<AdvisoryConflictEntry> StoredConflicts { get; } = new();
        public ValueTask InsertStatementsAsync(IReadOnlyCollection<AdvisoryStatementEntry> statements, CancellationToken cancellationToken)
        {
            InsertedStatements.AddRange(statements);
            return ValueTask.CompletedTask;
        }
        public ValueTask InsertConflictsAsync(IReadOnlyCollection<AdvisoryConflictEntry> conflicts, CancellationToken cancellationToken)
        {
            InsertedConflicts.AddRange(conflicts);
            return ValueTask.CompletedTask;
        }
        public ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
            => ValueTask.FromResult<IReadOnlyList<AdvisoryStatementEntry>>(StoredStatements.Where(entry =>
                string.Equals(entry.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal) &&
                (!asOf.HasValue || entry.AsOf <= asOf.Value)).ToList());
        public ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
            => ValueTask.FromResult<IReadOnlyList<AdvisoryConflictEntry>>(StoredConflicts.Where(entry =>
                string.Equals(entry.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal) &&
                (!asOf.HasValue || entry.AsOf <= asOf.Value)).ToList());
    }

    // Deterministic clock so recordedAt values are stable in assertions.
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;
        public FixedTimeProvider(DateTimeOffset now)
        {
            _now = now.ToUniversalTime();
        }
        public override DateTimeOffset GetUtcNow() => _now;
    }
}

View File

@@ -0,0 +1,93 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// Input payload for appending a canonical advisory statement to the event log.
/// </summary>
/// <param name="VulnerabilityKey">Vulnerability the statement belongs to; normalized to lower-case on append.</param>
/// <param name="Advisory">Canonical advisory payload to snapshot.</param>
/// <param name="AsOf">Logical point-in-time of the statement (stored as UTC).</param>
/// <param name="InputDocumentIds">Source document ids that produced the statement; empty guids are dropped.</param>
/// <param name="StatementId">Optional explicit id; a new guid is generated when null.</param>
/// <param name="AdvisoryKey">Optional override; must match the advisory payload's own key when supplied.</param>
public sealed record AdvisoryStatementInput(
    string VulnerabilityKey,
    Advisory Advisory,
    DateTimeOffset AsOf,
    IReadOnlyCollection<Guid> InputDocumentIds,
    Guid? StatementId = null,
    string? AdvisoryKey = null);
/// <summary>
/// Input payload for appending an advisory conflict entry aligned with an advisory statement snapshot.
/// </summary>
/// <param name="VulnerabilityKey">Vulnerability the conflict belongs to; normalized to lower-case on append.</param>
/// <param name="Details">Structured conflict explanation; canonicalized (sorted object keys) before hashing.</param>
/// <param name="AsOf">Logical point-in-time of the conflict (stored as UTC).</param>
/// <param name="StatementIds">Ids of the statements the conflict refers to; empty guids are dropped.</param>
/// <param name="ConflictId">Optional explicit id; a new guid is generated when null.</param>
public sealed record AdvisoryConflictInput(
    string VulnerabilityKey,
    JsonDocument Details,
    DateTimeOffset AsOf,
    IReadOnlyCollection<Guid> StatementIds,
    Guid? ConflictId = null);
/// <summary>
/// Append request encapsulating statement and conflict batches sharing a single persistence window.
/// </summary>
/// <param name="Statements">Statements to append; may be empty.</param>
/// <param name="Conflicts">Optional conflicts appended alongside the statements.</param>
public sealed record AdvisoryEventAppendRequest(
    IReadOnlyCollection<AdvisoryStatementInput> Statements,
    IReadOnlyCollection<AdvisoryConflictInput>? Conflicts = null);
/// <summary>
/// Replay response describing immutable statement snapshots for a vulnerability key.
/// </summary>
/// <param name="VulnerabilityKey">Normalized (lower-case) key the replay was built for.</param>
/// <param name="AsOf">Upper bound used for the query, or null when the full history was requested.</param>
/// <param name="Statements">Statement snapshots ordered AsOf desc, then RecordedAt desc.</param>
/// <param name="Conflicts">Conflict snapshots in the same ordering.</param>
public sealed record AdvisoryReplay(
    string VulnerabilityKey,
    DateTimeOffset? AsOf,
    ImmutableArray<AdvisoryStatementSnapshot> Statements,
    ImmutableArray<AdvisoryConflictSnapshot> Conflicts);
/// <summary>
/// Immutable advisory statement snapshot captured at a specific <c>asOf</c> time.
/// </summary>
/// <param name="StatementHash">SHA-256 of the canonical JSON serialization of the advisory.</param>
/// <param name="RecordedAt">Wall-clock time the statement was persisted (shared per append batch).</param>
public sealed record AdvisoryStatementSnapshot(
    Guid StatementId,
    string VulnerabilityKey,
    string AdvisoryKey,
    Advisory Advisory,
    ImmutableArray<byte> StatementHash,
    DateTimeOffset AsOf,
    DateTimeOffset RecordedAt,
    ImmutableArray<Guid> InputDocumentIds);
/// <summary>
/// Immutable advisory conflict snapshot describing divergence explanations for a vulnerability key.
/// </summary>
/// <param name="ConflictHash">SHA-256 of <paramref name="CanonicalJson"/>.</param>
/// <param name="CanonicalJson">Conflict details serialized with ordinally sorted object keys.</param>
public sealed record AdvisoryConflictSnapshot(
    Guid ConflictId,
    string VulnerabilityKey,
    ImmutableArray<Guid> StatementIds,
    ImmutableArray<byte> ConflictHash,
    DateTimeOffset AsOf,
    DateTimeOffset RecordedAt,
    string CanonicalJson);
/// <summary>
/// Persistence-facing representation of an advisory statement used by repositories.
/// </summary>
/// <param name="CanonicalJson">Canonical serialization of the advisory; the payload of record.</param>
/// <param name="StatementHash">SHA-256 of <paramref name="CanonicalJson"/>.</param>
public sealed record AdvisoryStatementEntry(
    Guid StatementId,
    string VulnerabilityKey,
    string AdvisoryKey,
    string CanonicalJson,
    ImmutableArray<byte> StatementHash,
    DateTimeOffset AsOf,
    DateTimeOffset RecordedAt,
    ImmutableArray<Guid> InputDocumentIds);
/// <summary>
/// Persistence-facing representation of an advisory conflict used by repositories.
/// </summary>
/// <param name="CanonicalJson">Conflict details serialized with ordinally sorted object keys.</param>
/// <param name="ConflictHash">SHA-256 of <paramref name="CanonicalJson"/>.</param>
public sealed record AdvisoryConflictEntry(
    Guid ConflictId,
    string VulnerabilityKey,
    string CanonicalJson,
    ImmutableArray<byte> ConflictHash,
    DateTimeOffset AsOf,
    DateTimeOffset RecordedAt,
    ImmutableArray<Guid> StatementIds);

View File

@@ -0,0 +1,297 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// Default implementation of <see cref="IAdvisoryEventLog"/> that coordinates statement/conflict persistence.
/// Statements are canonicalized and hashed before insert so replays are deterministic.
/// </summary>
public sealed class AdvisoryEventLog : IAdvisoryEventLog
{
    // Compact output with relaxed escaping so canonical bytes (and hashes) are stable.
    private static readonly JsonWriterOptions CanonicalWriterOptions = new()
    {
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Indented = false,
        SkipValidation = false,
    };
    private readonly IAdvisoryEventRepository _repository;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the log over <paramref name="repository"/>; uses <see cref="TimeProvider.System"/> when no clock is given.
    /// </summary>
    public AdvisoryEventLog(IAdvisoryEventRepository repository, TimeProvider? timeProvider = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Canonicalizes, hashes, and persists the batches in <paramref name="request"/>.
    /// Both batches share one recordedAt timestamp; an empty request is a no-op.
    /// </summary>
    public async ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        var statements = request.Statements ?? Array.Empty<AdvisoryStatementInput>();
        var conflicts = request.Conflicts ?? Array.Empty<AdvisoryConflictInput>();
        if (statements.Count == 0 && conflicts.Count == 0)
        {
            return;
        }
        var recordedAt = _timeProvider.GetUtcNow();
        var statementEntries = BuildStatementEntries(statements, recordedAt);
        var conflictEntries = BuildConflictEntries(conflicts, recordedAt);
        if (statementEntries.Count > 0)
        {
            await _repository.InsertStatementsAsync(statementEntries, cancellationToken).ConfigureAwait(false);
        }
        if (conflictEntries.Count > 0)
        {
            await _repository.InsertConflictsAsync(conflictEntries, cancellationToken).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Loads statements/conflicts for a key (optionally bounded by <paramref name="asOf"/>) and
    /// returns them ordered AsOf desc, then RecordedAt desc, for deterministic replay.
    /// </summary>
    public async ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(vulnerabilityKey))
        {
            throw new ArgumentException("Vulnerability key must be provided.", nameof(vulnerabilityKey));
        }
        var normalizedKey = NormalizeKey(vulnerabilityKey, nameof(vulnerabilityKey));
        var statements = await _repository.GetStatementsAsync(normalizedKey, asOf, cancellationToken).ConfigureAwait(false);
        var conflicts = await _repository.GetConflictsAsync(normalizedKey, asOf, cancellationToken).ConfigureAwait(false);
        var statementSnapshots = statements
            .OrderByDescending(static entry => entry.AsOf)
            .ThenByDescending(static entry => entry.RecordedAt)
            .Select(ToStatementSnapshot)
            .ToImmutableArray();
        var conflictSnapshots = conflicts
            .OrderByDescending(static entry => entry.AsOf)
            .ThenByDescending(static entry => entry.RecordedAt)
            .Select(ToConflictSnapshot)
            .ToImmutableArray();
        return new AdvisoryReplay(normalizedKey, asOf, statementSnapshots, conflictSnapshots);
    }

    // Rehydrates the advisory payload from its canonical JSON for replay consumers.
    private static AdvisoryStatementSnapshot ToStatementSnapshot(AdvisoryStatementEntry entry)
    {
        ArgumentNullException.ThrowIfNull(entry);
        var advisory = CanonicalJsonSerializer.Deserialize<Advisory>(entry.CanonicalJson);
        return new AdvisoryStatementSnapshot(
            entry.StatementId,
            entry.VulnerabilityKey,
            entry.AdvisoryKey,
            advisory,
            entry.StatementHash,
            entry.AsOf,
            entry.RecordedAt,
            entry.InputDocumentIds);
    }

    // Conflicts keep their canonical JSON verbatim; no payload rehydration needed.
    private static AdvisoryConflictSnapshot ToConflictSnapshot(AdvisoryConflictEntry entry)
    {
        ArgumentNullException.ThrowIfNull(entry);
        return new AdvisoryConflictSnapshot(
            entry.ConflictId,
            entry.VulnerabilityKey,
            entry.StatementIds,
            entry.ConflictHash,
            entry.AsOf,
            entry.RecordedAt,
            entry.CanonicalJson);
    }

    // Normalizes, serializes, and hashes each statement input into a persistence entry.
    private static IReadOnlyCollection<AdvisoryStatementEntry> BuildStatementEntries(
        IReadOnlyCollection<AdvisoryStatementInput> statements,
        DateTimeOffset recordedAt)
    {
        if (statements.Count == 0)
        {
            return Array.Empty<AdvisoryStatementEntry>();
        }
        var entries = new List<AdvisoryStatementEntry>(statements.Count);
        foreach (var statement in statements)
        {
            ArgumentNullException.ThrowIfNull(statement);
            ArgumentNullException.ThrowIfNull(statement.Advisory);
            var vulnerabilityKey = NormalizeKey(statement.VulnerabilityKey, nameof(statement.VulnerabilityKey));
            var advisory = CanonicalJsonSerializer.Normalize(statement.Advisory);
            var advisoryKey = string.IsNullOrWhiteSpace(statement.AdvisoryKey)
                ? advisory.AdvisoryKey
                : statement.AdvisoryKey.Trim();
            if (string.IsNullOrWhiteSpace(advisoryKey))
            {
                throw new ArgumentException("Advisory key must be provided.", nameof(statement));
            }
            // An explicit AdvisoryKey override must agree with the payload's own key.
            if (!string.Equals(advisory.AdvisoryKey, advisoryKey, StringComparison.Ordinal))
            {
                throw new ArgumentException("Advisory key in payload must match provided advisory key.", nameof(statement));
            }
            var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
            var hashBytes = ComputeHash(canonicalJson);
            var asOf = statement.AsOf.ToUniversalTime();
            // Drop empty guids, de-duplicate, and sort so the stored id list is deterministic.
            var inputDocuments = statement.InputDocumentIds?.Count > 0
                ? statement.InputDocumentIds
                    .Where(static id => id != Guid.Empty)
                    .Distinct()
                    .OrderBy(static id => id)
                    .ToImmutableArray()
                : ImmutableArray<Guid>.Empty;
            entries.Add(new AdvisoryStatementEntry(
                statement.StatementId ?? Guid.NewGuid(),
                vulnerabilityKey,
                advisoryKey,
                canonicalJson,
                hashBytes,
                asOf,
                recordedAt,
                inputDocuments));
        }
        return entries;
    }

    // Mirrors BuildStatementEntries for conflict inputs, canonicalizing the JSON details.
    private static IReadOnlyCollection<AdvisoryConflictEntry> BuildConflictEntries(
        IReadOnlyCollection<AdvisoryConflictInput> conflicts,
        DateTimeOffset recordedAt)
    {
        if (conflicts.Count == 0)
        {
            return Array.Empty<AdvisoryConflictEntry>();
        }
        var entries = new List<AdvisoryConflictEntry>(conflicts.Count);
        foreach (var conflict in conflicts)
        {
            ArgumentNullException.ThrowIfNull(conflict);
            ArgumentNullException.ThrowIfNull(conflict.Details);
            var vulnerabilityKey = NormalizeKey(conflict.VulnerabilityKey, nameof(conflict.VulnerabilityKey));
            var canonicalJson = Canonicalize(conflict.Details.RootElement);
            var hashBytes = ComputeHash(canonicalJson);
            var asOf = conflict.AsOf.ToUniversalTime();
            var statementIds = conflict.StatementIds?.Count > 0
                ? conflict.StatementIds
                    .Where(static id => id != Guid.Empty)
                    .Distinct()
                    .OrderBy(static id => id)
                    .ToImmutableArray()
                : ImmutableArray<Guid>.Empty;
            entries.Add(new AdvisoryConflictEntry(
                conflict.ConflictId ?? Guid.NewGuid(),
                vulnerabilityKey,
                canonicalJson,
                hashBytes,
                asOf,
                recordedAt,
                statementIds));
        }
        return entries;
    }

    // Keys are trimmed and lower-cased so lookups are case-insensitive by construction.
    private static string NormalizeKey(string value, string parameterName)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException("Value must be provided.", parameterName);
        }
        return value.Trim().ToLowerInvariant();
    }

    // SHA-256 over the UTF-8 bytes of the canonical JSON.
    private static ImmutableArray<byte> ComputeHash(string canonicalJson)
    {
        var bytes = Encoding.UTF8.GetBytes(canonicalJson);
        var hash = SHA256.HashData(bytes);
        return ImmutableArray.Create(hash);
    }

    // Serializes a JsonElement with object properties sorted ordinally, so equal
    // documents always yield identical bytes (and therefore identical hashes).
    private static string Canonicalize(JsonElement element)
    {
        using var stream = new MemoryStream();
        using (var writer = new Utf8JsonWriter(stream, CanonicalWriterOptions))
        {
            WriteCanonical(element, writer);
        }
        return Encoding.UTF8.GetString(stream.ToArray());
    }

    private static void WriteCanonical(JsonElement element, Utf8JsonWriter writer)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                writer.WriteStartObject();
                // Ordinal property ordering is what makes the output canonical.
                foreach (var property in element.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
                {
                    writer.WritePropertyName(property.Name);
                    WriteCanonical(property.Value, writer);
                }
                writer.WriteEndObject();
                break;
            case JsonValueKind.Array:
                writer.WriteStartArray();
                foreach (var item in element.EnumerateArray())
                {
                    WriteCanonical(item, writer);
                }
                writer.WriteEndArray();
                break;
            case JsonValueKind.String:
                writer.WriteStringValue(element.GetString());
                break;
            case JsonValueKind.Number:
                // Preserve the raw number token to avoid round-trip formatting drift.
                writer.WriteRawValue(element.GetRawText());
                break;
            case JsonValueKind.True:
                writer.WriteBooleanValue(true);
                break;
            case JsonValueKind.False:
                writer.WriteBooleanValue(false);
                break;
            case JsonValueKind.Null:
                writer.WriteNullValue();
                break;
            case JsonValueKind.Undefined:
            default:
                // Undefined should not appear in parsed documents; emit null defensively.
                writer.WriteNullValue();
                break;
        }
    }
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// High-level API for recording and replaying advisory statements with deterministic as-of queries.
/// </summary>
public interface IAdvisoryEventLog
{
    /// <summary>Appends statement/conflict batches under one recorded-at timestamp; empty requests are a no-op.</summary>
    ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken);

    /// <summary>Replays entries for <paramref name="vulnerabilityKey"/>, optionally bounded by <paramref name="asOf"/> (null = no bound).</summary>
    ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,31 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// Abstraction over the persistence layer for advisory statements and conflicts.
/// </summary>
public interface IAdvisoryEventRepository
{
    /// <summary>Persists a batch of immutable statement entries.</summary>
    ValueTask InsertStatementsAsync(
        IReadOnlyCollection<AdvisoryStatementEntry> statements,
        CancellationToken cancellationToken);

    /// <summary>Persists a batch of immutable conflict entries.</summary>
    ValueTask InsertConflictsAsync(
        IReadOnlyCollection<AdvisoryConflictEntry> conflicts,
        CancellationToken cancellationToken);

    /// <summary>Fetches statements for a key; when <paramref name="asOf"/> is set, only entries with AsOf at or before it.</summary>
    ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(
        string vulnerabilityKey,
        DateTimeOffset? asOf,
        CancellationToken cancellationToken);

    /// <summary>Fetches conflicts for a key with the same as-of semantics as statements.</summary>
    ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(
        string vulnerabilityKey,
        DateTimeOffset? asOf,
        CancellationToken cancellationToken);
}

View File

@@ -3,12 +3,13 @@ using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.DependencyInjection;
using StellaOps.Plugin.Hosting;
namespace StellaOps.Concelier.Core.Jobs;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.DependencyInjection;
using StellaOps.Plugin.DependencyInjection;
using StellaOps.Plugin.Hosting;
namespace StellaOps.Concelier.Core.Jobs;
public static class JobPluginRegistrationExtensions
{
@@ -32,12 +33,14 @@ public static class JobPluginRegistrationExtensions
var currentServices = services;
var seenRoutineTypes = new HashSet<string>(StringComparer.Ordinal);
foreach (var plugin in loadResult.Plugins)
{
foreach (var routineType in GetRoutineTypes(plugin.Assembly))
{
if (!typeof(IDependencyInjectionRoutine).IsAssignableFrom(routineType))
{
foreach (var plugin in loadResult.Plugins)
{
PluginServiceRegistration.RegisterAssemblyMetadata(currentServices, plugin.Assembly, logger);
foreach (var routineType in GetRoutineTypes(plugin.Assembly))
{
if (!typeof(IDependencyInjectionRoutine).IsAssignableFrom(routineType))
{
continue;
}

View File

@@ -1,21 +1,21 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|JobCoordinator implementation (create/get/mark status)|BE-Core|Storage.Mongo|DONE `JobCoordinator` drives Mongo-backed runs.|
|Cron scheduling loop with TimeProvider|BE-Core|Core|DONE `JobSchedulerHostedService` evaluates cron expressions.|
|Single-flight/lease semantics|BE-Core|Storage.Mongo|DONE lease acquisition backed by `MongoLeaseStore`.|
|Trigger API contract (Result mapping)|BE-Core|WebService|DONE `JobTriggerResult` outcomes map to HTTP statuses.|
|Run telemetry enrichment|BE-Core|Observability|DONE `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.|
|Deterministic params hashing|BE-Core|Core|DONE `JobParametersHasher` creates SHA256 hash.|
|Golden tests for timeout/cancel|QA|Core|DONE JobCoordinatorTests cover cancellation timeout path.|
|JobSchedulerBuilder options registry coverage|BE-Core|Core|DONE added scheduler tests confirming cron/timeout/lease metadata persists via JobSchedulerOptions.|
|Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|DONE JobPluginRegistrationExtensions now loads PluginHost routines and wires connector/exporter registrations.|
|Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.|
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|JobCoordinator implementation (create/get/mark status)|BE-Core|Storage.Mongo|DONE `JobCoordinator` drives Mongo-backed runs.|
|Cron scheduling loop with TimeProvider|BE-Core|Core|DONE `JobSchedulerHostedService` evaluates cron expressions.|
|Single-flight/lease semantics|BE-Core|Storage.Mongo|DONE lease acquisition backed by `MongoLeaseStore`.|
|Trigger API contract (Result mapping)|BE-Core|WebService|DONE `JobTriggerResult` outcomes map to HTTP statuses.|
|Run telemetry enrichment|BE-Core|Observability|DONE `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.|
|Deterministic params hashing|BE-Core|Core|DONE `JobParametersHasher` creates SHA256 hash.|
|Golden tests for timeout/cancel|QA|Core|DONE JobCoordinatorTests cover cancellation timeout path.|
|JobSchedulerBuilder options registry coverage|BE-Core|Core|DONE added scheduler tests confirming cron/timeout/lease metadata persists via JobSchedulerOptions.|
|Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|DONE JobPluginRegistrationExtensions now loads PluginHost routines and wires connector/exporter registrations.|
|Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.|
|Validate job trigger parameters for serialization|BE-Core|WebService|DONE trigger parameters normalized/serialized with defensive checks returning InvalidParameters on failure. Full-suite `dotnet test --no-build` currently red from live connector fixture drift (Oracle/JVN/RedHat).|
|FEEDCORE-ENGINE-03-001 Canonical merger implementation|BE-Core|Merge|DONE `CanonicalMerger` applies GHSA/NVD/OSV conflict rules with deterministic provenance and comprehensive unit coverage. **Coordination:** Connector leads must align mapper outputs with the canonical field expectations before 2025-10-18 so Merge can activate the path globally.|
|FEEDCORE-ENGINE-03-002 Field precedence and tie-breaker map|BE-Core|Merge|DONE field precedence and freshness overrides enforced via `FieldPrecedence` map with tie-breakers and analytics capture. **Reminder:** Storage/Merge owners review precedence overrides when onboarding new feeds to ensure `decisionReason` tagging stays consistent.|
|Canonical merger parity for description/CWE/canonical metric|BE-Core|Models|DONE (2025-10-15) merger now populates description/CWEs/canonical metric id with provenance and regression tests cover the new decisions.|
|Reference normalization & freshness instrumentation cleanup|BE-Core, QA|Models|DONE (2025-10-15) reference keys normalized, freshness overrides applied to union fields, and new tests assert decision logging.|
|FEEDCORE-ENGINE-07-001 Advisory event log & asOf queries|Team Core Engine & Storage Analytics|FEEDSTORAGE-DATA-07-001|TODO Introduce immutable advisory statement events, expose `asOf` query surface for merge/export pipelines, and document determinism guarantees for replay.|
|FEEDCORE-ENGINE-07-001 Advisory event log & asOf queries|Team Core Engine & Storage Analytics|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-19)** Implemented `AdvisoryEventLog` service plus repository contracts, canonical hashing, and lower-cased key normalization with replay support; documented determinism guarantees. Tests: `dotnet test src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj`.|
|FEEDCORE-ENGINE-07-002 Noise prior computation service|Team Core Engine & Data Science|FEEDCORE-ENGINE-07-001|TODO Build rule-based learner capturing false-positive priors per package/env, persist summaries, and expose APIs for Excititor/scan suppressors with reproducible statistics.|
|FEEDCORE-ENGINE-07-003 Unknown state ledger & confidence seeding|Team Core Engine & Storage Analytics|FEEDCORE-ENGINE-07-001|TODO Persist `unknown_vuln_range/unknown_origin/ambiguous_fix` markers with initial confidence bands, expose query surface for Policy, and add fixtures validating canonical serialization.|

View File

@@ -42,10 +42,14 @@ public sealed class JsonExportSnapshotBuilderTests : IDisposable
severity: "medium"),
};
var result = await builder.WriteAsync(advisories, exportedAt, cancellationToken: CancellationToken.None);
Assert.Equal(advisories.Length, result.AdvisoryCount);
Assert.Equal(exportedAt, result.ExportedAt);
var result = await builder.WriteAsync(advisories, exportedAt, cancellationToken: CancellationToken.None);
Assert.Equal(advisories.Length, result.AdvisoryCount);
Assert.Equal(advisories.Length, result.Advisories.Length);
Assert.Equal(
advisories.Select(a => a.AdvisoryKey).OrderBy(key => key, StringComparer.Ordinal),
result.Advisories.Select(a => a.AdvisoryKey).OrderBy(key => key, StringComparer.Ordinal));
Assert.Equal(exportedAt, result.ExportedAt);
var expectedFiles = result.FilePaths.OrderBy(x => x, StringComparer.Ordinal).ToArray();
Assert.Contains("nvd/2024/CVE-2024-9999.json", expectedFiles);
@@ -107,10 +111,11 @@ public sealed class JsonExportSnapshotBuilderTests : IDisposable
};
var sequence = new SingleEnumerationAsyncSequence(advisories);
var result = await builder.WriteAsync(sequence, exportedAt, cancellationToken: CancellationToken.None);
Assert.Equal(advisories.Length, result.AdvisoryCount);
}
var result = await builder.WriteAsync(sequence, exportedAt, cancellationToken: CancellationToken.None);
Assert.Equal(advisories.Length, result.AdvisoryCount);
Assert.Equal(advisories.Length, result.Advisories.Length);
}
private static Advisory CreateAdvisory(string advisoryKey, string[] aliases, string title, string severity)
{

View File

@@ -1,12 +1,14 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Collections.Immutable;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Exporter.Json;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Exporting;
@@ -20,10 +22,11 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
public void Register_AddsJobDefinitionAndServices()
{
var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton<IAdvisoryStore, StubAdvisoryStore>();
services.AddSingleton<IExportStateStore, StubExportStateStore>();
services.AddOptions<JobSchedulerOptions>();
services.AddLogging();
services.AddSingleton<IAdvisoryStore, StubAdvisoryStore>();
services.AddSingleton<IExportStateStore, StubExportStateStore>();
services.AddSingleton<IAdvisoryEventLog, StubAdvisoryEventLog>();
services.AddOptions<JobSchedulerOptions>();
var configuration = new ConfigurationBuilder()
.AddInMemoryCollection(new Dictionary<string, string?>())
@@ -78,17 +81,34 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
}
}
private sealed class StubExportStateStore : IExportStateStore
{
private ExportStateRecord? _record;
private sealed class StubExportStateStore : IExportStateStore
{
private ExportStateRecord? _record;
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
=> Task.FromResult(_record);
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_record = record;
return Task.FromResult(record);
}
}
}
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
{
return Task.FromResult(_record);
}
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_record = record;
return Task.FromResult(record);
}
}
/// <summary>
/// Minimal <see cref="IAdvisoryEventLog"/> stand-in for DI registration tests:
/// appends are rejected and replays always come back empty.
/// </summary>
private sealed class StubAdvisoryEventLog : IAdvisoryEventLog
{
// Registration tests never append events; throwing here catches accidental writes.
public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
=> throw new NotSupportedException();
// Returns an empty replay (no statements, no conflicts) for any key/asOf combination.
public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
{
return ValueTask.FromResult(new AdvisoryReplay(
vulnerabilityKey,
asOf,
ImmutableArray<AdvisoryStatementSnapshot>.Empty,
ImmutableArray<AdvisoryConflictSnapshot>.Empty));
}
}
}

View File

@@ -1,94 +1,101 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Exporter.Json;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Exporting;
namespace StellaOps.Concelier.Exporter.Json.Tests;
public sealed class JsonFeedExporterTests : IDisposable
{
private readonly string _root;
public JsonFeedExporterTests()
{
_root = Directory.CreateTempSubdirectory("concelier-json-exporter-tests").FullName;
}
[Fact]
public async Task ExportAsync_SkipsWhenDigestUnchanged()
{
var advisory = new Advisory(
advisoryKey: "CVE-2024-1234",
title: "Test Advisory",
summary: null,
language: "en",
published: DateTimeOffset.Parse("2024-01-01T00:00:00Z", CultureInfo.InvariantCulture),
modified: DateTimeOffset.Parse("2024-01-02T00:00:00Z", CultureInfo.InvariantCulture),
severity: "high",
exploitKnown: false,
aliases: new[] { "CVE-2024-1234" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: Array.Empty<AdvisoryProvenance>());
var advisoryStore = new StubAdvisoryStore(advisory);
var options = Options.Create(new JsonExportOptions
{
OutputRoot = _root,
MaintainLatestSymlink = false,
});
var stateStore = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-15T12:00:00Z", CultureInfo.InvariantCulture));
var stateManager = new ExportStateManager(stateStore, timeProvider);
var exporter = new JsonFeedExporter(
advisoryStore,
options,
new VulnListJsonExportPathResolver(),
stateManager,
NullLogger<JsonFeedExporter>.Instance,
timeProvider);
using var provider = new ServiceCollection().BuildServiceProvider();
await exporter.ExportAsync(provider, CancellationToken.None);
var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None);
Assert.NotNull(record);
var firstUpdated = record!.UpdatedAt;
Assert.Equal("20240715T120000Z", record.BaseExportId);
Assert.Equal(record.LastFullDigest, record.ExportCursor);
var firstExportPath = Path.Combine(_root, "20240715T120000Z");
Assert.True(Directory.Exists(firstExportPath));
timeProvider.Advance(TimeSpan.FromMinutes(5));
await exporter.ExportAsync(provider, CancellationToken.None);
record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None);
Assert.NotNull(record);
Assert.Equal(firstUpdated, record!.UpdatedAt);
var secondExportPath = Path.Combine(_root, "20240715T120500Z");
Assert.False(Directory.Exists(secondExportPath));
}
[Fact]
public async Task ExportAsync_WritesManifestMetadata()
{
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Exporter.Json.Tests;
public sealed class JsonFeedExporterTests : IDisposable
{
private readonly string _root;
public JsonFeedExporterTests()
{
_root = Directory.CreateTempSubdirectory("concelier-json-exporter-tests").FullName;
}
[Fact]
public async Task ExportAsync_SkipsWhenDigestUnchanged()
{
var advisory = new Advisory(
advisoryKey: "CVE-2024-1234",
title: "Test Advisory",
summary: null,
language: "en",
published: DateTimeOffset.Parse("2024-01-01T00:00:00Z", CultureInfo.InvariantCulture),
modified: DateTimeOffset.Parse("2024-01-02T00:00:00Z", CultureInfo.InvariantCulture),
severity: "high",
exploitKnown: false,
aliases: new[] { "CVE-2024-1234" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: Array.Empty<AdvisoryProvenance>());
var advisoryStore = new StubAdvisoryStore(advisory);
var options = Options.Create(new JsonExportOptions
{
OutputRoot = _root,
MaintainLatestSymlink = false,
});
var stateStore = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-15T12:00:00Z", CultureInfo.InvariantCulture));
var stateManager = new ExportStateManager(stateStore, timeProvider);
var eventLog = new StubAdvisoryEventLog(new[] { advisory }, timeProvider.GetUtcNow());
var exporter = new JsonFeedExporter(
advisoryStore,
options,
new VulnListJsonExportPathResolver(),
stateManager,
eventLog,
NullLogger<JsonFeedExporter>.Instance,
timeProvider);
using var provider = new ServiceCollection().BuildServiceProvider();
await exporter.ExportAsync(provider, CancellationToken.None);
var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None);
Assert.NotNull(record);
var firstUpdated = record!.UpdatedAt;
Assert.Equal("20240715T120000Z", record.BaseExportId);
Assert.Equal(record.LastFullDigest, record.ExportCursor);
var firstExportPath = Path.Combine(_root, "20240715T120000Z");
Assert.True(Directory.Exists(firstExportPath));
timeProvider.Advance(TimeSpan.FromMinutes(5));
await exporter.ExportAsync(provider, CancellationToken.None);
record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None);
Assert.NotNull(record);
Assert.Equal(firstUpdated, record!.UpdatedAt);
var secondExportPath = Path.Combine(_root, "20240715T120500Z");
Assert.False(Directory.Exists(secondExportPath));
}
[Fact]
public async Task ExportAsync_WritesManifestMetadata()
{
var exportedAt = DateTimeOffset.Parse("2024-08-10T00:00:00Z", CultureInfo.InvariantCulture);
var recordedAt = DateTimeOffset.Parse("2024-07-02T00:00:00Z", CultureInfo.InvariantCulture);
var reference = new AdvisoryReference(
@@ -135,52 +142,54 @@ public sealed class JsonFeedExporterTests : IDisposable
description: "Detailed description capturing remediation steps.",
cwes: new[] { weakness },
canonicalMetricId: "3.1|CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H");
var advisoryStore = new StubAdvisoryStore(advisory);
var optionsValue = new JsonExportOptions
{
OutputRoot = _root,
MaintainLatestSymlink = false,
};
var options = Options.Create(optionsValue);
var stateStore = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(exportedAt);
var stateManager = new ExportStateManager(stateStore, timeProvider);
var exporter = new JsonFeedExporter(
advisoryStore,
options,
new VulnListJsonExportPathResolver(),
stateManager,
NullLogger<JsonFeedExporter>.Instance,
timeProvider);
using var provider = new ServiceCollection().BuildServiceProvider();
await exporter.ExportAsync(provider, CancellationToken.None);
var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture);
var exportDirectory = Path.Combine(_root, exportId);
var manifestPath = Path.Combine(exportDirectory, "manifest.json");
Assert.True(File.Exists(manifestPath));
using var document = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None));
var root = document.RootElement;
Assert.Equal(exportId, root.GetProperty("exportId").GetString());
Assert.Equal(exportedAt.UtcDateTime, root.GetProperty("generatedAt").GetDateTime());
Assert.Equal(1, root.GetProperty("advisoryCount").GetInt32());
var exportedFiles = Directory.EnumerateFiles(exportDirectory, "*.json", SearchOption.AllDirectories)
.Select(path => new
{
Absolute = path,
Relative = Path.GetRelativePath(exportDirectory, path).Replace("\\", "/", StringComparison.Ordinal),
})
.Where(file => !string.Equals(file.Relative, "manifest.json", StringComparison.OrdinalIgnoreCase))
.OrderBy(file => file.Relative, StringComparer.Ordinal)
.ToArray();
var advisoryStore = new StubAdvisoryStore(advisory);
var optionsValue = new JsonExportOptions
{
OutputRoot = _root,
MaintainLatestSymlink = false,
};
var options = Options.Create(optionsValue);
var stateStore = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(exportedAt);
var stateManager = new ExportStateManager(stateStore, timeProvider);
var eventLog = new StubAdvisoryEventLog(new[] { advisory }, exportedAt);
var exporter = new JsonFeedExporter(
advisoryStore,
options,
new VulnListJsonExportPathResolver(),
stateManager,
eventLog,
NullLogger<JsonFeedExporter>.Instance,
timeProvider);
using var provider = new ServiceCollection().BuildServiceProvider();
await exporter.ExportAsync(provider, CancellationToken.None);
var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture);
var exportDirectory = Path.Combine(_root, exportId);
var manifestPath = Path.Combine(exportDirectory, "manifest.json");
Assert.True(File.Exists(manifestPath));
using var document = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None));
var root = document.RootElement;
Assert.Equal(exportId, root.GetProperty("exportId").GetString());
Assert.Equal(exportedAt.UtcDateTime, root.GetProperty("generatedAt").GetDateTime());
Assert.Equal(1, root.GetProperty("advisoryCount").GetInt32());
var exportedFiles = Directory.EnumerateFiles(exportDirectory, "*.json", SearchOption.AllDirectories)
.Select(path => new
{
Absolute = path,
Relative = Path.GetRelativePath(exportDirectory, path).Replace("\\", "/", StringComparison.Ordinal),
})
.Where(file => !string.Equals(file.Relative, "manifest.json", StringComparison.OrdinalIgnoreCase))
.OrderBy(file => file.Relative, StringComparer.Ordinal)
.ToArray();
var filesElement = root.GetProperty("files")
.EnumerateArray()
.Select(element => new
@@ -208,58 +217,278 @@ public sealed class JsonFeedExporterTests : IDisposable
}
Assert.Equal(exportedFiles.Select(file => file.Relative).ToArray(), filesElement.Select(file => file.Path).ToArray());
long totalBytes = exportedFiles.Select(file => new FileInfo(file.Absolute).Length).Sum();
Assert.Equal(totalBytes, root.GetProperty("totalBytes").GetInt64());
Assert.Equal(exportedFiles.Length, root.GetProperty("fileCount").GetInt32());
var digest = root.GetProperty("digest").GetString();
var digestResult = new JsonExportResult(
exportDirectory,
exportedAt,
exportedFiles.Select(file =>
{
var manifestEntry = filesElement.First(f => f.Path == file.Relative);
if (manifestEntry.Digest is null)
{
throw new InvalidOperationException($"Manifest entry for {file.Relative} missing digest.");
}
return new JsonExportFile(file.Relative, new FileInfo(file.Absolute).Length, manifestEntry.Digest);
}),
exportedFiles.Length,
totalBytes);
var expectedDigest = ExportDigestCalculator.ComputeTreeDigest(digestResult);
Assert.Equal(expectedDigest, digest);
var exporterVersion = root.GetProperty("exporterVersion").GetString();
Assert.Equal(ExporterVersion.GetVersion(typeof(JsonFeedExporter)), exporterVersion);
}
public void Dispose()
{
try
{
if (Directory.Exists(_root))
{
Directory.Delete(_root, recursive: true);
}
}
catch
{
// best effort cleanup
}
}
private sealed class StubAdvisoryStore : IAdvisoryStore
{
private readonly IReadOnlyList<Advisory> _advisories;
public StubAdvisoryStore(params Advisory[] advisories)
{
_advisories = advisories;
}
long totalBytes = exportedFiles.Select(file => new FileInfo(file.Absolute).Length).Sum();
Assert.Equal(totalBytes, root.GetProperty("totalBytes").GetInt64());
Assert.Equal(exportedFiles.Length, root.GetProperty("fileCount").GetInt32());
var digest = root.GetProperty("digest").GetString();
var digestResult = new JsonExportResult(
exportDirectory,
exportedAt,
exportedFiles.Select(file =>
{
var manifestEntry = filesElement.First(f => f.Path == file.Relative);
if (manifestEntry.Digest is null)
{
throw new InvalidOperationException($"Manifest entry for {file.Relative} missing digest.");
}
return new JsonExportFile(file.Relative, new FileInfo(file.Absolute).Length, manifestEntry.Digest);
}),
exportedFiles.Length,
totalBytes);
var expectedDigest = ExportDigestCalculator.ComputeTreeDigest(digestResult);
Assert.Equal(expectedDigest, digest);
var exporterVersion = root.GetProperty("exporterVersion").GetString();
Assert.Equal(ExporterVersion.GetVersion(typeof(JsonFeedExporter)), exporterVersion);
}
[Fact]
public async Task ExportAsync_WritesMirrorBundlesWithSignatures()
{
var exportedAt = DateTimeOffset.Parse("2025-01-05T00:00:00Z", CultureInfo.InvariantCulture);
var advisoryOne = new Advisory(
advisoryKey: "CVE-2025-0001",
title: "Mirror Advisory One",
summary: null,
language: "en",
published: exportedAt.AddDays(-10),
modified: exportedAt.AddDays(-9),
severity: "high",
exploitKnown: false,
aliases: new[] { "CVE-2025-0001", "GHSA-aaaa-bbbb-cccc" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[]
{
new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", exportedAt.AddDays(-9)),
new AdvisoryProvenance("nvd", "map", "CVE-2025-0001", exportedAt.AddDays(-8)),
});
var advisoryTwo = new Advisory(
advisoryKey: "CVE-2025-0002",
title: "Mirror Advisory Two",
summary: null,
language: "en",
published: exportedAt.AddDays(-6),
modified: exportedAt.AddDays(-5),
severity: "medium",
exploitKnown: false,
aliases: new[] { "CVE-2025-0002" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[]
{
new AdvisoryProvenance("nvd", "map", "CVE-2025-0002", exportedAt.AddDays(-5)),
new AdvisoryProvenance("vendor", "map", "ADVISORY-0002", exportedAt.AddDays(-4)),
});
var advisoryStore = new StubAdvisoryStore(advisoryOne, advisoryTwo);
var optionsValue = new JsonExportOptions
{
OutputRoot = _root,
MaintainLatestSymlink = false,
TargetRepository = "s3://mirror/concelier"
};
optionsValue.Mirror.Enabled = true;
optionsValue.Mirror.DirectoryName = "mirror";
optionsValue.Mirror.Domains.Add(new JsonExportOptions.JsonMirrorDomainOptions
{
Id = "primary",
DisplayName = "Primary"
});
optionsValue.Mirror.Signing.Enabled = true;
optionsValue.Mirror.Signing.KeyId = "mirror-signing-key";
optionsValue.Mirror.Signing.Algorithm = SignatureAlgorithms.Es256;
optionsValue.Mirror.Signing.KeyPath = WriteSigningKey(_root);
var options = Options.Create(optionsValue);
var stateStore = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(exportedAt);
var stateManager = new ExportStateManager(stateStore, timeProvider);
var eventLog = new StubAdvisoryEventLog(new[] { advisoryOne, advisoryTwo }, exportedAt);
var exporter = new JsonFeedExporter(
advisoryStore,
options,
new VulnListJsonExportPathResolver(),
stateManager,
eventLog,
NullLogger<JsonFeedExporter>.Instance,
timeProvider);
var services = new ServiceCollection();
services.AddSingleton<DefaultCryptoProvider>();
services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<DefaultCryptoProvider>());
services.AddSingleton<ICryptoProviderRegistry>(sp =>
{
var provider = sp.GetRequiredService<DefaultCryptoProvider>();
return new CryptoProviderRegistry(new[] { provider });
});
using var provider = services.BuildServiceProvider();
await exporter.ExportAsync(provider, CancellationToken.None);
var exportId = exportedAt.ToString(optionsValue.DirectoryNameFormat, CultureInfo.InvariantCulture);
var exportDirectory = Path.Combine(_root, exportId);
var mirrorDirectory = Path.Combine(exportDirectory, "mirror");
var domainDirectory = Path.Combine(mirrorDirectory, "primary");
Assert.True(File.Exists(Path.Combine(mirrorDirectory, "index.json")));
Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json")));
Assert.True(File.Exists(Path.Combine(domainDirectory, "bundle.json.jws")));
Assert.True(File.Exists(Path.Combine(domainDirectory, "manifest.json")));
var record = await stateStore.FindAsync(JsonFeedExporter.ExporterId, CancellationToken.None);
Assert.NotNull(record);
Assert.Contains(record!.Files, file => string.Equals(file.Path, "mirror/index.json", StringComparison.Ordinal));
Assert.Contains(record.Files, file => string.Equals(file.Path, "mirror/primary/manifest.json", StringComparison.Ordinal));
var indexPath = Path.Combine(mirrorDirectory, "index.json");
using (var indexDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(indexPath, CancellationToken.None)))
{
var indexRoot = indexDoc.RootElement;
Assert.Equal("s3://mirror/concelier", indexRoot.GetProperty("targetRepository").GetString());
var domains = indexRoot.GetProperty("domains").EnumerateArray().ToArray();
var domain = Assert.Single(domains);
Assert.Equal("primary", domain.GetProperty("domainId").GetString());
Assert.Equal("Primary", domain.GetProperty("displayName").GetString());
Assert.Equal(2, domain.GetProperty("advisoryCount").GetInt32());
var bundleDescriptor = domain.GetProperty("bundle");
Assert.Equal("mirror/primary/bundle.json", bundleDescriptor.GetProperty("path").GetString());
var signatureDescriptor = bundleDescriptor.GetProperty("signature");
Assert.Equal("mirror/primary/bundle.json.jws", signatureDescriptor.GetProperty("path").GetString());
var manifestDescriptor = domain.GetProperty("manifest");
Assert.Equal("mirror/primary/manifest.json", manifestDescriptor.GetProperty("path").GetString());
}
var bundlePathRel = "mirror/primary/bundle.json";
var manifestPathRel = "mirror/primary/manifest.json";
var signaturePathRel = "mirror/primary/bundle.json.jws";
var bundlePath = Path.Combine(exportDirectory, bundlePathRel.Replace('/', Path.DirectorySeparatorChar));
var manifestPath = Path.Combine(exportDirectory, manifestPathRel.Replace('/', Path.DirectorySeparatorChar));
var signaturePath = Path.Combine(exportDirectory, signaturePathRel.Replace('/', Path.DirectorySeparatorChar));
using (var bundleDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(bundlePath, CancellationToken.None)))
{
var bundleRoot = bundleDoc.RootElement;
Assert.Equal("primary", bundleRoot.GetProperty("domainId").GetString());
Assert.Equal(2, bundleRoot.GetProperty("advisoryCount").GetInt32());
Assert.Equal("s3://mirror/concelier", bundleRoot.GetProperty("targetRepository").GetString());
Assert.Equal(2, bundleRoot.GetProperty("advisories").GetArrayLength());
var sources = bundleRoot.GetProperty("sources").EnumerateArray().Select(element => element.GetProperty("source").GetString()).ToArray();
Assert.Contains("ghsa", sources);
Assert.Contains("nvd", sources);
Assert.Contains("vendor", sources);
}
using (var manifestDoc = JsonDocument.Parse(await File.ReadAllBytesAsync(manifestPath, CancellationToken.None)))
{
var manifestRoot = manifestDoc.RootElement;
Assert.Equal("primary", manifestRoot.GetProperty("domainId").GetString());
Assert.Equal(2, manifestRoot.GetProperty("advisoryCount").GetInt32());
Assert.Equal("mirror/primary/bundle.json", manifestRoot.GetProperty("bundle").GetProperty("path").GetString());
}
var bundleBytes = await File.ReadAllBytesAsync(bundlePath, CancellationToken.None);
var signatureValue = await File.ReadAllTextAsync(signaturePath, CancellationToken.None);
var signatureParts = signatureValue.Split("..", StringSplitOptions.None);
Assert.Equal(2, signatureParts.Length);
var signingInput = BuildSigningInput(signatureParts[0], bundleBytes);
var signatureBytes = Base64UrlDecode(signatureParts[1]);
var registry = provider.GetRequiredService<ICryptoProviderRegistry>();
var verification = registry.ResolveSigner(
CryptoCapability.Signing,
optionsValue.Mirror.Signing.Algorithm,
new CryptoKeyReference(optionsValue.Mirror.Signing.KeyId, optionsValue.Mirror.Signing.Provider),
optionsValue.Mirror.Signing.Provider);
var verified = await verification.Signer.VerifyAsync(signingInput, signatureBytes, CancellationToken.None);
Assert.True(verified);
}
public void Dispose()
{
try
{
if (Directory.Exists(_root))
{
Directory.Delete(_root, recursive: true);
}
}
catch
{
// best effort cleanup
}
}
/// <summary>
/// Generates a fresh NIST P-256 signing key, writes it as a PKCS#8 PEM file
/// under <paramref name="directory"/>, and returns the full file path.
/// </summary>
private static string WriteSigningKey(string directory)
{
    using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    // Random file name so parallel test runs in the same directory never collide.
    var destination = Path.Combine(directory, $"mirror-key-{Guid.NewGuid():N}.pem");
    File.WriteAllText(destination, BuildPem("PRIVATE KEY", key.ExportPkcs8PrivateKey()));
    return destination;
}
/// <summary>
/// Wraps <paramref name="data"/> in a PEM envelope with the given label; the
/// base64 payload is line-broken per <see cref="Base64FormattingOptions.InsertLineBreaks"/>.
/// </summary>
private static string BuildPem(string label, byte[] data)
{
    var body = Convert.ToBase64String(data, Base64FormattingOptions.InsertLineBreaks);
    var pem = new StringBuilder()
        .Append("-----BEGIN ").Append(label).Append("-----\n")
        .Append(body)
        .Append("\n-----END ").Append(label).Append("-----\n");
    return pem.ToString();
}
/// <summary>
/// Builds the detached-JWS signing input: ASCII(protectedHeader) + '.' + raw payload bytes.
/// </summary>
private static byte[] BuildSigningInput(string protectedHeader, byte[] payload)
{
    var header = Encoding.ASCII.GetBytes(protectedHeader);
    var result = new byte[header.Length + 1 + payload.Length];
    header.CopyTo(result, 0);
    result[header.Length] = (byte)'.';
    payload.CopyTo(result, header.Length + 1);
    return result;
}
/// <summary>
/// Decodes a base64url string (RFC 4648 §5): maps '-'/'_' back to '+'/'/',
/// restores '=' padding up to a multiple of four, then base64-decodes.
/// </summary>
private static byte[] Base64UrlDecode(string value)
{
    var standard = value.Replace('-', '+').Replace('_', '/');
    var padding = (4 - standard.Length % 4) % 4;
    return Convert.FromBase64String(standard + new string('=', padding));
}
private sealed class StubAdvisoryStore : IAdvisoryStore
{
private readonly IReadOnlyList<Advisory> _advisories;
public StubAdvisoryStore(params Advisory[] advisories)
{
_advisories = advisories;
}
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
_ = session;
@@ -285,38 +514,84 @@ public sealed class JsonFeedExporterTests : IDisposable
async IAsyncEnumerable<Advisory> EnumerateAsync([EnumeratorCancellation] CancellationToken ct)
{
foreach (var advisory in _advisories)
{
ct.ThrowIfCancellationRequested();
yield return advisory;
await Task.Yield();
}
}
}
}
private sealed class InMemoryExportStateStore : IExportStateStore
{
private ExportStateRecord? _record;
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
=> Task.FromResult(_record);
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_record = record;
return Task.FromResult(record);
}
}
private sealed class TestTimeProvider : TimeProvider
{
private DateTimeOffset _now;
public TestTimeProvider(DateTimeOffset start) => _now = start;
public override DateTimeOffset GetUtcNow() => _now;
public void Advance(TimeSpan delta) => _now = _now.Add(delta);
}
}
foreach (var advisory in _advisories)
{
ct.ThrowIfCancellationRequested();
yield return advisory;
await Task.Yield();
}
}
}
}
/// <summary>
/// Test double for <see cref="IAdvisoryEventLog"/> backed by an in-memory advisory map.
/// Replaying a known advisory key yields one synthetic statement snapshot; unknown keys replay empty.
/// </summary>
private sealed class StubAdvisoryEventLog : IAdvisoryEventLog
{
// Advisories keyed by advisory key; lookups are case-insensitive.
private readonly Dictionary<string, Advisory> _advisories;
// Fixed instant stamped onto every synthesized snapshot as its recorded-at time.
private readonly DateTimeOffset _recordedAt;
public StubAdvisoryEventLog(IEnumerable<Advisory> advisories, DateTimeOffset recordedAt)
{
_advisories = advisories.ToDictionary(advisory => advisory.AdvisoryKey, StringComparer.OrdinalIgnoreCase);
_recordedAt = recordedAt;
}
// Exporter tests only replay; appending is intentionally unsupported.
public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
=> throw new NotSupportedException();
public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
{
if (_advisories.TryGetValue(vulnerabilityKey, out var advisory))
{
// Prefer the advisory's own timestamps; fall back to the fixed recorded-at instant.
var asOfTimestamp = advisory.Modified ?? advisory.Published ?? _recordedAt;
var snapshot = new AdvisoryStatementSnapshot(
Guid.NewGuid(),
vulnerabilityKey,
advisory.AdvisoryKey,
advisory,
ImmutableArray<byte>.Empty,
asOfTimestamp,
_recordedAt,
ImmutableArray<Guid>.Empty);
return ValueTask.FromResult(new AdvisoryReplay(
vulnerabilityKey,
asOf,
ImmutableArray.Create(snapshot),
ImmutableArray<AdvisoryConflictSnapshot>.Empty));
}
// Unknown keys replay as empty (no statements, no conflicts).
return ValueTask.FromResult(new AdvisoryReplay(
vulnerabilityKey,
asOf,
ImmutableArray<AdvisoryStatementSnapshot>.Empty,
ImmutableArray<AdvisoryConflictSnapshot>.Empty));
}
}
/// <summary>
/// Single-slot in-memory <see cref="IExportStateStore"/>: holds at most one record.
/// </summary>
private sealed class InMemoryExportStateStore : IExportStateStore
{
private ExportStateRecord? _record;
// NOTE(review): the id argument is ignored — this store tracks a single exporter only.
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
{
return Task.FromResult(_record);
}
// Overwrites the slot unconditionally and echoes the stored record back.
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_record = record;
return Task.FromResult(record);
}
}
/// <summary>
/// Deterministic <see cref="TimeProvider"/> for tests: reports a fixed instant
/// that only moves when <see cref="Advance"/> is called.
/// </summary>
private sealed class TestTimeProvider : TimeProvider
{
    private DateTimeOffset _current;

    public TestTimeProvider(DateTimeOffset start)
    {
        _current = start;
    }

    /// <summary>Returns the currently configured instant.</summary>
    public override DateTimeOffset GetUtcNow() => _current;

    /// <summary>Moves the clock by <paramref name="delta"/> (negative values move it backward).</summary>
    public void Advance(TimeSpan delta)
    {
        _current += delta;
    }
}
}

View File

@@ -1,9 +1,11 @@
using System.IO;
namespace StellaOps.Concelier.Exporter.Json;
/// <summary>
/// Configuration for JSON exporter output paths and determinism controls.
using System.Collections.Generic;
using System.IO;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Exporter.Json;
/// <summary>
/// Configuration for JSON exporter output paths and determinism controls.
/// </summary>
public sealed class JsonExportOptions
{
@@ -27,8 +29,87 @@ public sealed class JsonExportOptions
/// </summary>
public bool MaintainLatestSymlink { get; set; } = true;
/// <summary>
/// Optional repository identifier recorded alongside export state metadata.
/// </summary>
public string? TargetRepository { get; set; }
}
/// <summary>
/// Optional repository identifier recorded alongside export state metadata.
/// </summary>
public string? TargetRepository { get; set; }
/// <summary>
/// Mirror distribution configuration producing aggregate bundles for downstream mirrors.
/// </summary>
public JsonMirrorOptions Mirror { get; set; } = new();
/// <summary>
/// Options controlling mirror bundle generation for the JSON exporter.
/// </summary>
public sealed class JsonMirrorOptions
{
/// <summary>
/// Indicates whether mirror bundle generation is enabled.
/// </summary>
public bool Enabled { get; set; }
/// <summary>
/// Directory name (relative to the export root) where mirror artefacts are written.
/// </summary>
public string DirectoryName { get; set; } = "mirror";
/// <summary>
/// Domains exposed to downstream mirrors. Read-only collection property; add entries in place.
/// </summary>
public IList<JsonMirrorDomainOptions> Domains { get; } = new List<JsonMirrorDomainOptions>();
/// <summary>
/// Signing configuration for mirror bundles.
/// </summary>
public JsonMirrorSigningOptions Signing { get; set; } = new();
}
/// <summary>
/// Describes one mirror domain: its identity, display label, and advisory filters.
/// </summary>
public sealed class JsonMirrorDomainOptions
{
/// <summary>
/// Stable identifier for the mirror domain (used in URLs and directory names).
/// </summary>
public string Id { get; set; } = string.Empty;
/// <summary>
/// Optional human-readable label for UI surfaces.
/// </summary>
public string? DisplayName { get; set; }
/// <summary>
/// Optional advisory scheme filters (e.g. CVE, GHSA). Empty collection selects all schemes.
/// </summary>
public IList<string> IncludeSchemes { get; } = new List<string>();
/// <summary>
/// Optional provenance source filters (e.g. nvd, ghsa). Empty collection selects all sources.
/// </summary>
public IList<string> IncludeSources { get; } = new List<string>();
}
/// <summary>
/// Signing configuration for mirror bundles (algorithm, key material, provider hint).
/// </summary>
public sealed class JsonMirrorSigningOptions
{
/// <summary>
/// Indicates whether bundles should be signed. Defaults to disabled.
/// </summary>
public bool Enabled { get; set; }
/// <summary>
/// Signing algorithm identifier (defaults to ES256).
/// </summary>
public string Algorithm { get; set; } = SignatureAlgorithms.Es256;
/// <summary>
/// Active signing key identifier.
/// </summary>
public string KeyId { get; set; } = string.Empty;
/// <summary>
/// Path to the private key (PEM) used for signing mirror bundles.
/// </summary>
public string KeyPath { get; set; } = string.Empty;
/// <summary>
/// Optional crypto provider hint. When omitted the registry resolves an appropriate provider.
/// </summary>
public string? Provider { get; set; }
}
}

View File

@@ -1,46 +1,55 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Concelier.Exporter.Json;
public sealed class JsonExportResult
{
public JsonExportResult(
string exportDirectory,
DateTimeOffset exportedAt,
IEnumerable<JsonExportFile> files,
int advisoryCount,
long totalBytes)
{
if (string.IsNullOrWhiteSpace(exportDirectory))
{
throw new ArgumentException("Export directory must be provided.", nameof(exportDirectory));
}
ExportDirectory = exportDirectory;
ExportedAt = exportedAt;
AdvisoryCount = advisoryCount;
TotalBytes = totalBytes;
var list = (files ?? throw new ArgumentNullException(nameof(files)))
.Where(static file => file is not null)
.ToImmutableArray();
Files = list;
FilePaths = list.Select(static file => file.RelativePath).ToImmutableArray();
}
public string ExportDirectory { get; }
public DateTimeOffset ExportedAt { get; }
public ImmutableArray<JsonExportFile> Files { get; }
public ImmutableArray<string> FilePaths { get; }
public int AdvisoryCount { get; }
public long TotalBytes { get; }
}
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Exporter.Json;
/// <summary>
/// Immutable summary of a completed JSON export: where it was written, when,
/// which files were produced, and which canonical advisories it covered.
/// </summary>
public sealed class JsonExportResult
{
    /// <summary>
    /// Creates a new export result.
    /// </summary>
    /// <param name="exportDirectory">Absolute directory the export was written to; must be non-blank.</param>
    /// <param name="exportedAt">Timestamp the export was produced.</param>
    /// <param name="files">Files written by the export; <c>null</c> entries are dropped.</param>
    /// <param name="advisoryCount">Fallback advisory count, used only when no advisories are supplied.</param>
    /// <param name="totalBytes">Total size of all exported files in bytes.</param>
    /// <param name="advisories">Optional canonical advisories included in the export; <c>null</c> entries are dropped.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="exportDirectory"/> is null, empty, or whitespace.</exception>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="files"/> is null.</exception>
    public JsonExportResult(
        string exportDirectory,
        DateTimeOffset exportedAt,
        IEnumerable<JsonExportFile> files,
        int advisoryCount,
        long totalBytes,
        IEnumerable<Advisory>? advisories = null)
    {
        if (string.IsNullOrWhiteSpace(exportDirectory))
        {
            throw new ArgumentException("Export directory must be provided.", nameof(exportDirectory));
        }

        if (files is null)
        {
            throw new ArgumentNullException(nameof(files));
        }

        var materializedFiles = files
            .Where(static file => file is not null)
            .ToImmutableArray();

        var materializedAdvisories = (advisories ?? Array.Empty<Advisory>())
            .Where(static advisory => advisory is not null)
            .ToImmutableArray();

        ExportDirectory = exportDirectory;
        ExportedAt = exportedAt;
        TotalBytes = totalBytes;
        Files = materializedFiles;
        FilePaths = materializedFiles.Select(static file => file.RelativePath).ToImmutableArray();
        Advisories = materializedAdvisories;

        // Prefer the materialized advisory list's length; the caller-supplied count is
        // only a fallback for callers that did not pass the advisories themselves.
        AdvisoryCount = materializedAdvisories.Length > 0 ? materializedAdvisories.Length : advisoryCount;
    }

    /// <summary>Directory the export was written to.</summary>
    public string ExportDirectory { get; }

    /// <summary>Timestamp the export was produced.</summary>
    public DateTimeOffset ExportedAt { get; }

    /// <summary>Files written by the export.</summary>
    public ImmutableArray<JsonExportFile> Files { get; }

    /// <summary>Relative paths of all exported files, in the same order as <see cref="Files"/>.</summary>
    public ImmutableArray<string> FilePaths { get; }

    /// <summary>Canonical advisories included in the export (empty when not supplied).</summary>
    public ImmutableArray<Advisory> Advisories { get; }

    /// <summary>Number of advisories covered by the export.</summary>
    public int AdvisoryCount { get; }

    /// <summary>Total size of all exported files in bytes.</summary>
    public long TotalBytes { get; }
}

View File

@@ -67,26 +67,27 @@ public sealed class JsonExportSnapshotBuilder
Directory.CreateDirectory(exportDirectory);
TrySetDirectoryTimestamp(exportDirectory, exportedAt);
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var files = new List<JsonExportFile>();
long totalBytes = 0L;
var advisoryCount = 0;
await foreach (var advisory in advisories.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
advisoryCount++;
var entry = Resolve(advisory);
if (!seen.Add(entry.RelativePath))
{
throw new InvalidOperationException($"Multiple advisories resolved to the same path '{entry.RelativePath}'.");
}
var destination = Combine(exportDirectory, entry.Segments);
var destinationDirectory = Path.GetDirectoryName(destination);
if (!string.IsNullOrEmpty(destinationDirectory))
{
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var files = new List<JsonExportFile>();
var advisoryList = new List<Advisory>();
long totalBytes = 0L;
await foreach (var advisory in advisories.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
var entry = Resolve(advisory);
if (!seen.Add(entry.RelativePath))
{
throw new InvalidOperationException($"Multiple advisories resolved to the same path '{entry.RelativePath}'.");
}
advisoryList.Add(entry.Advisory);
var destination = Combine(exportDirectory, entry.Segments);
var destinationDirectory = Path.GetDirectoryName(destination);
if (!string.IsNullOrEmpty(destinationDirectory))
{
EnsureDirectoryExists(destinationDirectory);
TrySetDirectoryTimestamp(destinationDirectory, exportedAt);
}
@@ -97,14 +98,14 @@ public sealed class JsonExportSnapshotBuilder
File.SetLastWriteTimeUtc(destination, exportedAt.UtcDateTime);
var digest = ComputeDigest(bytes);
files.Add(new JsonExportFile(entry.RelativePath, bytes.LongLength, digest));
totalBytes += bytes.LongLength;
}
files.Sort(static (left, right) => string.CompareOrdinal(left.RelativePath, right.RelativePath));
return new JsonExportResult(exportDirectory, exportedAt, files, advisoryCount, totalBytes);
}
files.Add(new JsonExportFile(entry.RelativePath, bytes.LongLength, digest));
totalBytes += bytes.LongLength;
}
files.Sort(static (left, right) => string.CompareOrdinal(left.RelativePath, right.RelativePath));
return new JsonExportResult(exportDirectory, exportedAt, files, advisoryList.Count, totalBytes, advisoryList);
}
private static async IAsyncEnumerable<Advisory> EnumerateAsync(
IEnumerable<Advisory> advisories,
@@ -168,10 +169,11 @@ public sealed class JsonExportSnapshotBuilder
throw new ArgumentNullException(nameof(advisory));
}
var relativePath = _pathResolver.GetRelativePath(advisory);
var segments = NormalizeRelativePath(relativePath);
var normalized = string.Join('/', segments);
return new PathResolution(advisory, normalized, segments);
var normalized = CanonicalJsonSerializer.Normalize(advisory);
var relativePath = _pathResolver.GetRelativePath(normalized);
var segments = NormalizeRelativePath(relativePath);
var normalizedPath = string.Join('/', segments);
return new PathResolution(normalized, normalizedPath, segments);
}
private static string[] NormalizeRelativePath(string relativePath)

View File

@@ -31,14 +31,19 @@ public sealed class JsonExporterDependencyInjectionRoutine : IDependencyInjectio
options.OutputRoot = Path.Combine("exports", "json");
}
if (string.IsNullOrWhiteSpace(options.DirectoryNameFormat))
{
options.DirectoryNameFormat = "yyyyMMdd'T'HHmmss'Z'";
}
});
services.AddSingleton<JsonFeedExporter>();
services.AddTransient<JsonExportJob>();
if (string.IsNullOrWhiteSpace(options.DirectoryNameFormat))
{
options.DirectoryNameFormat = "yyyyMMdd'T'HHmmss'Z'";
}
if (string.IsNullOrWhiteSpace(options.Mirror.DirectoryName))
{
options.Mirror.DirectoryName = "mirror";
}
});
services.AddSingleton<JsonFeedExporter>();
services.AddTransient<JsonExportJob>();
services.PostConfigure<JobSchedulerOptions>(options =>
{

View File

@@ -1,12 +1,16 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Exporting;
using StellaOps.Plugin;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Exporting;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Exporter.Json;
@@ -16,29 +20,32 @@ public sealed class JsonFeedExporter : IFeedExporter
public const string ExporterId = "export:json";
private readonly IAdvisoryStore _advisoryStore;
private readonly JsonExportOptions _options;
private readonly IJsonExportPathResolver _pathResolver;
private readonly ExportStateManager _stateManager;
private readonly ILogger<JsonFeedExporter> _logger;
private readonly TimeProvider _timeProvider;
private readonly string _exporterVersion;
public JsonFeedExporter(
IAdvisoryStore advisoryStore,
IOptions<JsonExportOptions> options,
IJsonExportPathResolver pathResolver,
ExportStateManager stateManager,
ILogger<JsonFeedExporter> logger,
TimeProvider? timeProvider = null)
{
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
_stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_exporterVersion = ExporterVersion.GetVersion(typeof(JsonFeedExporter));
}
private readonly JsonExportOptions _options;
private readonly IJsonExportPathResolver _pathResolver;
private readonly ExportStateManager _stateManager;
private readonly ILogger<JsonFeedExporter> _logger;
private readonly TimeProvider _timeProvider;
private readonly string _exporterVersion;
private readonly IAdvisoryEventLog _eventLog;
public JsonFeedExporter(
IAdvisoryStore advisoryStore,
IOptions<JsonExportOptions> options,
IJsonExportPathResolver pathResolver,
ExportStateManager stateManager,
IAdvisoryEventLog eventLog,
ILogger<JsonFeedExporter> logger,
TimeProvider? timeProvider = null)
{
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
_stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
_eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_exporterVersion = ExporterVersion.GetVersion(typeof(JsonFeedExporter));
}
public string Name => ExporterName;
@@ -52,11 +59,12 @@ public sealed class JsonFeedExporter : IFeedExporter
var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
var builder = new JsonExportSnapshotBuilder(_options, _pathResolver);
var advisoryStream = _advisoryStore.StreamAsync(cancellationToken);
var result = await builder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
var digest = ExportDigestCalculator.ComputeTreeDigest(result);
var builder = new JsonExportSnapshotBuilder(_options, _pathResolver);
var canonicalAdvisories = await MaterializeCanonicalAdvisoriesAsync(cancellationToken).ConfigureAwait(false);
var result = await builder.WriteAsync(canonicalAdvisories, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
result = await JsonMirrorBundleWriter.WriteAsync(result, _options, services, _timeProvider, _logger, cancellationToken).ConfigureAwait(false);
var digest = ExportDigestCalculator.ComputeTreeDigest(result);
_logger.LogInformation(
"JSON export {ExportId} wrote {FileCount} files ({Bytes} bytes) covering {AdvisoryCount} advisories with digest {Digest}",
exportId,
@@ -106,7 +114,34 @@ public sealed class JsonFeedExporter : IFeedExporter
{
TryUpdateLatestSymlink(exportRoot, result.ExportDirectory);
}
}
}
private async Task<IReadOnlyList<Advisory>> MaterializeCanonicalAdvisoriesAsync(CancellationToken cancellationToken)
{
var keys = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
await foreach (var advisory in _advisoryStore.StreamAsync(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
if (!string.IsNullOrWhiteSpace(advisory.AdvisoryKey))
{
keys.Add(advisory.AdvisoryKey.Trim());
}
}
var advisories = new List<Advisory>(keys.Count);
foreach (var key in keys)
{
cancellationToken.ThrowIfCancellationRequested();
var replay = await _eventLog.ReplayAsync(key, asOf: null, cancellationToken).ConfigureAwait(false);
if (!replay.Statements.IsDefaultOrEmpty)
{
advisories.Add(replay.Statements[0].Advisory);
}
}
return advisories;
}
private void TryUpdateLatestSymlink(string exportRoot, string exportDirectory)
{

View File

@@ -0,0 +1,622 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Models;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Exporter.Json;
/// <summary>
/// Writes per-domain mirror artefacts underneath a JSON export directory:
/// a bundle document, a manifest, an optional detached JWS signature per domain,
/// and a top-level index, returning a new <see cref="JsonExportResult"/> that
/// includes the extra files in its file list and byte totals.
/// </summary>
internal static class JsonMirrorBundleWriter
{
    private const int SchemaVersion = 1;
    private const string BundleFileName = "bundle.json";
    private const string BundleSignatureFileName = "bundle.json.jws";
    private const string ManifestFileName = "manifest.json";
    private const string IndexFileName = "index.json";
    private const string SignatureMediaType = "application/vnd.stellaops.concelier.mirror-bundle+jws";
    private const string DefaultMirrorDirectoryName = "mirror";

    // UTF-8 without a BOM so the emitted JSON byte streams (and their digests) are stable.
    private static readonly Encoding Utf8NoBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);

    // Serializer used only for the JWS protected header; nulls omitted, compact output.
    private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.General)
    {
        PropertyNamingPolicy = null,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };

    /// <summary>
    /// Generates the mirror directory tree for <paramref name="result"/> and returns a
    /// new result that also lists the mirror files. Returns <paramref name="result"/>
    /// unchanged when mirroring is disabled or no domains are configured.
    /// </summary>
    /// <param name="result">The completed JSON export to augment.</param>
    /// <param name="options">Export options carrying the <c>Mirror</c> configuration.</param>
    /// <param name="services">Service provider used to resolve the crypto registry and host environment.</param>
    /// <param name="timeProvider">Clock used for signature timestamps and key creation times.</param>
    /// <param name="logger">Logger for skip warnings and progress messages.</param>
    /// <param name="cancellationToken">Cancellation token observed between domains and file writes.</param>
    public static async Task<JsonExportResult> WriteAsync(
        JsonExportResult result,
        JsonExportOptions options,
        IServiceProvider services,
        TimeProvider timeProvider,
        ILogger logger,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(result);
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(logger);

        var mirrorOptions = options.Mirror ?? new JsonExportOptions.JsonMirrorOptions();
        if (!mirrorOptions.Enabled || mirrorOptions.Domains.Count == 0)
        {
            // Mirroring disabled or nothing to emit: pass the original result through untouched.
            return result;
        }

        cancellationToken.ThrowIfCancellationRequested();

        var exportedAtUtc = result.ExportedAt.UtcDateTime;
        var mirrorDirectoryName = string.IsNullOrWhiteSpace(mirrorOptions.DirectoryName)
            ? DefaultMirrorDirectoryName
            : mirrorOptions.DirectoryName.Trim();
        var mirrorRoot = Path.Combine(result.ExportDirectory, mirrorDirectoryName);
        Directory.CreateDirectory(mirrorRoot);
        TrySetDirectoryTimestamp(mirrorRoot, exportedAtUtc);

        // Ordinal sort by advisory key keeps bundle contents deterministic across runs.
        var advisories = result.Advisories.IsDefaultOrEmpty
            ? Array.Empty<Advisory>()
            : result.Advisories
                .OrderBy(static advisory => advisory.AdvisoryKey, StringComparer.Ordinal)
                .ToArray();

        var signingContext = PrepareSigningContext(mirrorOptions.Signing, services, timeProvider, logger);
        var additionalFiles = new List<JsonExportFile>();
        var domainEntries = new List<MirrorIndexDomainEntry>();

        foreach (var domainOption in mirrorOptions.Domains)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Misconfigured domains are skipped with a warning rather than failing the export.
            if (domainOption is null)
            {
                logger.LogWarning("Encountered null mirror domain configuration; skipping.");
                continue;
            }

            var domainId = (domainOption.Id ?? string.Empty).Trim();
            if (domainId.Length == 0)
            {
                logger.LogWarning("Skipping mirror domain with empty id.");
                continue;
            }

            var schemeFilter = CreateFilterSet(domainOption.IncludeSchemes);
            var sourceFilter = CreateFilterSet(domainOption.IncludeSources);
            var domainAdvisories = advisories
                .Where(advisory => MatchesFilters(advisory, schemeFilter, sourceFilter))
                .ToArray();
            var sources = BuildSourceSummaries(domainAdvisories);

            var domainDisplayName = string.IsNullOrWhiteSpace(domainOption.DisplayName)
                ? domainId
                : domainOption.DisplayName!.Trim();

            var domainDirectory = Path.Combine(mirrorRoot, domainId);
            Directory.CreateDirectory(domainDirectory);
            TrySetDirectoryTimestamp(domainDirectory, exportedAtUtc);

            // bundle.json: full advisory payload for this domain.
            var bundleDocument = new MirrorDomainBundleDocument(
                SchemaVersion,
                result.ExportedAt,
                options.TargetRepository,
                domainId,
                domainDisplayName,
                domainAdvisories.Length,
                domainAdvisories,
                sources);
            var bundleBytes = Serialize(bundleDocument);
            var bundlePath = Path.Combine(domainDirectory, BundleFileName);
            await WriteFileAsync(bundlePath, bundleBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
            var bundleRelativePath = ToRelativePath(result.ExportDirectory, bundlePath);
            var bundleDigest = ComputeDigest(bundleBytes);
            var bundleLength = (long)bundleBytes.LongLength;
            additionalFiles.Add(new JsonExportFile(bundleRelativePath, bundleLength, bundleDigest));

            // bundle.json.jws: detached signature over the bundle bytes, only when signing is configured.
            MirrorSignatureDescriptor? signatureDescriptor = null;
            if (signingContext is not null)
            {
                var (signatureValue, signedAt) = await CreateSignatureAsync(
                    signingContext,
                    bundleBytes,
                    timeProvider,
                    cancellationToken)
                    .ConfigureAwait(false);
                var signatureBytes = Utf8NoBom.GetBytes(signatureValue);
                var signaturePath = Path.Combine(domainDirectory, BundleSignatureFileName);
                await WriteFileAsync(signaturePath, signatureBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
                var signatureRelativePath = ToRelativePath(result.ExportDirectory, signaturePath);
                var signatureDigest = ComputeDigest(signatureBytes);
                var signatureLength = (long)signatureBytes.LongLength;
                additionalFiles.Add(new JsonExportFile(signatureRelativePath, signatureLength, signatureDigest));
                signatureDescriptor = new MirrorSignatureDescriptor(
                    signatureRelativePath,
                    signingContext.Algorithm,
                    signingContext.KeyId,
                    signingContext.Provider,
                    signedAt);
            }

            var bundleDescriptor = new MirrorFileDescriptor(bundleRelativePath, bundleLength, bundleDigest, signatureDescriptor);

            // manifest.json: lightweight description of the domain plus a pointer to the bundle.
            var manifestDocument = new MirrorDomainManifestDocument(
                SchemaVersion,
                result.ExportedAt,
                domainId,
                domainDisplayName,
                domainAdvisories.Length,
                sources,
                bundleDescriptor);
            var manifestBytes = Serialize(manifestDocument);
            var manifestPath = Path.Combine(domainDirectory, ManifestFileName);
            await WriteFileAsync(manifestPath, manifestBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
            var manifestRelativePath = ToRelativePath(result.ExportDirectory, manifestPath);
            var manifestDigest = ComputeDigest(manifestBytes);
            var manifestLength = (long)manifestBytes.LongLength;
            additionalFiles.Add(new JsonExportFile(manifestRelativePath, manifestLength, manifestDigest));
            var manifestDescriptor = new MirrorFileDescriptor(manifestRelativePath, manifestLength, manifestDigest, null);

            domainEntries.Add(new MirrorIndexDomainEntry(
                domainId,
                domainDisplayName,
                domainAdvisories.Length,
                manifestDescriptor,
                bundleDescriptor,
                sources));
        }

        // index.json: ordinal-sorted list of all domains for deterministic output.
        domainEntries.Sort(static (left, right) => string.CompareOrdinal(left.DomainId, right.DomainId));

        var indexDocument = new MirrorIndexDocument(
            SchemaVersion,
            result.ExportedAt,
            options.TargetRepository,
            domainEntries);
        var indexBytes = Serialize(indexDocument);
        var indexPath = Path.Combine(mirrorRoot, IndexFileName);
        await WriteFileAsync(indexPath, indexBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
        var indexRelativePath = ToRelativePath(result.ExportDirectory, indexPath);
        var indexDigest = ComputeDigest(indexBytes);
        var indexLength = (long)indexBytes.LongLength;
        additionalFiles.Add(new JsonExportFile(indexRelativePath, indexLength, indexDigest));

        logger.LogInformation(
            "Generated {DomainCount} Concelier mirror domain bundle(s) under {MirrorRoot}.",
            domainEntries.Count,
            mirrorDirectoryName);

        // Rebuild the result so it accounts for the mirror files; checked() guards byte-total overflow.
        var combinedFiles = new List<JsonExportFile>(result.Files.Length + additionalFiles.Count);
        combinedFiles.AddRange(result.Files);
        combinedFiles.AddRange(additionalFiles);
        var combinedTotalBytes = checked(result.TotalBytes + additionalFiles.Sum(static file => file.Length));

        return new JsonExportResult(
            result.ExportDirectory,
            result.ExportedAt,
            combinedFiles,
            result.AdvisoryCount,
            combinedTotalBytes,
            result.Advisories);
    }

    /// <summary>
    /// Resolves the signing context from configuration, or returns <c>null</c> when signing
    /// is disabled. When the configured key is not yet registered (KeyNotFoundException),
    /// it is loaded from the PEM file and upserted into the resolved provider before retrying.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when keyId is missing or no crypto registry is registered.</exception>
    private static JsonMirrorSigningContext? PrepareSigningContext(
        JsonExportOptions.JsonMirrorSigningOptions signingOptions,
        IServiceProvider services,
        TimeProvider timeProvider,
        ILogger logger)
    {
        if (signingOptions is null || !signingOptions.Enabled)
        {
            return null;
        }

        var algorithm = string.IsNullOrWhiteSpace(signingOptions.Algorithm)
            ? SignatureAlgorithms.Es256
            : signingOptions.Algorithm.Trim();
        var keyId = (signingOptions.KeyId ?? string.Empty).Trim();
        if (keyId.Length == 0)
        {
            throw new InvalidOperationException("Mirror signing requires mirror.signing.keyId to be configured.");
        }

        var registry = services.GetService<ICryptoProviderRegistry>()
            ?? throw new InvalidOperationException("Mirror signing requires ICryptoProviderRegistry to be registered.");

        var providerHint = signingOptions.Provider?.Trim();
        var keyReference = new CryptoKeyReference(keyId, providerHint);
        CryptoSignerResolution resolved;
        try
        {
            resolved = registry.ResolveSigner(CryptoCapability.Signing, algorithm, keyReference, providerHint);
        }
        catch (KeyNotFoundException)
        {
            // Key not registered yet: load it from disk, register it, then resolve again.
            var provider = ResolveProvider(registry, algorithm, providerHint);
            var signingKey = LoadSigningKey(signingOptions, provider, services, timeProvider, algorithm);
            provider.UpsertSigningKey(signingKey);
            resolved = registry.ResolveSigner(CryptoCapability.Signing, algorithm, keyReference, provider.Name);
        }

        logger.LogDebug(
            "Mirror signing configured with key {KeyId} via provider {Provider} using {Algorithm}.",
            resolved.Signer.KeyId,
            resolved.ProviderName,
            algorithm);

        return new JsonMirrorSigningContext(resolved.Signer, algorithm, resolved.Signer.KeyId, resolved.ProviderName);
    }

    /// <summary>
    /// Picks the crypto provider: the hinted one (verified to support the algorithm)
    /// when a hint is given, otherwise whatever the registry resolves for the capability.
    /// </summary>
    private static ICryptoProvider ResolveProvider(ICryptoProviderRegistry registry, string algorithm, string? providerHint)
    {
        if (!string.IsNullOrWhiteSpace(providerHint) && registry.TryResolve(providerHint, out var hinted))
        {
            if (!hinted.Supports(CryptoCapability.Signing, algorithm))
            {
                throw new InvalidOperationException(
                    $"Crypto provider '{providerHint}' does not support signing algorithm '{algorithm}'.");
            }

            return hinted;
        }

        return registry.ResolveOrThrow(CryptoCapability.Signing, algorithm);
    }

    /// <summary>
    /// Loads the configured EC private key from its PEM file. Relative paths are resolved
    /// against the host content root when available, otherwise <see cref="AppContext.BaseDirectory"/>.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when keyPath is missing or the PEM is not an EC private key.</exception>
    /// <exception cref="FileNotFoundException">Thrown when the resolved key file does not exist.</exception>
    private static CryptoSigningKey LoadSigningKey(
        JsonExportOptions.JsonMirrorSigningOptions signingOptions,
        ICryptoProvider provider,
        IServiceProvider services,
        TimeProvider timeProvider,
        string algorithm)
    {
        var keyPath = (signingOptions.KeyPath ?? string.Empty).Trim();
        if (keyPath.Length == 0)
        {
            throw new InvalidOperationException("Mirror signing requires mirror.signing.keyPath to be configured.");
        }

        var environment = services.GetService<IHostEnvironment>();
        var basePath = environment?.ContentRootPath ?? AppContext.BaseDirectory;
        var resolvedPath = Path.IsPathRooted(keyPath)
            ? keyPath
            : Path.GetFullPath(Path.Combine(basePath, keyPath));
        if (!File.Exists(resolvedPath))
        {
            throw new FileNotFoundException($"Mirror signing key '{signingOptions.KeyId}' not found.", resolvedPath);
        }

        var pem = File.ReadAllText(resolvedPath);
        using var ecdsa = ECDsa.Create();
        try
        {
            ecdsa.ImportFromPem(pem);
        }
        catch (CryptographicException ex)
        {
            throw new InvalidOperationException("Failed to import mirror signing key. Ensure the PEM contains an EC private key.", ex);
        }

        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);
        return new CryptoSigningKey(
            new CryptoKeyReference(signingOptions.KeyId, provider.Name),
            algorithm,
            in parameters,
            timeProvider.GetUtcNow());
    }

    /// <summary>
    /// Produces a detached JWS over <paramref name="payload"/> using the unencoded-payload
    /// convention (protected header has <c>"b64": false</c> with <c>"crit": ["b64"]</c>);
    /// the returned compact serialization has an empty payload segment
    /// (<c>header..signature</c>). The signing input is <c>ASCII(header) || '.' || payload</c>.
    /// </summary>
    private static async Task<(string Value, DateTimeOffset SignedAt)> CreateSignatureAsync(
        JsonMirrorSigningContext context,
        ReadOnlyMemory<byte> payload,
        TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        var header = new Dictionary<string, object>
        {
            ["alg"] = context.Algorithm,
            ["kid"] = context.KeyId,
            ["typ"] = SignatureMediaType,
            ["b64"] = false,
            ["crit"] = new[] { "b64" }
        };
        if (!string.IsNullOrWhiteSpace(context.Provider))
        {
            header["provider"] = context.Provider;
        }

        var headerJson = JsonSerializer.Serialize(header, HeaderSerializerOptions);
        var protectedHeader = Base64UrlEncode(Utf8NoBom.GetBytes(headerJson));
        var signingInputLength = protectedHeader.Length + 1 + payload.Length;
        // Pooled buffer holds "header.payload" for the signer; returned in finally.
        var buffer = ArrayPool<byte>.Shared.Rent(signingInputLength);
        try
        {
            var headerBytes = Encoding.ASCII.GetBytes(protectedHeader);
            Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length);
            buffer[headerBytes.Length] = (byte)'.';
            var payloadArray = payload.ToArray();
            Buffer.BlockCopy(payloadArray, 0, buffer, headerBytes.Length + 1, payloadArray.Length);
            var signingInput = new ReadOnlyMemory<byte>(buffer, 0, signingInputLength);
            var signatureBytes = await context.Signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false);
            var encodedSignature = Base64UrlEncode(signatureBytes);
            var signedAt = timeProvider.GetUtcNow();
            return (string.Concat(protectedHeader, "..", encodedSignature), signedAt);
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }

    /// <summary>
    /// Aggregates provenance sources across the advisory set: per source, the earliest and
    /// latest RecordedAt and the number of distinct advisories referencing it, returned in
    /// ordinal source order.
    /// </summary>
    private static IReadOnlyList<JsonMirrorSourceSummary> BuildSourceSummaries(IReadOnlyList<Advisory> advisories)
    {
        var builders = new Dictionary<string, SourceAccumulator>(StringComparer.OrdinalIgnoreCase);
        foreach (var advisory in advisories)
        {
            // Tracks sources already counted for this advisory so each advisory
            // contributes at most once to a source's advisory count.
            var counted = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
            foreach (var provenance in advisory.Provenance)
            {
                if (string.IsNullOrWhiteSpace(provenance.Source))
                {
                    continue;
                }

                var source = provenance.Source.Trim();
                if (!builders.TryGetValue(source, out var accumulator))
                {
                    accumulator = new SourceAccumulator();
                    builders[source] = accumulator;
                }

                accumulator.Record(provenance.RecordedAt);
                if (counted.Add(source))
                {
                    accumulator.IncrementAdvisoryCount();
                }
            }
        }

        return builders
            .OrderBy(static pair => pair.Key, StringComparer.Ordinal)
            .Select(pair => new JsonMirrorSourceSummary(
                pair.Key,
                pair.Value.FirstRecordedAt,
                pair.Value.LastRecordedAt,
                pair.Value.AdvisoryCount))
            .ToArray();
    }

    /// <summary>
    /// Builds a case-insensitive filter set from configured values, or <c>null</c>
    /// when the configuration is empty (meaning "match everything").
    /// </summary>
    private static HashSet<string>? CreateFilterSet(IList<string>? values)
    {
        if (values is null || values.Count == 0)
        {
            return null;
        }

        var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var value in values)
        {
            if (string.IsNullOrWhiteSpace(value))
            {
                continue;
            }

            set.Add(value.Trim());
        }

        return set.Count == 0 ? null : set;
    }

    /// <summary>
    /// Returns true when the advisory passes both optional filters: its key scheme is in
    /// <paramref name="schemeFilter"/> and at least one provenance source is in
    /// <paramref name="sourceFilter"/>. A null filter matches everything.
    /// </summary>
    private static bool MatchesFilters(Advisory advisory, HashSet<string>? schemeFilter, HashSet<string>? sourceFilter)
    {
        if (schemeFilter is not null)
        {
            var scheme = ExtractScheme(advisory.AdvisoryKey);
            if (!schemeFilter.Contains(scheme))
            {
                return false;
            }
        }

        if (sourceFilter is not null)
        {
            var hasSource = advisory.Provenance.Any(provenance =>
                !string.IsNullOrWhiteSpace(provenance.Source) &&
                sourceFilter.Contains(provenance.Source.Trim()));
            if (!hasSource)
            {
                return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Extracts the scheme prefix from an advisory key (the text before the first ':');
    /// returns the whole trimmed key when there is no separator, or empty for blank input.
    /// </summary>
    private static string ExtractScheme(string advisoryKey)
    {
        if (string.IsNullOrWhiteSpace(advisoryKey))
        {
            return string.Empty;
        }

        var trimmed = advisoryKey.Trim();
        var separatorIndex = trimmed.IndexOf(':');
        return separatorIndex <= 0 ? trimmed : trimmed[..separatorIndex];
    }

    /// <summary>Serializes a document with the canonical (indented) serializer and encodes it as BOM-less UTF-8.</summary>
    private static byte[] Serialize<T>(T value)
    {
        var json = CanonicalJsonSerializer.SerializeIndented(value);
        return Utf8NoBom.GetBytes(json);
    }

    /// <summary>Writes the bytes and stamps the file's last-write time with the export timestamp.</summary>
    private static async Task WriteFileAsync(string path, byte[] content, DateTime exportedAtUtc, CancellationToken cancellationToken)
    {
        await File.WriteAllBytesAsync(path, content, cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(path, exportedAtUtc);
    }

    /// <summary>Computes the path relative to <paramref name="root"/>, normalized to forward slashes.</summary>
    private static string ToRelativePath(string root, string fullPath)
    {
        var relative = Path.GetRelativePath(root, fullPath);
        return relative.Replace(Path.DirectorySeparatorChar, '/');
    }

    /// <summary>Returns the SHA-256 digest of the payload in "sha256:&lt;lowercase hex&gt;" form.</summary>
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = SHA256.HashData(payload);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Best-effort stamp of a directory's last-write time; failures on platforms or
    /// filesystems that disallow it are deliberately swallowed.
    /// </summary>
    private static void TrySetDirectoryTimestamp(string directory, DateTime exportedAtUtc)
    {
        try
        {
            Directory.SetLastWriteTimeUtc(directory, exportedAtUtc);
        }
        catch (IOException)
        {
        }
        catch (UnauthorizedAccessException)
        {
        }
        catch (PlatformNotSupportedException)
        {
        }
    }

    /// <summary>
    /// Base64url-encodes the value: standard base64 with '+' → '-', '/' → '_',
    /// and padding '=' removed.
    /// </summary>
    private static string Base64UrlEncode(ReadOnlySpan<byte> value)
    {
        var encoded = Convert.ToBase64String(value);
        var builder = new StringBuilder(encoded.Length);
        foreach (var ch in encoded)
        {
            switch (ch)
            {
                case '+':
                    builder.Append('-');
                    break;
                case '/':
                    builder.Append('_');
                    break;
                case '=':
                    break;
                default:
                    builder.Append(ch);
                    break;
            }
        }

        return builder.ToString();
    }

    // Resolved signer plus the metadata echoed into signature descriptors.
    private sealed record JsonMirrorSigningContext(ICryptoSigner Signer, string Algorithm, string KeyId, string Provider);

    // Top-level mirror/index.json document listing all domains.
    private sealed record MirrorIndexDocument(
        int SchemaVersion,
        DateTimeOffset GeneratedAt,
        string? TargetRepository,
        IReadOnlyList<MirrorIndexDomainEntry> Domains);

    // One domain row inside the index document.
    private sealed record MirrorIndexDomainEntry(
        string DomainId,
        string DisplayName,
        int AdvisoryCount,
        MirrorFileDescriptor Manifest,
        MirrorFileDescriptor Bundle,
        IReadOnlyList<JsonMirrorSourceSummary> Sources);

    // Per-domain manifest.json document pointing at the bundle.
    private sealed record MirrorDomainManifestDocument(
        int SchemaVersion,
        DateTimeOffset GeneratedAt,
        string DomainId,
        string DisplayName,
        int AdvisoryCount,
        IReadOnlyList<JsonMirrorSourceSummary> Sources,
        MirrorFileDescriptor Bundle);

    // Per-domain bundle.json document carrying the advisories themselves.
    private sealed record MirrorDomainBundleDocument(
        int SchemaVersion,
        DateTimeOffset GeneratedAt,
        string? TargetRepository,
        string DomainId,
        string DisplayName,
        int AdvisoryCount,
        IReadOnlyList<Advisory> Advisories,
        IReadOnlyList<JsonMirrorSourceSummary> Sources);

    // Relative path, size, digest, and optional signature of an emitted file.
    private sealed record MirrorFileDescriptor(
        string Path,
        long SizeBytes,
        string Digest,
        MirrorSignatureDescriptor? Signature);

    // Metadata describing a detached signature file.
    private sealed record MirrorSignatureDescriptor(
        string Path,
        string Algorithm,
        string KeyId,
        string Provider,
        DateTimeOffset SignedAt);

    // Per-source rollup: first/last RecordedAt and distinct-advisory count.
    private sealed record JsonMirrorSourceSummary(
        string Source,
        DateTimeOffset? FirstRecordedAt,
        DateTimeOffset? LastRecordedAt,
        int AdvisoryCount);

    /// <summary>
    /// Mutable accumulator for one provenance source while summaries are being built.
    /// </summary>
    private sealed class SourceAccumulator
    {
        // Earliest RecordedAt seen for this source (UTC).
        public DateTimeOffset? FirstRecordedAt { get; private set; }

        // Latest RecordedAt seen for this source (UTC).
        public DateTimeOffset? LastRecordedAt { get; private set; }

        // Number of distinct advisories that referenced this source.
        public int AdvisoryCount { get; private set; }

        /// <summary>Folds a RecordedAt timestamp (normalized to UTC) into the min/max bounds.</summary>
        public void Record(DateTimeOffset recordedAt)
        {
            var normalized = recordedAt.ToUniversalTime();
            if (FirstRecordedAt is null || normalized < FirstRecordedAt.Value)
            {
                FirstRecordedAt = normalized;
            }

            if (LastRecordedAt is null || normalized > LastRecordedAt.Value)
            {
                LastRecordedAt = normalized;
            }
        }

        /// <summary>Increments the distinct-advisory counter (caller ensures once per advisory).</summary>
        public void IncrementAdvisoryCount()
        {
            AdvisoryCount++;
        }
    }
}

View File

@@ -10,13 +10,15 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
</ItemGroup>
</Project>
<ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
<ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -10,4 +10,4 @@
|Stream advisories during export|BE-Export|Storage.Mongo|DONE exporter + streaming-only test ensures single enumeration and per-file digest capture.|
|Emit export manifest with digest metadata|BE-Export|Exporters|DONE manifest now includes per-file digests/sizes alongside tree digest.|
|Surface new advisory fields (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) JSON exporter validated with new fixtures ensuring description/CWEs/canonical metric are preserved in outputs; `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests` run 2025-10-15 for regression coverage.|
|CONCELIER-EXPORT-08-201 Mirror bundle + domain manifest|Team Concelier Export|FEEDCORE-ENGINE-07-001|TODO Produce per-domain aggregate bundles (JSON + manifest) with deterministic digests, include upstream source metadata, and publish index consumed by mirror endpoints/tests.|
|CONCELIER-EXPORT-08-201 Mirror bundle + domain manifest|Team Concelier Export|FEEDCORE-ENGINE-07-001|DONE (2025-10-19) Mirror bundle writer emits domain aggregates + manifests with cosign-compatible JWS signatures; index/tests updated via `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj` (2025-10-19).|

View File

@@ -283,6 +283,155 @@ public sealed class TrivyDbFeedExporterTests : IDisposable
Assert.Empty(orasPusher.Pushes);
}
// Verifies that enabling mirror options makes the exporter emit a mirror/index.json
// plus per-domain manifest/metadata/db.tar.gz files whose digests all agree, and
// that the ORAS pusher is never invoked for a plain filesystem export.
[Fact]
public async Task ExportAsync_WritesMirrorBundlesWhenConfigured()
{
    // Arrange: two advisories so the domain advisoryCount assertion below is non-trivial.
    var advisoryOne = CreateSampleAdvisory("CVE-2025-1001", "Mirror Advisory One");
    var advisoryTwo = CreateSampleAdvisory("CVE-2025-1002", "Mirror Advisory Two");
    var advisoryStore = new StubAdvisoryStore(advisoryOne, advisoryTwo);
    var optionsValue = new TrivyDbExportOptions
    {
        OutputRoot = _root,
        ReferencePrefix = "example/trivy",
        TargetRepository = "s3://mirror/trivy",
        Json = new JsonExportOptions
        {
            OutputRoot = _jsonRoot,
            MaintainLatestSymlink = false,
        },
        KeepWorkingTree = false,
    };
    // Enable mirroring with a single "primary" domain.
    optionsValue.Mirror.Enabled = true;
    optionsValue.Mirror.DirectoryName = "mirror";
    optionsValue.Mirror.Domains.Add(new TrivyDbMirrorDomainOptions
    {
        Id = "primary",
        DisplayName = "Primary Mirror",
    });
    var options = Options.Create(optionsValue);
    var packageBuilder = new TrivyDbPackageBuilder();
    var ociWriter = new TrivyDbOciWriter();
    var planner = new TrivyDbExportPlanner();
    var stateStore = new InMemoryExportStateStore();
    // Fixed clock so the export id (derived from TagFormat) is deterministic.
    var exportedAt = DateTimeOffset.Parse("2024-09-18T12:00:00Z", CultureInfo.InvariantCulture);
    var timeProvider = new TestTimeProvider(exportedAt);
    var stateManager = new ExportStateManager(stateStore, timeProvider);
    var builderMetadata = JsonSerializer.SerializeToUtf8Bytes(new
    {
        Version = 2,
        NextUpdate = "2024-09-19T12:00:00Z",
        UpdatedAt = "2024-09-18T12:00:00Z",
    });
    var builder = new StubTrivyDbBuilder(_root, builderMetadata);
    var orasPusher = new StubTrivyDbOrasPusher();
    var exporter = new TrivyDbFeedExporter(
        advisoryStore,
        new VulnListJsonExportPathResolver(),
        options,
        packageBuilder,
        ociWriter,
        stateManager,
        planner,
        builder,
        orasPusher,
        NullLogger<TrivyDbFeedExporter>.Instance,
        timeProvider);

    // Act.
    using var provider = new ServiceCollection().BuildServiceProvider();
    await exporter.ExportAsync(provider, CancellationToken.None);

    // Assert: expected mirror layout exists under the export root.
    var exportId = exportedAt.ToString(optionsValue.TagFormat, CultureInfo.InvariantCulture);
    var layoutPath = optionsValue.GetExportRoot(exportId);
    var mirrorRoot = Path.Combine(layoutPath, "mirror");
    var domainRoot = Path.Combine(mirrorRoot, "primary");
    Assert.True(File.Exists(Path.Combine(mirrorRoot, "index.json")));
    Assert.True(File.Exists(Path.Combine(domainRoot, "manifest.json")));
    Assert.True(File.Exists(Path.Combine(domainRoot, "metadata.json")));
    Assert.True(File.Exists(Path.Combine(domainRoot, "db.tar.gz")));
    var reference = $"{optionsValue.ReferencePrefix}:{exportId}";
    var manifestDigest = ReadManifestDigest(layoutPath);
    var indexPath = Path.Combine(mirrorRoot, "index.json");
    // Digests advertised by the index; compared against recomputed file hashes below.
    string? indexManifestDescriptorDigest = null;
    string? indexMetadataDigest = null;
    string? indexDatabaseDigest = null;
    using (var indexDoc = JsonDocument.Parse(File.ReadAllBytes(indexPath)))
    {
        var root = indexDoc.RootElement;
        Assert.Equal(1, root.GetProperty("schemaVersion").GetInt32());
        Assert.Equal(reference, root.GetProperty("reference").GetString());
        Assert.Equal(manifestDigest, root.GetProperty("manifestDigest").GetString());
        Assert.Equal("full", root.GetProperty("mode").GetString());
        Assert.Equal("s3://mirror/trivy", root.GetProperty("targetRepository").GetString());
        // Full exports must not carry a delta section.
        Assert.False(root.TryGetProperty("delta", out _));
        var domains = root.GetProperty("domains").EnumerateArray().ToArray();
        var domain = Assert.Single(domains);
        Assert.Equal("primary", domain.GetProperty("domainId").GetString());
        Assert.Equal("Primary Mirror", domain.GetProperty("displayName").GetString());
        Assert.Equal(2, domain.GetProperty("advisoryCount").GetInt32());
        var manifestDescriptor = domain.GetProperty("manifest");
        Assert.Equal("mirror/primary/manifest.json", manifestDescriptor.GetProperty("path").GetString());
        indexManifestDescriptorDigest = manifestDescriptor.GetProperty("digest").GetString();
        var metadataDescriptor = domain.GetProperty("metadata");
        Assert.Equal("mirror/primary/metadata.json", metadataDescriptor.GetProperty("path").GetString());
        indexMetadataDigest = metadataDescriptor.GetProperty("digest").GetString();
        var databaseDescriptor = domain.GetProperty("database");
        Assert.Equal("mirror/primary/db.tar.gz", databaseDescriptor.GetProperty("path").GetString());
        indexDatabaseDigest = databaseDescriptor.GetProperty("digest").GetString();
    }
    var domainManifestPath = Path.Combine(domainRoot, "manifest.json");
    var rootMetadataPath = Path.Combine(layoutPath, "metadata.json");
    var domainMetadataPath = Path.Combine(domainRoot, "metadata.json");
    var domainDbPath = Path.Combine(domainRoot, "db.tar.gz");
    // Recompute SHA-256 digests from disk and check they match what the index advertised.
    var domainManifestBytes = File.ReadAllBytes(domainManifestPath);
    var domainManifestDigest = "sha256:" + Convert.ToHexString(SHA256.HashData(domainManifestBytes)).ToLowerInvariant();
    var rootMetadataBytes = File.ReadAllBytes(rootMetadataPath);
    var domainMetadataBytes = File.ReadAllBytes(domainMetadataPath);
    // Domain metadata is expected to be a byte-for-byte copy of the root metadata.
    Assert.Equal(rootMetadataBytes, domainMetadataBytes);
    var metadataDigest = "sha256:" + Convert.ToHexString(SHA256.HashData(domainMetadataBytes)).ToLowerInvariant();
    var databaseDigest = "sha256:" + Convert.ToHexString(SHA256.HashData(File.ReadAllBytes(domainDbPath))).ToLowerInvariant();
    Assert.Equal(domainManifestDigest, indexManifestDescriptorDigest);
    Assert.Equal(metadataDigest, indexMetadataDigest);
    Assert.Equal(databaseDigest, indexDatabaseDigest);
    // The per-domain manifest must agree with the index and reference its own files.
    using (var manifestDoc = JsonDocument.Parse(File.ReadAllBytes(domainManifestPath)))
    {
        var manifestRoot = manifestDoc.RootElement;
        Assert.Equal("primary", manifestRoot.GetProperty("domainId").GetString());
        Assert.Equal("Primary Mirror", manifestRoot.GetProperty("displayName").GetString());
        Assert.Equal(reference, manifestRoot.GetProperty("reference").GetString());
        Assert.Equal(manifestDigest, manifestRoot.GetProperty("manifestDigest").GetString());
        Assert.Equal("full", manifestRoot.GetProperty("mode").GetString());
        Assert.Equal("s3://mirror/trivy", manifestRoot.GetProperty("targetRepository").GetString());
        var metadataDescriptor = manifestRoot.GetProperty("metadata");
        Assert.Equal("mirror/primary/metadata.json", metadataDescriptor.GetProperty("path").GetString());
        Assert.Equal(metadataDigest, metadataDescriptor.GetProperty("digest").GetString());
        var databaseDescriptor = manifestRoot.GetProperty("database");
        Assert.Equal("mirror/primary/db.tar.gz", databaseDescriptor.GetProperty("path").GetString());
        Assert.Equal(databaseDigest, databaseDescriptor.GetProperty("digest").GetString());
        // assumes CreateSampleAdvisory attaches "nvd" provenance — TODO confirm against the helper.
        var sources = manifestRoot.GetProperty("sources").EnumerateArray().ToArray();
        Assert.NotEmpty(sources);
        Assert.Contains(sources, element => string.Equals(element.GetProperty("source").GetString(), "nvd", StringComparison.OrdinalIgnoreCase));
    }
    // No ORAS pushes expected for this configuration.
    Assert.Empty(orasPusher.Pushes);
}
[Fact]
public async Task ExportAsync_SkipsOrasPushWhenDeltaPublishingDisabled()
{
@@ -774,7 +923,9 @@ public sealed class TrivyDbFeedExporterTests : IDisposable
private ExportStateRecord? _record;
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
=> Task.FromResult(_record);
{
return Task.FromResult(_record);
}
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{

View File

@@ -15,8 +15,8 @@
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
</ItemGroup>
</Project>
</Project>

View File

@@ -12,4 +12,4 @@
|Streamed package building to avoid large copies|BE-Export|Exporters|DONE metadata/config now reuse backing arrays and OCI writer streams directly without double buffering.|
|Plan incremental/delta exports|BE-Export|Exporters|DONE state captures per-file manifests, planner schedules delta vs full resets, layer reuse smoke test verifies OCI reuse, and operator guide documents the validation flow.|
|Advisory schema parity export (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) exporter/test fixtures updated to handle description/CWEs/canonical metric fields during Trivy DB packaging; `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests` re-run 2025-10-15 to confirm coverage.|
|CONCELIER-EXPORT-08-202 Mirror-ready Trivy DB bundles|Team Concelier Export|CONCELIER-EXPORT-08-201|TODO Generate domain-specific Trivy DB archives + metadata manifest, ensure deterministic digests, and document sync process for downstream Concelier nodes.|
|CONCELIER-EXPORT-08-202 Mirror-ready Trivy DB bundles|Team Concelier Export|CONCELIER-EXPORT-08-201|**DONE (2025-10-19)** Added mirror export options and writer emitting `mirror/index.json` plus per-domain `manifest.json`/`metadata.json`/`db.tar.gz` with deterministic SHA-256 digests; regression covered via `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj`.|

View File

@@ -24,25 +24,43 @@ public sealed class TrivyDbExportOptions
OutputRoot = Path.Combine("exports", "trivy", "tree")
};
public TrivyDbBuilderOptions Builder { get; set; } = new();
public TrivyDbOrasOptions Oras { get; set; } = new();
public TrivyDbOfflineBundleOptions OfflineBundle { get; set; } = new();
public string GetExportRoot(string exportId)
{
ArgumentException.ThrowIfNullOrEmpty(exportId);
var root = Path.GetFullPath(OutputRoot);
return Path.Combine(root, exportId);
}
}
public sealed class TrivyDbBuilderOptions
{
public string ExecutablePath { get; set; } = "trivy-db";
public string? WorkingDirectory { get; set; }
public TrivyDbBuilderOptions Builder { get; set; } = new();
public TrivyDbOrasOptions Oras { get; set; } = new();
public TrivyDbOfflineBundleOptions OfflineBundle { get; set; } = new();
public TrivyDbMirrorOptions Mirror { get; set; } = new();
public string GetExportRoot(string exportId)
{
ArgumentException.ThrowIfNullOrEmpty(exportId);
var root = Path.GetFullPath(OutputRoot);
return Path.Combine(root, exportId);
}
}
/// <summary>
/// Options controlling per-domain Trivy DB mirror bundle generation during export.
/// </summary>
public sealed class TrivyDbMirrorOptions
{
    /// <summary>Enables mirror bundle output. Disabled by default.</summary>
    public bool Enabled { get; set; }

    /// <summary>Directory name (under the export root) that receives mirror artefacts.</summary>
    public string DirectoryName { get; set; } = "mirror";

    /// <summary>Domains to emit bundles for. An empty list means no bundles are written.</summary>
    public IList<TrivyDbMirrorDomainOptions> Domains { get; } = new List<TrivyDbMirrorDomainOptions>();
}
/// <summary>
/// Identifies a single mirror domain to generate a bundle for.
/// </summary>
public sealed class TrivyDbMirrorDomainOptions
{
    /// <summary>Domain identifier; also used as the bundle sub-directory name.</summary>
    public string Id { get; set; } = string.Empty;

    /// <summary>Optional human-readable name; falls back to <see cref="Id"/> when unset.</summary>
    public string? DisplayName { get; set; }
}
public sealed class TrivyDbBuilderOptions
{
public string ExecutablePath { get; set; } = "trivy-db";
public string? WorkingDirectory { get; set; }
public TimeSpan UpdateInterval { get; set; } = TimeSpan.FromHours(24);

View File

@@ -118,6 +118,8 @@ public sealed class TrivyDbFeedExporter : IFeedExporter
var builderResult = await _builder.BuildAsync(jsonResult, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
var metadataBytes = CreateMetadataJson(plan, builderResult.BuilderMetadata, treeDigest, jsonResult, exportedAt);
var metadataDigest = ComputeDigest(metadataBytes);
var metadataLength = metadataBytes.LongLength;
try
{
@@ -137,6 +139,22 @@ public sealed class TrivyDbFeedExporter : IFeedExporter
}
var ociResult = await _ociWriter.WriteAsync(package, destination, reference, plan, baseLayout, cancellationToken).ConfigureAwait(false);
await TrivyDbMirrorBundleWriter.WriteAsync(
destination,
jsonResult,
_options,
plan,
builderResult,
reference,
ociResult.ManifestDigest,
metadataBytes,
metadataDigest,
metadataLength,
_exporterVersion,
exportedAt,
_logger,
cancellationToken).ConfigureAwait(false);
if (_options.Oras.Enabled && ShouldPublishToOras(plan.Mode))
{
@@ -421,6 +439,13 @@ public sealed class TrivyDbFeedExporter : IFeedExporter
return string.IsNullOrEmpty(normalized) ? "." : normalized;
}
/// <summary>
/// Computes the lowercase hex SHA-256 digest of <paramref name="payload"/>,
/// formatted as an OCI-style <c>sha256:&lt;hex&gt;</c> string.
/// </summary>
private static string ComputeDigest(ReadOnlySpan<byte> payload)
{
    // SHA-256 always yields 32 bytes; hash into a stack buffer to avoid an intermediate array.
    Span<byte> hash = stackalloc byte[32];
    SHA256.HashData(payload, hash);
    return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
private bool ShouldPublishToOras(TrivyDbExportMode mode)
{
var overrides = TrivyDbExportOverrideScope.Current;

View File

@@ -0,0 +1,392 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Exporter.Json;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Exporter.TrivyDb;
/// <summary>
/// Writes per-domain Trivy DB mirror bundles (metadata copy, database archive copy,
/// and a manifest) plus a top-level <c>index.json</c> under the export layout's
/// mirror directory. All file digests are SHA-256 and paths are layout-relative.
/// </summary>
internal static class TrivyDbMirrorBundleWriter
{
    // Schema version stamped into every emitted index/manifest document.
    private const int SchemaVersion = 1;
    private const string DefaultDirectoryName = "mirror";
    private const string MetadataFileName = "metadata.json";
    private const string DatabaseFileName = "db.tar.gz";
    private const string ManifestFileName = "manifest.json";

    // Compact camelCase JSON; null-valued properties are omitted for minimal, stable output.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };

    /// <summary>
    /// Emits a mirror bundle for every configured domain, then an index over all of them.
    /// No-op when mirroring is disabled or no domains are configured.
    /// </summary>
    /// <param name="layoutRoot">Root of the current export layout on disk.</param>
    /// <param name="jsonResult">JSON export result; supplies the advisory set for source summaries.</param>
    /// <param name="options">Export options; <c>options.Mirror</c> gates this writer.</param>
    /// <param name="plan">Export plan; supplies mode (full/delta) and baseline info.</param>
    /// <param name="builderResult">Trivy DB build output; supplies the archive path/length/digest.</param>
    /// <param name="reference">OCI reference of the export.</param>
    /// <param name="manifestDigest">Digest of the OCI manifest for the export.</param>
    /// <param name="metadataBytes">Serialized root metadata, copied verbatim into each domain.</param>
    /// <param name="metadataDigest">Precomputed digest of <paramref name="metadataBytes"/>.</param>
    /// <param name="metadataLength">Length of <paramref name="metadataBytes"/> in bytes.</param>
    /// <param name="exporterVersion">Version string recorded in emitted documents.</param>
    /// <param name="exportedAt">Export timestamp; also applied as file/directory mtime.</param>
    /// <param name="logger">Logger for skip warnings and the completion message.</param>
    /// <param name="cancellationToken">Cancellation token, honored between domains and on I/O.</param>
    public static async Task WriteAsync(
        string layoutRoot,
        JsonExportResult jsonResult,
        TrivyDbExportOptions options,
        TrivyDbExportPlan plan,
        TrivyDbBuilderResult builderResult,
        string reference,
        string manifestDigest,
        ReadOnlyMemory<byte> metadataBytes,
        string metadataDigest,
        long metadataLength,
        string exporterVersion,
        DateTimeOffset exportedAt,
        ILogger logger,
        CancellationToken cancellationToken)
    {
        // Silently skip when mirroring is not requested; argument validation only
        // happens when there is actual work to do.
        if (options?.Mirror is null || !options.Mirror.Enabled || options.Mirror.Domains.Count == 0)
        {
            return;
        }
        if (string.IsNullOrWhiteSpace(layoutRoot))
        {
            throw new ArgumentException("Layout root must be provided.", nameof(layoutRoot));
        }
        if (builderResult is null)
        {
            throw new ArgumentNullException(nameof(builderResult));
        }
        if (jsonResult is null)
        {
            throw new ArgumentNullException(nameof(jsonResult));
        }
        // Fall back to "mirror" when the configured directory name is blank.
        var directoryName = string.IsNullOrWhiteSpace(options.Mirror.DirectoryName)
            ? DefaultDirectoryName
            : options.Mirror.DirectoryName.Trim();
        if (directoryName.Length == 0)
        {
            directoryName = DefaultDirectoryName;
        }
        var root = Path.Combine(layoutRoot, directoryName);
        Directory.CreateDirectory(root);
        // Apply the export timestamp so output is reproducible across runs.
        var timestamp = exportedAt.UtcDateTime;
        TrySetDirectoryTimestamp(root, timestamp);
        // Deterministic advisory ordering (ordinal by key) for stable source summaries.
        var advisories = jsonResult.Advisories.IsDefaultOrEmpty
            ? Array.Empty<Advisory>()
            : jsonResult.Advisories
                .OrderBy(static advisory => advisory.AdvisoryKey, StringComparer.Ordinal)
                .ToArray();
        var domains = new List<MirrorIndexDomainEntry>();
        foreach (var domainOption in options.Mirror.Domains)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (domainOption is null)
            {
                logger.LogWarning("Encountered null Trivy mirror domain configuration; skipping.");
                continue;
            }
            var domainId = (domainOption.Id ?? string.Empty).Trim();
            if (domainId.Length == 0)
            {
                logger.LogWarning("Skipping Trivy mirror domain with empty id.");
                continue;
            }
            var displayName = string.IsNullOrWhiteSpace(domainOption.DisplayName)
                ? domainId
                : domainOption.DisplayName!.Trim();
            var domainDirectory = Path.Combine(root, domainId);
            Directory.CreateDirectory(domainDirectory);
            TrySetDirectoryTimestamp(domainDirectory, timestamp);
            // Root metadata is duplicated verbatim into each domain directory.
            var metadataPath = Path.Combine(domainDirectory, MetadataFileName);
            await WriteFileAsync(metadataPath, metadataBytes, timestamp, cancellationToken).ConfigureAwait(false);
            var metadataRelativePath = ToRelativePath(layoutRoot, metadataPath);
            // Each domain also gets its own copy of the built database archive.
            var databasePath = Path.Combine(domainDirectory, DatabaseFileName);
            await CopyDatabaseAsync(builderResult.ArchivePath, databasePath, timestamp, cancellationToken).ConfigureAwait(false);
            var databaseRelativePath = ToRelativePath(layoutRoot, databasePath);
            // NOTE(review): summaries are over the full advisory set, recomputed per domain.
            var sources = BuildSourceSummaries(advisories);
            var manifestDocument = new MirrorDomainManifestDocument(
                SchemaVersion,
                exportedAt,
                exporterVersion,
                reference,
                manifestDigest,
                options.TargetRepository,
                domainId,
                displayName,
                plan.Mode.ToString().ToLowerInvariant(),
                plan.BaseExportId,
                plan.BaseManifestDigest,
                plan.ResetBaseline,
                new MirrorFileDescriptor(metadataRelativePath, metadataLength, metadataDigest),
                new MirrorFileDescriptor(databaseRelativePath, builderResult.ArchiveLength, builderResult.ArchiveDigest),
                sources);
            var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifestDocument, SerializerOptions);
            var manifestPath = Path.Combine(domainDirectory, ManifestFileName);
            await WriteFileAsync(manifestPath, manifestBytes, timestamp, cancellationToken).ConfigureAwait(false);
            var manifestRelativePath = ToRelativePath(layoutRoot, manifestPath);
            var manifestDigestValue = ComputeDigest(manifestBytes);
            domains.Add(new MirrorIndexDomainEntry(
                domainId,
                displayName,
                advisories.Length,
                new MirrorFileDescriptor(manifestRelativePath, manifestBytes.LongLength, manifestDigestValue),
                new MirrorFileDescriptor(metadataRelativePath, metadataLength, metadataDigest),
                new MirrorFileDescriptor(databaseRelativePath, builderResult.ArchiveLength, builderResult.ArchiveDigest),
                sources));
        }
        // If every configured domain was skipped, remove the (now useless) mirror directory.
        if (domains.Count == 0)
        {
            Directory.Delete(root, recursive: true);
            return;
        }
        // Stable ordinal ordering of domains in the index.
        domains.Sort(static (left, right) => string.CompareOrdinal(left.DomainId, right.DomainId));
        // Delta metadata only for delta exports: changed files plus removed paths.
        var delta = plan.Mode == TrivyDbExportMode.Delta
            ? new MirrorDeltaMetadata(
                plan.ChangedFiles.Select(static file => new MirrorDeltaFile(file.Path, file.Digest)).ToArray(),
                plan.RemovedPaths.ToArray())
            : null;
        var indexDocument = new MirrorIndexDocument(
            SchemaVersion,
            exportedAt,
            exporterVersion,
            options.TargetRepository,
            reference,
            manifestDigest,
            plan.Mode.ToString().ToLowerInvariant(),
            plan.BaseExportId,
            plan.BaseManifestDigest,
            plan.ResetBaseline,
            delta,
            domains);
        var indexBytes = JsonSerializer.SerializeToUtf8Bytes(indexDocument, SerializerOptions);
        var indexPath = Path.Combine(root, "index.json");
        await WriteFileAsync(indexPath, indexBytes, timestamp, cancellationToken).ConfigureAwait(false);
        logger.LogInformation(
            "Generated {DomainCount} Trivy DB mirror bundle(s) under {Directory}.",
            domains.Count,
            directoryName);
    }

    /// <summary>
    /// Aggregates advisory provenance into per-source summaries: each source's
    /// first/last recorded timestamps and the number of distinct advisories that
    /// carry at least one provenance entry from it. Sorted ordinally by source name.
    /// </summary>
    private static IReadOnlyList<TrivyMirrorSourceSummary> BuildSourceSummaries(IReadOnlyList<Advisory> advisories)
    {
        if (advisories.Count == 0)
        {
            return Array.Empty<TrivyMirrorSourceSummary>();
        }
        var builders = new Dictionary<string, SourceAccumulator>(StringComparer.OrdinalIgnoreCase);
        foreach (var advisory in advisories)
        {
            // Tracks which sources were already counted for THIS advisory so that
            // multiple provenance entries from one source count the advisory once.
            var counted = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
            foreach (var provenance in advisory.Provenance)
            {
                if (string.IsNullOrWhiteSpace(provenance.Source))
                {
                    continue;
                }
                var source = provenance.Source.Trim();
                if (!builders.TryGetValue(source, out var accumulator))
                {
                    accumulator = new SourceAccumulator();
                    builders[source] = accumulator;
                }
                // Timestamp window is updated for every provenance entry, not per advisory.
                accumulator.Record(provenance.RecordedAt);
                if (counted.Add(source))
                {
                    accumulator.Increment();
                }
            }
        }
        var entries = builders
            .Select(static pair => new TrivyMirrorSourceSummary(
                pair.Key,
                pair.Value.FirstRecordedAt,
                pair.Value.LastRecordedAt,
                pair.Value.Count))
            .OrderBy(static summary => summary.Source, StringComparer.Ordinal)
            .ToArray();
        return entries;
    }

    /// <summary>
    /// Streams the built database archive to <paramref name="destinationPath"/> and
    /// stamps the destination with the export timestamp.
    /// </summary>
    private static async Task CopyDatabaseAsync(
        string sourcePath,
        string destinationPath,
        DateTime timestamp,
        CancellationToken cancellationToken)
    {
        Directory.CreateDirectory(Path.GetDirectoryName(destinationPath)!);
        await using var source = new FileStream(
            sourcePath,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        await using var destination = new FileStream(
            destinationPath,
            FileMode.Create,
            FileAccess.Write,
            FileShare.None,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        await source.CopyToAsync(destination, cancellationToken).ConfigureAwait(false);
        await destination.FlushAsync(cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(destinationPath, timestamp);
    }

    /// <summary>
    /// Writes <paramref name="bytes"/> to <paramref name="path"/> (overwriting) and
    /// stamps the file with the export timestamp for reproducible output.
    /// </summary>
    private static async Task WriteFileAsync(
        string path,
        ReadOnlyMemory<byte> bytes,
        DateTime timestamp,
        CancellationToken cancellationToken)
    {
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);
        await using var stream = new FileStream(
            path,
            FileMode.Create,
            FileAccess.Write,
            FileShare.None,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        await stream.WriteAsync(bytes, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(path, timestamp);
    }

    /// <summary>
    /// Returns <paramref name="fullPath"/> relative to <paramref name="root"/> with
    /// forward slashes, so emitted manifests are platform-independent.
    /// </summary>
    private static string ToRelativePath(string root, string fullPath)
    {
        var relative = Path.GetRelativePath(root, fullPath);
        var normalized = relative.Replace(Path.DirectorySeparatorChar, '/');
        return string.IsNullOrEmpty(normalized) ? "." : normalized;
    }

    /// <summary>Computes the OCI-style <c>sha256:&lt;hex&gt;</c> digest of a payload.</summary>
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = SHA256.HashData(payload);
        var hex = Convert.ToHexString(hash).ToLowerInvariant();
        return $"sha256:{hex}";
    }

    /// <summary>
    /// Best-effort directory mtime stamping; some filesystems/permissions reject it
    /// and that must not fail the export.
    /// </summary>
    private static void TrySetDirectoryTimestamp(string directory, DateTime timestamp)
    {
        try
        {
            Directory.SetLastWriteTimeUtc(directory, timestamp);
        }
        catch
        {
            // Best effort ignore failures.
        }
    }

    /// <summary>Top-level mirror index document serialized to <c>mirror/index.json</c>.</summary>
    private sealed record MirrorIndexDocument(
        int SchemaVersion,
        DateTimeOffset GeneratedAt,
        string ExporterVersion,
        string? TargetRepository,
        string Reference,
        string ManifestDigest,
        string Mode,
        string? BaseExportId,
        string? BaseManifestDigest,
        bool ResetBaseline,
        MirrorDeltaMetadata? Delta,
        IReadOnlyList<MirrorIndexDomainEntry> Domains);

    /// <summary>Delta-export summary: files changed since the baseline and removed paths.</summary>
    private sealed record MirrorDeltaMetadata(
        IReadOnlyList<MirrorDeltaFile> ChangedFiles,
        IReadOnlyList<string> RemovedPaths);

    /// <summary>A single changed file in a delta export.</summary>
    private sealed record MirrorDeltaFile(string Path, string Digest);

    /// <summary>Per-domain entry in the mirror index, pointing at the bundle's files.</summary>
    private sealed record MirrorIndexDomainEntry(
        string DomainId,
        string DisplayName,
        int AdvisoryCount,
        MirrorFileDescriptor Manifest,
        MirrorFileDescriptor Metadata,
        MirrorFileDescriptor Database,
        IReadOnlyList<TrivyMirrorSourceSummary> Sources);

    /// <summary>Per-domain manifest document serialized to <c>&lt;domain&gt;/manifest.json</c>.</summary>
    private sealed record MirrorDomainManifestDocument(
        int SchemaVersion,
        DateTimeOffset GeneratedAt,
        string ExporterVersion,
        string Reference,
        string ManifestDigest,
        string? TargetRepository,
        string DomainId,
        string DisplayName,
        string Mode,
        string? BaseExportId,
        string? BaseManifestDigest,
        bool ResetBaseline,
        MirrorFileDescriptor Metadata,
        MirrorFileDescriptor Database,
        IReadOnlyList<TrivyMirrorSourceSummary> Sources);

    /// <summary>Layout-relative path, size, and SHA-256 digest of one emitted file.</summary>
    private sealed record MirrorFileDescriptor(string Path, long SizeBytes, string Digest);

    /// <summary>Summary of one provenance source across the exported advisory set.</summary>
    private sealed record TrivyMirrorSourceSummary(
        string Source,
        DateTimeOffset? FirstRecordedAt,
        DateTimeOffset? LastRecordedAt,
        int AdvisoryCount);

    /// <summary>Mutable accumulator for a source's timestamp window and advisory count.</summary>
    private sealed class SourceAccumulator
    {
        // Earliest provenance timestamp seen for this source (UTC).
        public DateTimeOffset? FirstRecordedAt { get; private set; }
        // Latest provenance timestamp seen for this source (UTC).
        public DateTimeOffset? LastRecordedAt { get; private set; }
        // Number of distinct advisories attributed to this source.
        public int Count { get; private set; }

        // Folds a provenance timestamp (normalized to UTC) into the first/last window.
        public void Record(DateTimeOffset recordedAt)
        {
            var utc = recordedAt.ToUniversalTime();
            if (FirstRecordedAt is null || utc < FirstRecordedAt.Value)
            {
                FirstRecordedAt = utc;
            }
            if (LastRecordedAt is null || utc > LastRecordedAt.Value)
            {
                LastRecordedAt = utc;
            }
        }

        // Counts one more distinct advisory for this source.
        public void Increment() => Count++;
    }
}

View File

@@ -1,8 +1,11 @@
using System.Collections.Concurrent;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Driver;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
@@ -35,7 +38,8 @@ public sealed class AdvisoryMergeServiceTests
var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
var aliasResolver = new AliasGraphResolver(aliasStore);
var canonicalMerger = new CanonicalMerger(timeProvider);
var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, NullLogger<AdvisoryMergeService>.Instance);
var eventLog = new RecordingAdvisoryEventLog();
var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger<AdvisoryMergeService>.Instance);
var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None);
@@ -52,6 +56,11 @@ public sealed class AdvisoryMergeServiceTests
var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary");
Assert.Equal("osv", summaryDecision.SelectedSource);
Assert.Equal("freshness_override", summaryDecision.DecisionReason);
var appendRequest = eventLog.LastRequest;
Assert.NotNull(appendRequest);
Assert.Contains(appendRequest!.Statements, statement => string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase));
Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0);
}
private static Advisory CreateGhsaAdvisory()
@@ -114,6 +123,23 @@ public sealed class AdvisoryMergeServiceTests
provenance: new[] { provenance });
}
/// <summary>
/// Test stub that captures the most recent append request and rejects replays.
/// </summary>
private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog
{
    /// <summary>The last request passed to <see cref="AppendAsync"/>, or null if none yet.</summary>
    public AdvisoryEventAppendRequest? LastRequest { get; private set; }

    public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
    {
        // Record the request for later assertions; no real persistence happens.
        LastRequest = request;
        return ValueTask.CompletedTask;
    }

    // Replay is out of scope for these tests.
    public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
        => throw new NotSupportedException();
}
private sealed class FakeAliasStore : IAliasStore
{
private readonly ConcurrentDictionary<string, List<AliasRecord>> _records = new(StringComparer.OrdinalIgnoreCase);
@@ -130,7 +156,9 @@ public sealed class AdvisoryMergeServiceTests
}
public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken)
=> Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty<AliasCollision>()));
{
return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty<AliasCollision>()));
}
public Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken)
{
@@ -206,6 +234,8 @@ public sealed class AdvisoryMergeServiceTests
}
public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
=> Task.FromResult<IReadOnlyList<MergeEventRecord>>(Array.Empty<MergeEventRecord>());
{
return Task.FromResult<IReadOnlyList<MergeEventRecord>>(Array.Empty<MergeEventRecord>());
}
}
}

View File

@@ -1,70 +1,70 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Tests;
public sealed class AdvisoryPrecedenceMergerTests
{
[Fact]
public void Merge_PrefersVendorPrecedenceOverNvd()
{
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
var merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
using var metrics = new MetricCollector("StellaOps.Concelier.Merge");
var (redHat, nvd) = CreateVendorAndRegistryAdvisories();
var expectedMergeTimestamp = timeProvider.GetUtcNow();
var merged = merger.Merge(new[] { nvd, redHat });
Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
Assert.Equal("Red Hat Security Advisory", merged.Title);
Assert.Equal("Vendor-confirmed impact on RHEL 9.", merged.Summary);
Assert.Equal("high", merged.Severity);
Assert.Equal(redHat.Published, merged.Published);
Assert.Equal(redHat.Modified, merged.Modified);
Assert.Contains("RHSA-2025:0001", merged.Aliases);
Assert.Contains("CVE-2025-1000", merged.Aliases);
var package = Assert.Single(merged.AffectedPackages);
Assert.Equal("cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*", package.Identifier);
Assert.Empty(package.VersionRanges); // NVD range suppressed by vendor precedence
Assert.Contains(package.Statuses, status => status.Status == "known_affected");
Assert.Contains(package.Provenance, provenance => provenance.Source == "redhat");
Assert.Contains(package.Provenance, provenance => provenance.Source == "nvd");
Assert.Contains(merged.CvssMetrics, metric => metric.Provenance.Source == "redhat");
Assert.Contains(merged.CvssMetrics, metric => metric.Provenance.Source == "nvd");
var mergeProvenance = merged.Provenance.Single(p => p.Source == "merge");
Assert.Equal("precedence", mergeProvenance.Kind);
Assert.Equal(expectedMergeTimestamp, mergeProvenance.RecordedAt);
Assert.Contains("redhat", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
Assert.Contains("nvd", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
var rangeMeasurement = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.range_overrides");
Assert.Equal(1, rangeMeasurement.Value);
Assert.Contains(rangeMeasurement.Tags, tag => string.Equals(tag.Key, "suppressed_source", StringComparison.Ordinal) && tag.Value?.ToString()?.Contains("nvd", StringComparison.OrdinalIgnoreCase) == true);
var severityConflict = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.conflicts");
Assert.Equal(1, severityConflict.Value);
Assert.Contains(severityConflict.Tags, tag => string.Equals(tag.Key, "type", StringComparison.Ordinal) && string.Equals(tag.Value?.ToString(), "severity", StringComparison.OrdinalIgnoreCase));
}
[Fact]
public void Merge_KevOnlyTogglesExploitKnown()
{
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 2, 1, 0, 0, 0, TimeSpan.Zero));
var merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
var nvdProvenance = new AdvisoryProvenance("nvd", "document", "https://nvd", timeProvider.GetUtcNow());
var baseAdvisory = new Advisory(
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Tests;
public sealed class AdvisoryPrecedenceMergerTests
{
[Fact]
public void Merge_PrefersVendorPrecedenceOverNvd()
{
    // Arrange: frozen clock so the merge-provenance RecordedAt is predictable.
    var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
    var merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
    // Capture counters emitted under the "StellaOps.Concelier.Merge" meter during the merge.
    using var metrics = new MetricCollector("StellaOps.Concelier.Merge");
    var (redHat, nvd) = CreateVendorAndRegistryAdvisories();
    var expectedMergeTimestamp = timeProvider.GetUtcNow();

    // Act: input order is NVD first, vendor second — precedence, not order, must decide.
    var merged = merger.Merge(new[] { nvd, redHat }).Advisory;

    // Vendor (Red Hat) fields win over the NVD registry record.
    Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
    Assert.Equal("Red Hat Security Advisory", merged.Title);
    Assert.Equal("Vendor-confirmed impact on RHEL 9.", merged.Summary);
    Assert.Equal("high", merged.Severity);
    Assert.Equal(redHat.Published, merged.Published);
    Assert.Equal(redHat.Modified, merged.Modified);
    // Aliases from both inputs are unioned.
    Assert.Contains("RHSA-2025:0001", merged.Aliases);
    Assert.Contains("CVE-2025-1000", merged.Aliases);

    // Both inputs target the same CPE, so they collapse into a single package entry.
    var package = Assert.Single(merged.AffectedPackages);
    Assert.Equal("cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*", package.Identifier);
    Assert.Empty(package.VersionRanges); // NVD range suppressed by vendor precedence
    Assert.Contains(package.Statuses, status => status.Status == "known_affected");
    // Provenance and CVSS metrics from BOTH sources are retained even when one wins.
    Assert.Contains(package.Provenance, provenance => provenance.Source == "redhat");
    Assert.Contains(package.Provenance, provenance => provenance.Source == "nvd");
    Assert.Contains(merged.CvssMetrics, metric => metric.Provenance.Source == "redhat");
    Assert.Contains(merged.CvssMetrics, metric => metric.Provenance.Source == "nvd");

    // The merge stamps its own provenance entry describing the precedence decision.
    var mergeProvenance = merged.Provenance.Single(p => p.Source == "merge");
    Assert.Equal("precedence", mergeProvenance.Kind);
    Assert.Equal(expectedMergeTimestamp, mergeProvenance.RecordedAt);
    Assert.Contains("redhat", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
    Assert.Contains("nvd", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);

    // Telemetry: one range-override event (NVD range dropped) tagged with the suppressed source…
    var rangeMeasurement = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.range_overrides");
    Assert.Equal(1, rangeMeasurement.Value);
    Assert.Contains(rangeMeasurement.Tags, tag => string.Equals(tag.Key, "suppressed_source", StringComparison.Ordinal) && tag.Value?.ToString()?.Contains("nvd", StringComparison.OrdinalIgnoreCase) == true);
    // …and one severity conflict ("high" vs "medium") recorded as a merge conflict.
    var severityConflict = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.conflicts");
    Assert.Equal(1, severityConflict.Value);
    Assert.Contains(severityConflict.Tags, tag => string.Equals(tag.Key, "type", StringComparison.Ordinal) && string.Equals(tag.Value?.ToString(), "severity", StringComparison.OrdinalIgnoreCase));
}
[Fact]
public void Merge_KevOnlyTogglesExploitKnown()
{
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 2, 1, 0, 0, 0, TimeSpan.Zero));
var merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
var nvdProvenance = new AdvisoryProvenance("nvd", "document", "https://nvd", timeProvider.GetUtcNow());
var baseAdvisory = new Advisory(
"CVE-2025-2000",
"CVE-2025-2000",
"Base registry summary",
@@ -76,52 +76,52 @@ public sealed class AdvisoryPrecedenceMergerTests
aliases: new[] { "CVE-2025-2000" },
credits: Array.Empty<AdvisoryCredit>(),
references: Array.Empty<AdvisoryReference>(),
affectedPackages: new[]
{
new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:a:example:product:2.0:*:*:*:*:*:*:*",
null,
new[]
{
new AffectedVersionRange(
"semver",
"2.0.0",
"2.0.5",
null,
"<2.0.5",
new AdvisoryProvenance("nvd", "cpe_match", "product", timeProvider.GetUtcNow()))
},
Array.Empty<AffectedPackageStatus>(),
new[] { nvdProvenance })
},
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { nvdProvenance });
var kevProvenance = new AdvisoryProvenance("kev", "catalog", "CVE-2025-2000", timeProvider.GetUtcNow());
var kevAdvisory = new Advisory(
"CVE-2025-2000",
"Known Exploited Vulnerability",
summary: null,
language: null,
published: null,
modified: null,
affectedPackages: new[]
{
new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:a:example:product:2.0:*:*:*:*:*:*:*",
null,
new[]
{
new AffectedVersionRange(
"semver",
"2.0.0",
"2.0.5",
null,
"<2.0.5",
new AdvisoryProvenance("nvd", "cpe_match", "product", timeProvider.GetUtcNow()))
},
Array.Empty<AffectedPackageStatus>(),
new[] { nvdProvenance })
},
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { nvdProvenance });
var kevProvenance = new AdvisoryProvenance("kev", "catalog", "CVE-2025-2000", timeProvider.GetUtcNow());
var kevAdvisory = new Advisory(
"CVE-2025-2000",
"Known Exploited Vulnerability",
summary: null,
language: null,
published: null,
modified: null,
severity: null,
exploitKnown: true,
aliases: new[] { "KEV-CVE-2025-2000" },
credits: Array.Empty<AdvisoryCredit>(),
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { kevProvenance });
var merged = merger.Merge(new[] { baseAdvisory, kevAdvisory });
Assert.True(merged.ExploitKnown);
Assert.Equal("medium", merged.Severity); // KEV must not override severity
Assert.Equal("Base registry summary", merged.Summary);
Assert.Contains("CVE-2025-2000", merged.Aliases);
Assert.Contains("KEV-CVE-2025-2000", merged.Aliases);
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { kevProvenance });
var merged = merger.Merge(new[] { baseAdvisory, kevAdvisory }).Advisory;
Assert.True(merged.ExploitKnown);
Assert.Equal("medium", merged.Severity); // KEV must not override severity
Assert.Equal("Base registry summary", merged.Summary);
Assert.Contains("CVE-2025-2000", merged.Aliases);
Assert.Contains("KEV-CVE-2025-2000", merged.Aliases);
Assert.Contains(merged.Provenance, provenance => provenance.Source == "kev");
Assert.Contains(merged.Provenance, provenance => provenance.Source == "merge");
}
@@ -212,7 +212,7 @@ public sealed class AdvisoryPrecedenceMergerTests
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { new AdvisoryProvenance("osv", "document", "https://osv.dev/vulnerability/CVE-2025-9000", timeProvider.GetUtcNow(), new[] { ProvenanceFieldMasks.Advisory }) });
var merged = merger.Merge(new[] { ghsa, osv });
var merged = merger.Merge(new[] { ghsa, osv }).Advisory;
Assert.Equal("CVE-2025-9000", merged.AdvisoryKey);
Assert.Contains(merged.Credits, credit =>
@@ -311,7 +311,7 @@ public sealed class AdvisoryPrecedenceMergerTests
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { acscDocumentProvenance });
var merged = merger.Merge(new[] { acsc, vendor });
var merged = merger.Merge(new[] { acsc, vendor }).Advisory;
Assert.Equal("critical", merged.Severity); // ACSC must not override vendor severity
Assert.Equal("Vendor-confirmed exploit.", merged.Summary);
@@ -450,7 +450,7 @@ public sealed class AdvisoryPrecedenceMergerTests
new AdvisoryProvenance("nvd", "document", "https://nvd.nist.gov/vuln/detail/CVE-2025-7000", now),
});
var merged = merger.Merge(new[] { nvdAdvisory, ghsaAdvisory });
var merged = merger.Merge(new[] { nvdAdvisory, ghsaAdvisory }).Advisory;
Assert.Equal(2, merged.AffectedPackages.Length);
var normalizedPackage = Assert.Single(merged.AffectedPackages, pkg => pkg.Identifier == "pkg:npm/example");
@@ -474,72 +474,72 @@ public sealed class AdvisoryPrecedenceMergerTests
[Fact]
public void Merge_RespectsConfiguredPrecedenceOverrides()
{
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 3, 1, 0, 0, 0, TimeSpan.Zero));
var options = new AdvisoryPrecedenceOptions
{
Ranks = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
{
["nvd"] = 0,
["redhat"] = 5,
}
};
var logger = new TestLogger<AdvisoryPrecedenceMerger>();
using var metrics = new MetricCollector("StellaOps.Concelier.Merge");
var merger = new AdvisoryPrecedenceMerger(
new AffectedPackagePrecedenceResolver(),
options,
timeProvider,
logger);
var (redHat, nvd) = CreateVendorAndRegistryAdvisories();
var merged = merger.Merge(new[] { redHat, nvd });
Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
Assert.Equal("CVE-2025-1000", merged.Title); // NVD preferred
Assert.Equal("NVD summary", merged.Summary);
Assert.Equal("medium", merged.Severity);
var package = Assert.Single(merged.AffectedPackages);
Assert.NotEmpty(package.VersionRanges); // Vendor range no longer overrides
Assert.Contains(package.Provenance, provenance => provenance.Source == "nvd");
Assert.Contains(package.Provenance, provenance => provenance.Source == "redhat");
var overrideMeasurement = Assert.Single(metrics.Measurements, m => m.Name == "concelier.merge.overrides");
Assert.Equal(1, overrideMeasurement.Value);
Assert.Contains(overrideMeasurement.Tags, tag => tag.Key == "primary_source" && string.Equals(tag.Value?.ToString(), "nvd", StringComparison.OrdinalIgnoreCase));
Assert.Contains(overrideMeasurement.Tags, tag => tag.Key == "suppressed_source" && tag.Value?.ToString()?.Contains("redhat", StringComparison.OrdinalIgnoreCase) == true);
Assert.DoesNotContain(metrics.Measurements, measurement => measurement.Name == "concelier.merge.range_overrides");
var conflictMeasurement = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.conflicts");
Assert.Equal(1, conflictMeasurement.Value);
Assert.Contains(conflictMeasurement.Tags, tag => tag.Key == "type" && string.Equals(tag.Value?.ToString(), "severity", StringComparison.OrdinalIgnoreCase));
Assert.Contains(conflictMeasurement.Tags, tag => tag.Key == "reason" && string.Equals(tag.Value?.ToString(), "mismatch", StringComparison.OrdinalIgnoreCase));
var logEntry = Assert.Single(logger.Entries, entry => entry.EventId.Name == "AdvisoryOverride");
Assert.Equal(LogLevel.Information, logEntry.Level);
Assert.NotNull(logEntry.StructuredState);
Assert.Contains(logEntry.StructuredState!, kvp =>
(string.Equals(kvp.Key, "Override", StringComparison.Ordinal) ||
string.Equals(kvp.Key, "@Override", StringComparison.Ordinal)) &&
kvp.Value is not null);
}
private static (Advisory Vendor, Advisory Registry) CreateVendorAndRegistryAdvisories()
{
var redHatPublished = new DateTimeOffset(2025, 1, 10, 0, 0, 0, TimeSpan.Zero);
var redHatModified = redHatPublished.AddDays(1);
var redHatProvenance = new AdvisoryProvenance("redhat", "advisory", "RHSA-2025:0001", redHatModified);
var redHatPackage = new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*",
"rhel-9",
Array.Empty<AffectedVersionRange>(),
new[] { new AffectedPackageStatus("known_affected", redHatProvenance) },
new[] { redHatProvenance });
{
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 3, 1, 0, 0, 0, TimeSpan.Zero));
var options = new AdvisoryPrecedenceOptions
{
Ranks = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
{
["nvd"] = 0,
["redhat"] = 5,
}
};
var logger = new TestLogger<AdvisoryPrecedenceMerger>();
using var metrics = new MetricCollector("StellaOps.Concelier.Merge");
var merger = new AdvisoryPrecedenceMerger(
new AffectedPackagePrecedenceResolver(),
options,
timeProvider,
logger);
var (redHat, nvd) = CreateVendorAndRegistryAdvisories();
var merged = merger.Merge(new[] { redHat, nvd }).Advisory;
Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
Assert.Equal("CVE-2025-1000", merged.Title); // NVD preferred
Assert.Equal("NVD summary", merged.Summary);
Assert.Equal("medium", merged.Severity);
var package = Assert.Single(merged.AffectedPackages);
Assert.NotEmpty(package.VersionRanges); // Vendor range no longer overrides
Assert.Contains(package.Provenance, provenance => provenance.Source == "nvd");
Assert.Contains(package.Provenance, provenance => provenance.Source == "redhat");
var overrideMeasurement = Assert.Single(metrics.Measurements, m => m.Name == "concelier.merge.overrides");
Assert.Equal(1, overrideMeasurement.Value);
Assert.Contains(overrideMeasurement.Tags, tag => tag.Key == "primary_source" && string.Equals(tag.Value?.ToString(), "nvd", StringComparison.OrdinalIgnoreCase));
Assert.Contains(overrideMeasurement.Tags, tag => tag.Key == "suppressed_source" && tag.Value?.ToString()?.Contains("redhat", StringComparison.OrdinalIgnoreCase) == true);
Assert.DoesNotContain(metrics.Measurements, measurement => measurement.Name == "concelier.merge.range_overrides");
var conflictMeasurement = Assert.Single(metrics.Measurements, measurement => measurement.Name == "concelier.merge.conflicts");
Assert.Equal(1, conflictMeasurement.Value);
Assert.Contains(conflictMeasurement.Tags, tag => tag.Key == "type" && string.Equals(tag.Value?.ToString(), "severity", StringComparison.OrdinalIgnoreCase));
Assert.Contains(conflictMeasurement.Tags, tag => tag.Key == "reason" && string.Equals(tag.Value?.ToString(), "mismatch", StringComparison.OrdinalIgnoreCase));
var logEntry = Assert.Single(logger.Entries, entry => entry.EventId.Name == "AdvisoryOverride");
Assert.Equal(LogLevel.Information, logEntry.Level);
Assert.NotNull(logEntry.StructuredState);
Assert.Contains(logEntry.StructuredState!, kvp =>
(string.Equals(kvp.Key, "Override", StringComparison.Ordinal) ||
string.Equals(kvp.Key, "@Override", StringComparison.Ordinal)) &&
kvp.Value is not null);
}
private static (Advisory Vendor, Advisory Registry) CreateVendorAndRegistryAdvisories()
{
var redHatPublished = new DateTimeOffset(2025, 1, 10, 0, 0, 0, TimeSpan.Zero);
var redHatModified = redHatPublished.AddDays(1);
var redHatProvenance = new AdvisoryProvenance("redhat", "advisory", "RHSA-2025:0001", redHatModified);
var redHatPackage = new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*",
"rhel-9",
Array.Empty<AffectedVersionRange>(),
new[] { new AffectedPackageStatus("known_affected", redHatProvenance) },
new[] { redHatProvenance });
var redHat = new Advisory(
"CVE-2025-1000",
"Red Hat Security Advisory",
@@ -554,43 +554,43 @@ public sealed class AdvisoryPrecedenceMergerTests
references: new[]
{
new AdvisoryReference(
"https://access.redhat.com/errata/RHSA-2025:0001",
"advisory",
"redhat",
"Red Hat errata",
redHatProvenance)
},
affectedPackages: new[] { redHatPackage },
cvssMetrics: new[]
{
new CvssMetric(
"3.1",
"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
9.8,
"critical",
new AdvisoryProvenance("redhat", "cvss", "RHSA-2025:0001", redHatModified))
},
provenance: new[] { redHatProvenance });
var nvdPublished = new DateTimeOffset(2025, 1, 5, 0, 0, 0, TimeSpan.Zero);
var nvdModified = nvdPublished.AddDays(2);
var nvdProvenance = new AdvisoryProvenance("nvd", "document", "https://nvd.nist.gov/vuln/detail/CVE-2025-1000", nvdModified);
var nvdPackage = new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*",
"rhel-9",
new[]
{
new AffectedVersionRange(
"cpe",
null,
null,
null,
"<=9.0",
new AdvisoryProvenance("nvd", "cpe_match", "RHEL", nvdModified))
},
Array.Empty<AffectedPackageStatus>(),
new[] { nvdProvenance });
"https://access.redhat.com/errata/RHSA-2025:0001",
"advisory",
"redhat",
"Red Hat errata",
redHatProvenance)
},
affectedPackages: new[] { redHatPackage },
cvssMetrics: new[]
{
new CvssMetric(
"3.1",
"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
9.8,
"critical",
new AdvisoryProvenance("redhat", "cvss", "RHSA-2025:0001", redHatModified))
},
provenance: new[] { redHatProvenance });
var nvdPublished = new DateTimeOffset(2025, 1, 5, 0, 0, 0, TimeSpan.Zero);
var nvdModified = nvdPublished.AddDays(2);
var nvdProvenance = new AdvisoryProvenance("nvd", "document", "https://nvd.nist.gov/vuln/detail/CVE-2025-1000", nvdModified);
var nvdPackage = new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:redhat:enterprise_linux:9:*:*:*:*:*:*:*",
"rhel-9",
new[]
{
new AffectedVersionRange(
"cpe",
null,
null,
null,
"<=9.0",
new AdvisoryProvenance("nvd", "cpe_match", "RHEL", nvdModified))
},
Array.Empty<AffectedPackageStatus>(),
new[] { nvdProvenance });
var nvd = new Advisory(
"CVE-2025-1000",
"CVE-2025-1000",
@@ -605,24 +605,24 @@ public sealed class AdvisoryPrecedenceMergerTests
references: new[]
{
new AdvisoryReference(
"https://nvd.nist.gov/vuln/detail/CVE-2025-1000",
"advisory",
"nvd",
"NVD advisory",
nvdProvenance)
},
affectedPackages: new[] { nvdPackage },
cvssMetrics: new[]
{
new CvssMetric(
"3.1",
"CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:H/A:N",
6.8,
"medium",
new AdvisoryProvenance("nvd", "cvss", "CVE-2025-1000", nvdModified))
},
provenance: new[] { nvdProvenance });
return (redHat, nvd);
}
}
"https://nvd.nist.gov/vuln/detail/CVE-2025-1000",
"advisory",
"nvd",
"NVD advisory",
nvdProvenance)
},
affectedPackages: new[] { nvdPackage },
cvssMetrics: new[]
{
new CvssMetric(
"3.1",
"CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:U/C:H/I:H/A:N",
6.8,
"medium",
new AdvisoryProvenance("nvd", "cvss", "CVE-2025-1000", nvdModified))
},
provenance: new[] { nvdProvenance });
return (redHat, nvd);
}
}

View File

@@ -1,231 +1,234 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Driver;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using StellaOps.Concelier.Testing;
namespace StellaOps.Concelier.Merge.Tests;
[Collection("mongo-fixture")]
public sealed class MergePrecedenceIntegrationTests : IAsyncLifetime
{
private readonly MongoIntegrationFixture _fixture;
private MergeEventStore? _mergeEventStore;
private MergeEventWriter? _mergeEventWriter;
private AdvisoryPrecedenceMerger? _merger;
private FakeTimeProvider? _timeProvider;
public MergePrecedenceIntegrationTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}
[Fact]
public async Task MergePipeline_PsirtOverridesNvd_AndKevOnlyTogglesExploitKnown()
{
await EnsureInitializedAsync();
var merger = _merger!;
var writer = _mergeEventWriter!;
var store = _mergeEventStore!;
var timeProvider = _timeProvider!;
var expectedTimestamp = timeProvider.GetUtcNow();
var nvd = CreateNvdBaseline();
var vendor = CreateVendorOverride();
var kev = CreateKevSignal();
var merged = merger.Merge(new[] { nvd, vendor, kev });
Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
Assert.Equal("Vendor Security Advisory", merged.Title);
Assert.Equal("Critical impact on supported platforms.", merged.Summary);
Assert.Equal("critical", merged.Severity);
Assert.True(merged.ExploitKnown);
var affected = Assert.Single(merged.AffectedPackages);
Assert.Empty(affected.VersionRanges);
Assert.Contains(affected.Statuses, status => status.Status == "known_affected" && status.Provenance.Source == "vendor");
var mergeProvenance = Assert.Single(merged.Provenance, p => p.Source == "merge");
Assert.Equal("precedence", mergeProvenance.Kind);
Assert.Equal(expectedTimestamp, mergeProvenance.RecordedAt);
Assert.Contains("vendor", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
Assert.Contains("kev", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
var inputDocumentIds = new[] { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() };
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Driver;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using StellaOps.Concelier.Testing;
namespace StellaOps.Concelier.Merge.Tests;
[Collection("mongo-fixture")]
public sealed class MergePrecedenceIntegrationTests : IAsyncLifetime
{
private readonly MongoIntegrationFixture _fixture;
private MergeEventStore? _mergeEventStore;
private MergeEventWriter? _mergeEventWriter;
private AdvisoryPrecedenceMerger? _merger;
private FakeTimeProvider? _timeProvider;
public MergePrecedenceIntegrationTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}
[Fact]
public async Task MergePipeline_PsirtOverridesNvd_AndKevOnlyTogglesExploitKnown()
{
    // Lazy-init because xUnit's IAsyncLifetime may not have run yet on this path.
    await EnsureInitializedAsync();
    var merger = _merger!;
    var writer = _mergeEventWriter!;
    var store = _mergeEventStore!;
    var timeProvider = _timeProvider!;
    var expectedTimestamp = timeProvider.GetUtcNow();

    // Three sources for the same CVE: registry baseline, vendor PSIRT, KEV catalog entry.
    var nvd = CreateNvdBaseline();
    var vendor = CreateVendorOverride();
    var kev = CreateKevSignal();
    var merged = merger.Merge(new[] { nvd, vendor, kev }).Advisory;

    // Vendor PSIRT wins the descriptive fields; KEV contributes only the exploit flag.
    Assert.Equal("CVE-2025-1000", merged.AdvisoryKey);
    Assert.Equal("Vendor Security Advisory", merged.Title);
    Assert.Equal("Critical impact on supported platforms.", merged.Summary);
    Assert.Equal("critical", merged.Severity);
    Assert.True(merged.ExploitKnown);
    var affected = Assert.Single(merged.AffectedPackages);
    Assert.Empty(affected.VersionRanges);
    Assert.Contains(affected.Statuses, status => status.Status == "known_affected" && status.Provenance.Source == "vendor");
    // Merge provenance is stamped with the frozen clock and names the contributing sources.
    var mergeProvenance = Assert.Single(merged.Provenance, p => p.Source == "merge");
    Assert.Equal("precedence", mergeProvenance.Kind);
    Assert.Equal(expectedTimestamp, mergeProvenance.RecordedAt);
    Assert.Contains("vendor", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);
    Assert.Contains("kev", mergeProvenance.Value, StringComparison.OrdinalIgnoreCase);

    // Persist a merge event (before = NVD baseline, after = merged) and verify round-trip via Mongo.
    var inputDocumentIds = new[] { Guid.NewGuid(), Guid.NewGuid(), Guid.NewGuid() };
    var record = await writer.AppendAsync(merged.AdvisoryKey, nvd, merged, inputDocumentIds, Array.Empty<MergeFieldDecision>(), CancellationToken.None);
    Assert.Equal(expectedTimestamp, record.MergedAt);
    Assert.Equal(inputDocumentIds, record.InputDocumentIds);
    // Merge changed the advisory, so the canonical before/after hashes must differ.
    Assert.NotEqual(record.BeforeHash, record.AfterHash);
    var records = await store.GetRecentAsync(merged.AdvisoryKey, 5, CancellationToken.None);
    var persisted = Assert.Single(records);
    Assert.Equal(record.Id, persisted.Id);
    Assert.Equal(merged.AdvisoryKey, persisted.AdvisoryKey);
    Assert.True(persisted.AfterHash.Length > 0);
    Assert.True(persisted.BeforeHash.Length > 0);
}
[Fact]
public async Task MergePipeline_IsDeterministicAcrossRuns()
{
await EnsureInitializedAsync();
var merger = _merger!;
var calculator = new CanonicalHashCalculator();
var first = merger.Merge(new[] { CreateNvdBaseline(), CreateVendorOverride() });
var second = merger.Merge(new[] { CreateNvdBaseline(), CreateVendorOverride() });
var firstHash = calculator.ComputeHash(first);
var secondHash = calculator.ComputeHash(second);
Assert.Equal(firstHash, secondHash);
Assert.Equal(first.AdvisoryKey, second.AdvisoryKey);
Assert.Equal(first.Aliases.Length, second.Aliases.Length);
Assert.True(first.Aliases.SequenceEqual(second.Aliases));
}
public async Task InitializeAsync()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 3, 1, 0, 0, 0, TimeSpan.Zero))
{
AutoAdvanceAmount = TimeSpan.Zero,
};
_merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), _timeProvider);
_mergeEventStore = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);
_mergeEventWriter = new MergeEventWriter(_mergeEventStore, new CanonicalHashCalculator(), _timeProvider, NullLogger<MergeEventWriter>.Instance);
await DropMergeCollectionAsync();
}
public Task DisposeAsync() => Task.CompletedTask;
private async Task EnsureInitializedAsync()
{
if (_mergeEventWriter is null)
{
await InitializeAsync();
}
}
private async Task DropMergeCollectionAsync()
{
try
{
await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.MergeEvent);
}
catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
{
// Collection has not been created yet safe to ignore.
}
}
private static Advisory CreateNvdBaseline()
{
var provenance = new AdvisoryProvenance("nvd", "document", "https://nvd.nist.gov/vuln/detail/CVE-2025-1000", DateTimeOffset.Parse("2025-02-10T00:00:00Z"));
return new Advisory(
"CVE-2025-1000",
"CVE-2025-1000",
"Baseline description from NVD.",
"en",
DateTimeOffset.Parse("2025-02-05T00:00:00Z"),
DateTimeOffset.Parse("2025-02-10T12:00:00Z"),
"medium",
exploitKnown: false,
aliases: new[] { "CVE-2025-1000" },
references: new[]
{
new AdvisoryReference("https://nvd.nist.gov/vuln/detail/CVE-2025-1000", "advisory", "nvd", "NVD reference", provenance),
},
affectedPackages: new[]
{
new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:vendor:product:1.0:*:*:*:*:*:*:*",
"vendor-os",
new[]
{
new AffectedVersionRange(
rangeKind: "cpe",
introducedVersion: null,
fixedVersion: null,
lastAffectedVersion: null,
rangeExpression: "<=1.0",
provenance: provenance)
},
Array.Empty<AffectedPackageStatus>(),
new[] { provenance })
},
cvssMetrics: new[]
{
new CvssMetric("3.1", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", 9.8, "critical", provenance)
},
provenance: new[] { provenance });
}
private static Advisory CreateVendorOverride()
{
var provenance = new AdvisoryProvenance("vendor", "psirt", "VSA-2025-1000", DateTimeOffset.Parse("2025-02-11T00:00:00Z"));
return new Advisory(
"CVE-2025-1000",
"Vendor Security Advisory",
"Critical impact on supported platforms.",
"en",
DateTimeOffset.Parse("2025-02-06T00:00:00Z"),
DateTimeOffset.Parse("2025-02-11T06:00:00Z"),
"critical",
exploitKnown: false,
aliases: new[] { "CVE-2025-1000", "VSA-2025-1000" },
references: new[]
{
new AdvisoryReference("https://vendor.example/advisories/VSA-2025-1000", "advisory", "vendor", "Vendor advisory", provenance),
},
affectedPackages: new[]
{
new AffectedPackage(
AffectedPackageTypes.Cpe,
"cpe:2.3:o:vendor:product:1.0:*:*:*:*:*:*:*",
"vendor-os",
Array.Empty<AffectedVersionRange>(),
new[]
{
new AffectedPackageStatus("known_affected", provenance)
},
new[] { provenance })
},
cvssMetrics: new[]
{
new CvssMetric("3.1", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", 10.0, "critical", provenance)
},
provenance: new[] { provenance });
}
private static Advisory CreateKevSignal()
{
var provenance = new AdvisoryProvenance("kev", "catalog", "CVE-2025-1000", DateTimeOffset.Parse("2025-02-12T00:00:00Z"));
return new Advisory(
"CVE-2025-1000",
"Known Exploited Vulnerability",
null,
null,
published: null,
modified: null,
severity: null,
exploitKnown: true,
aliases: new[] { "KEV-CVE-2025-1000" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { provenance });
}
}
Assert.Equal(expectedTimestamp, record.MergedAt);
Assert.Equal(inputDocumentIds, record.InputDocumentIds);
Assert.NotEqual(record.BeforeHash, record.AfterHash);
var records = await store.GetRecentAsync(merged.AdvisoryKey, 5, CancellationToken.None);
var persisted = Assert.Single(records);
Assert.Equal(record.Id, persisted.Id);
Assert.Equal(merged.AdvisoryKey, persisted.AdvisoryKey);
Assert.True(persisted.AfterHash.Length > 0);
Assert.True(persisted.BeforeHash.Length > 0);
}
[Fact]
public async Task MergePipeline_IsDeterministicAcrossRuns()
{
    await EnsureInitializedAsync();

    var merger = _merger!;
    var hashCalculator = new CanonicalHashCalculator();

    // Execute the identical merge twice, each time from freshly constructed inputs.
    var runOne = merger.Merge(new[] { CreateNvdBaseline(), CreateVendorOverride() }).Advisory;
    var runTwo = merger.Merge(new[] { CreateNvdBaseline(), CreateVendorOverride() }).Advisory;

    // Canonical hashes and key identity fields must be byte-for-byte stable across runs.
    Assert.Equal(hashCalculator.ComputeHash(runOne), hashCalculator.ComputeHash(runTwo));
    Assert.Equal(runOne.AdvisoryKey, runTwo.AdvisoryKey);
    Assert.Equal(runOne.Aliases.Length, runTwo.Aliases.Length);
    Assert.True(runOne.Aliases.SequenceEqual(runTwo.Aliases));
}
/// <summary>
/// xUnit IAsyncLifetime setup: builds the merger and Mongo-backed merge-event
/// store/writer against a frozen clock, then clears the merge-event collection
/// so each test run starts from an empty state.
/// </summary>
public async Task InitializeAsync()
{
    // Frozen clock; AutoAdvanceAmount = Zero keeps every GetUtcNow() call identical.
    _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 3, 1, 0, 0, 0, TimeSpan.Zero))
    {
        AutoAdvanceAmount = TimeSpan.Zero,
    };
    _merger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), _timeProvider);
    _mergeEventStore = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);
    _mergeEventWriter = new MergeEventWriter(_mergeEventStore, new CanonicalHashCalculator(), _timeProvider, NullLogger<MergeEventWriter>.Instance);
    await DropMergeCollectionAsync();
}
public Task DisposeAsync() => Task.CompletedTask;
/// <summary>
/// Runs <see cref="InitializeAsync"/> on demand if setup has not happened yet,
/// using the writer field as the initialization sentinel.
/// </summary>
private async Task EnsureInitializedAsync()
{
    // Guard clause: already initialized — nothing to do.
    if (_mergeEventWriter is not null)
    {
        return;
    }

    await InitializeAsync();
}
/// <summary>
/// Drops the merge-event collection so the test starts clean; tolerates the
/// collection not existing yet (first run against a fresh database).
/// </summary>
private async Task DropMergeCollectionAsync()
{
    try
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.MergeEvent);
    }
    // Exception filter: only swallow the "namespace not found" flavor of the command error;
    // any other Mongo failure still propagates to fail the test.
    catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
    {
        // Collection has not been created yet — safe to ignore.
    }
}
/// <summary>
/// Builds the NVD registry baseline for CVE-2025-1000: "medium" severity,
/// exploitKnown = false, one CPE package carrying a "&lt;=1.0" version range.
/// In the precedence tests this range is expected to be suppressed by the
/// vendor override and the severity to lose to the vendor's "critical".
/// </summary>
private static Advisory CreateNvdBaseline()
{
    var provenance = new AdvisoryProvenance("nvd", "document", "https://nvd.nist.gov/vuln/detail/CVE-2025-1000", DateTimeOffset.Parse("2025-02-10T00:00:00Z"));
    return new Advisory(
        "CVE-2025-1000",
        "CVE-2025-1000",
        "Baseline description from NVD.",
        "en",
        DateTimeOffset.Parse("2025-02-05T00:00:00Z"),
        DateTimeOffset.Parse("2025-02-10T12:00:00Z"),
        "medium",
        exploitKnown: false,
        aliases: new[] { "CVE-2025-1000" },
        references: new[]
        {
            new AdvisoryReference("https://nvd.nist.gov/vuln/detail/CVE-2025-1000", "advisory", "nvd", "NVD reference", provenance),
        },
        affectedPackages: new[]
        {
            // Same CPE identifier as the vendor override so the two merge into one package.
            new AffectedPackage(
                AffectedPackageTypes.Cpe,
                "cpe:2.3:o:vendor:product:1.0:*:*:*:*:*:*:*",
                "vendor-os",
                new[]
                {
                    // Range the vendor record is expected to suppress during merge.
                    new AffectedVersionRange(
                        rangeKind: "cpe",
                        introducedVersion: null,
                        fixedVersion: null,
                        lastAffectedVersion: null,
                        rangeExpression: "<=1.0",
                        provenance: provenance)
                },
                Array.Empty<AffectedPackageStatus>(),
                new[] { provenance })
        },
        cvssMetrics: new[]
        {
            new CvssMetric("3.1", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", 9.8, "critical", provenance)
        },
        provenance: new[] { provenance });
}
/// <summary>
/// Builds the vendor PSIRT advisory for the same CVE: "critical" severity,
/// a "known_affected" status instead of a version range, and the title/summary
/// that the precedence merge is expected to keep over the NVD baseline.
/// </summary>
private static Advisory CreateVendorOverride()
{
    var provenance = new AdvisoryProvenance("vendor", "psirt", "VSA-2025-1000", DateTimeOffset.Parse("2025-02-11T00:00:00Z"));
    return new Advisory(
        "CVE-2025-1000",
        "Vendor Security Advisory",
        "Critical impact on supported platforms.",
        "en",
        DateTimeOffset.Parse("2025-02-06T00:00:00Z"),
        DateTimeOffset.Parse("2025-02-11T06:00:00Z"),
        "critical",
        exploitKnown: false,
        aliases: new[] { "CVE-2025-1000", "VSA-2025-1000" },
        references: new[]
        {
            new AdvisoryReference("https://vendor.example/advisories/VSA-2025-1000", "advisory", "vendor", "Vendor advisory", provenance),
        },
        affectedPackages: new[]
        {
            // Same CPE as the NVD baseline; empty ranges + explicit status so the
            // vendor entry overrides the registry's version range.
            new AffectedPackage(
                AffectedPackageTypes.Cpe,
                "cpe:2.3:o:vendor:product:1.0:*:*:*:*:*:*:*",
                "vendor-os",
                Array.Empty<AffectedVersionRange>(),
                new[]
                {
                    new AffectedPackageStatus("known_affected", provenance)
                },
                new[] { provenance })
        },
        cvssMetrics: new[]
        {
            new CvssMetric("3.1", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", 10.0, "critical", provenance)
        },
        provenance: new[] { provenance });
}
/// <summary>
/// Builds a minimal KEV catalog entry for the same CVE: exploitKnown = true and a
/// KEV alias, with null severity/summary/dates and no packages or metrics — the
/// tests assert that KEV only toggles the exploit flag and overrides nothing else.
/// </summary>
private static Advisory CreateKevSignal()
{
    var provenance = new AdvisoryProvenance("kev", "catalog", "CVE-2025-1000", DateTimeOffset.Parse("2025-02-12T00:00:00Z"));
    return new Advisory(
        "CVE-2025-1000",
        "Known Exploited Vulnerability",
        null,
        null,
        published: null,
        modified: null,
        severity: null,
        exploitKnown: true,
        aliases: new[] { "KEV-CVE-2025-1000" },
        references: Array.Empty<AdvisoryReference>(),
        affectedPackages: Array.Empty<AffectedPackage>(),
        cvssMetrics: Array.Empty<CvssMetric>(),
        provenance: new[] { provenance });
}
}

View File

@@ -7,21 +7,23 @@ using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
namespace StellaOps.Concelier.Merge.Services;
public sealed class AdvisoryMergeService
{
private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.identity_conflicts",
unit: "count",
description: "Number of alias collisions detected during merge.");
using System.Text.Json;
namespace StellaOps.Concelier.Merge.Services;
public sealed class AdvisoryMergeService
{
private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.identity_conflicts",
unit: "count",
description: "Number of alias collisions detected during merge.");
private static readonly string[] PreferredAliasSchemes =
{
AliasSchemes.Cve,
@@ -34,6 +36,8 @@ public sealed class AdvisoryMergeService
private readonly IAdvisoryStore _advisoryStore;
private readonly AdvisoryPrecedenceMerger _precedenceMerger;
private readonly MergeEventWriter _mergeEventWriter;
private readonly IAdvisoryEventLog _eventLog;
private readonly TimeProvider _timeProvider;
private readonly CanonicalMerger _canonicalMerger;
private readonly ILogger<AdvisoryMergeService> _logger;
@@ -43,6 +47,8 @@ public sealed class AdvisoryMergeService
AdvisoryPrecedenceMerger precedenceMerger,
MergeEventWriter mergeEventWriter,
CanonicalMerger canonicalMerger,
IAdvisoryEventLog eventLog,
TimeProvider timeProvider,
ILogger<AdvisoryMergeService> logger)
{
_aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver));
@@ -50,92 +56,222 @@ public sealed class AdvisoryMergeService
_precedenceMerger = precedenceMerger ?? throw new ArgumentNullException(nameof(precedenceMerger));
_mergeEventWriter = mergeEventWriter ?? throw new ArgumentNullException(nameof(mergeEventWriter));
_canonicalMerger = canonicalMerger ?? throw new ArgumentNullException(nameof(canonicalMerger));
_eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(seedAdvisoryKey);
var component = await _aliasResolver.BuildComponentAsync(seedAdvisoryKey, cancellationToken).ConfigureAwait(false);
var inputs = new List<Advisory>();
foreach (var advisoryKey in component.AdvisoryKeys)
{
cancellationToken.ThrowIfCancellationRequested();
var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
inputs.Add(advisory);
}
}
if (inputs.Count == 0)
{
_logger.LogWarning("Alias component seeded by {Seed} contains no persisted advisories", seedAdvisoryKey);
return AdvisoryMergeResult.Empty(seedAdvisoryKey, component);
}
public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(seedAdvisoryKey);
var component = await _aliasResolver.BuildComponentAsync(seedAdvisoryKey, cancellationToken).ConfigureAwait(false);
var inputs = new List<Advisory>();
foreach (var advisoryKey in component.AdvisoryKeys)
{
cancellationToken.ThrowIfCancellationRequested();
var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
inputs.Add(advisory);
}
}
if (inputs.Count == 0)
{
_logger.LogWarning("Alias component seeded by {Seed} contains no persisted advisories", seedAdvisoryKey);
return AdvisoryMergeResult.Empty(seedAdvisoryKey, component);
}
var canonicalKey = SelectCanonicalKey(component) ?? seedAdvisoryKey;
var canonicalMerge = ApplyCanonicalMergeIfNeeded(canonicalKey, inputs);
var before = await _advisoryStore.FindAsync(canonicalKey, cancellationToken).ConfigureAwait(false);
var normalizedInputs = NormalizeInputs(inputs, canonicalKey).ToList();
Advisory? merged;
PrecedenceMergeResult precedenceResult;
try
{
merged = _precedenceMerger.Merge(normalizedInputs);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to merge alias component seeded by {Seed}", seedAdvisoryKey);
throw;
}
if (component.Collisions.Count > 0)
{
foreach (var collision in component.Collisions)
{
var tags = new KeyValuePair<string, object?>[]
{
new("scheme", collision.Scheme ?? string.Empty),
new("alias_value", collision.Value ?? string.Empty),
new("advisory_count", collision.AdvisoryKeys.Count),
};
AliasCollisionCounter.Add(1, tags);
_logger.LogInformation(
"Alias collision {Scheme}:{Value} involves advisories {Advisories}",
collision.Scheme,
collision.Value,
string.Join(", ", collision.AdvisoryKeys));
}
}
if (merged is not null)
{
await _advisoryStore.UpsertAsync(merged, cancellationToken).ConfigureAwait(false);
await _mergeEventWriter.AppendAsync(
canonicalKey,
before,
merged,
Array.Empty<Guid>(),
ConvertFieldDecisions(canonicalMerge?.Decisions),
cancellationToken).ConfigureAwait(false);
precedenceResult = _precedenceMerger.Merge(normalizedInputs);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to merge alias component seeded by {Seed}", seedAdvisoryKey);
throw;
}
var merged = precedenceResult.Advisory;
var conflictDetails = precedenceResult.Conflicts;
if (component.Collisions.Count > 0)
{
foreach (var collision in component.Collisions)
{
var tags = new KeyValuePair<string, object?>[]
{
new("scheme", collision.Scheme ?? string.Empty),
new("alias_value", collision.Value ?? string.Empty),
new("advisory_count", collision.AdvisoryKeys.Count),
};
AliasCollisionCounter.Add(1, tags);
_logger.LogInformation(
"Alias collision {Scheme}:{Value} involves advisories {Advisories}",
collision.Scheme,
collision.Value,
string.Join(", ", collision.AdvisoryKeys));
}
}
await _advisoryStore.UpsertAsync(merged, cancellationToken).ConfigureAwait(false);
await _mergeEventWriter.AppendAsync(
canonicalKey,
before,
merged,
Array.Empty<Guid>(),
ConvertFieldDecisions(canonicalMerge?.Decisions),
cancellationToken).ConfigureAwait(false);
await AppendEventLogAsync(canonicalKey, normalizedInputs, merged, conflictDetails, cancellationToken).ConfigureAwait(false);
return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged);
}
private static IEnumerable<Advisory> NormalizeInputs(IEnumerable<Advisory> advisories, string canonicalKey)
{
foreach (var advisory in advisories)
{
yield return CloneWithKey(advisory, canonicalKey);
}
}
}
private async Task AppendEventLogAsync(
string vulnerabilityKey,
IReadOnlyList<Advisory> inputs,
Advisory merged,
IReadOnlyList<MergeConflictDetail> conflicts,
CancellationToken cancellationToken)
{
var recordedAt = _timeProvider.GetUtcNow();
var statements = new List<AdvisoryStatementInput>(inputs.Count + 1);
var statementIds = new Dictionary<Advisory, Guid>(ReferenceEqualityComparer.Instance);
foreach (var advisory in inputs)
{
var statementId = Guid.NewGuid();
statementIds[advisory] = statementId;
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
advisory,
DetermineAsOf(advisory, recordedAt),
InputDocumentIds: Array.Empty<Guid>(),
StatementId: statementId,
AdvisoryKey: advisory.AdvisoryKey));
}
var canonicalStatementId = Guid.NewGuid();
statementIds[merged] = canonicalStatementId;
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
merged,
recordedAt,
InputDocumentIds: Array.Empty<Guid>(),
StatementId: canonicalStatementId,
AdvisoryKey: merged.AdvisoryKey));
var conflictInputs = BuildConflictInputs(conflicts, vulnerabilityKey, statementIds, canonicalStatementId, recordedAt);
if (statements.Count == 0 && conflictInputs.Count == 0)
{
return;
}
var request = new AdvisoryEventAppendRequest(statements, conflictInputs.Count > 0 ? conflictInputs : null);
try
{
await _eventLog.AppendAsync(request, cancellationToken).ConfigureAwait(false);
}
finally
{
foreach (var conflict in conflictInputs)
{
conflict.Details.Dispose();
}
}
}
private static DateTimeOffset DetermineAsOf(Advisory advisory, DateTimeOffset fallback)
{
return (advisory.Modified ?? advisory.Published ?? fallback).ToUniversalTime();
}
private static List<AdvisoryConflictInput> BuildConflictInputs(
IReadOnlyList<MergeConflictDetail> conflicts,
string vulnerabilityKey,
IReadOnlyDictionary<Advisory, Guid> statementIds,
Guid canonicalStatementId,
DateTimeOffset recordedAt)
{
if (conflicts.Count == 0)
{
return new List<AdvisoryConflictInput>(0);
}
var inputs = new List<AdvisoryConflictInput>(conflicts.Count);
foreach (var detail in conflicts)
{
if (!statementIds.TryGetValue(detail.Suppressed, out var suppressedId))
{
continue;
}
var related = new List<Guid> { canonicalStatementId, suppressedId };
if (statementIds.TryGetValue(detail.Primary, out var primaryId))
{
if (!related.Contains(primaryId))
{
related.Add(primaryId);
}
}
var payload = new ConflictDetailPayload(
detail.ConflictType,
detail.Reason,
detail.PrimarySources,
detail.PrimaryRank,
detail.SuppressedSources,
detail.SuppressedRank,
detail.PrimaryValue,
detail.SuppressedValue);
var json = CanonicalJsonSerializer.Serialize(payload);
var document = JsonDocument.Parse(json);
var asOf = (detail.Primary.Modified ?? detail.Suppressed.Modified ?? recordedAt).ToUniversalTime();
inputs.Add(new AdvisoryConflictInput(
vulnerabilityKey,
document,
asOf,
related,
ConflictId: null));
}
return inputs;
}
private sealed record ConflictDetailPayload(
string Type,
string Reason,
IReadOnlyList<string> PrimarySources,
int PrimaryRank,
IReadOnlyList<string> SuppressedSources,
int SuppressedRank,
string? PrimaryValue,
string? SuppressedValue);
private static IEnumerable<Advisory> NormalizeInputs(IEnumerable<Advisory> advisories, string canonicalKey)
{
foreach (var advisory in advisories)
{
yield return CloneWithKey(advisory, canonicalKey);
}
}
private static Advisory CloneWithKey(Advisory source, string advisoryKey)
=> new(
advisoryKey,
@@ -248,47 +384,47 @@ public sealed class AdvisoryMergeService
public const string Nvd = "nvd";
public const string Osv = "osv";
}
private static string? SelectCanonicalKey(AliasComponent component)
{
foreach (var scheme in PreferredAliasSchemes)
{
var alias = component.AliasMap.Values
.SelectMany(static aliases => aliases)
.FirstOrDefault(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(alias?.Value))
{
return alias.Value;
}
}
if (component.AliasMap.TryGetValue(component.SeedAdvisoryKey, out var seedAliases))
{
var primary = seedAliases.FirstOrDefault(record => string.Equals(record.Scheme, AliasStoreConstants.PrimaryScheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(primary?.Value))
{
return primary.Value;
}
}
var firstAlias = component.AliasMap.Values.SelectMany(static aliases => aliases).FirstOrDefault();
if (!string.IsNullOrWhiteSpace(firstAlias?.Value))
{
return firstAlias.Value;
}
return component.SeedAdvisoryKey;
}
}
public sealed record AdvisoryMergeResult(
string SeedAdvisoryKey,
string CanonicalAdvisoryKey,
AliasComponent Component,
IReadOnlyList<Advisory> Inputs,
Advisory? Previous,
Advisory? Merged)
{
public static AdvisoryMergeResult Empty(string seed, AliasComponent component)
=> new(seed, seed, component, Array.Empty<Advisory>(), null, null);
}
private static string? SelectCanonicalKey(AliasComponent component)
{
foreach (var scheme in PreferredAliasSchemes)
{
var alias = component.AliasMap.Values
.SelectMany(static aliases => aliases)
.FirstOrDefault(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(alias?.Value))
{
return alias.Value;
}
}
if (component.AliasMap.TryGetValue(component.SeedAdvisoryKey, out var seedAliases))
{
var primary = seedAliases.FirstOrDefault(record => string.Equals(record.Scheme, AliasStoreConstants.PrimaryScheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(primary?.Value))
{
return primary.Value;
}
}
var firstAlias = component.AliasMap.Values.SelectMany(static aliases => aliases).FirstOrDefault();
if (!string.IsNullOrWhiteSpace(firstAlias?.Value))
{
return firstAlias.Value;
}
return component.SeedAdvisoryKey;
}
}
public sealed record AdvisoryMergeResult(
string SeedAdvisoryKey,
string CanonicalAdvisoryKey,
AliasComponent Component,
IReadOnlyList<Advisory> Inputs,
Advisory? Previous,
Advisory? Merged)
{
public static AdvisoryMergeResult Empty(string seed, AliasComponent component)
=> new(seed, seed, component, Array.Empty<Advisory>(), null, null);
}

View File

@@ -111,7 +111,7 @@ public sealed class AdvisoryPrecedenceMerger
_logger = logger ?? NullLogger<AdvisoryPrecedenceMerger>.Instance;
}
public Advisory Merge(IEnumerable<Advisory> advisories)
public PrecedenceMergeResult Merge(IEnumerable<Advisory> advisories)
{
if (advisories is null)
{
@@ -193,11 +193,12 @@ public sealed class AdvisoryPrecedenceMerger
var exploitKnown = ordered.Any(entry => entry.Advisory.ExploitKnown);
LogOverrides(advisoryKey, ordered);
LogPackageOverrides(advisoryKey, packageResult.Overrides);
RecordFieldConflicts(advisoryKey, ordered);
return new Advisory(
LogOverrides(advisoryKey, ordered);
LogPackageOverrides(advisoryKey, packageResult.Overrides);
var conflicts = new List<MergeConflictDetail>();
RecordFieldConflicts(advisoryKey, ordered, conflicts);
var merged = new Advisory(
advisoryKey,
title,
summary,
@@ -212,6 +213,8 @@ public sealed class AdvisoryPrecedenceMerger
affectedPackages,
cvssMetrics,
provenance);
return new PrecedenceMergeResult(merged, conflicts);
}
private static void RecordNormalizedRuleMetrics(IReadOnlyList<AffectedPackage> packages)
@@ -379,7 +382,7 @@ public sealed class AdvisoryPrecedenceMerger
}
}
private void RecordFieldConflicts(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered)
private void RecordFieldConflicts(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered, List<MergeConflictDetail> conflicts)
{
if (ordered.Count <= 1)
{
@@ -396,42 +399,45 @@ public sealed class AdvisoryPrecedenceMerger
if (!string.IsNullOrEmpty(candidateSeverity))
{
var reason = string.IsNullOrEmpty(primarySeverity) ? "primary_missing" : "mismatch";
if (string.IsNullOrEmpty(primarySeverity) || !string.Equals(primarySeverity, candidateSeverity, StringComparison.OrdinalIgnoreCase))
{
RecordConflict(
advisoryKey,
"severity",
reason,
primary,
candidate,
primarySeverity ?? "(none)",
candidateSeverity);
}
}
if (candidate.Rank == primary.Rank)
{
RecordConflict(
advisoryKey,
"precedence_tie",
"equal_rank",
primary,
candidate,
primary.Rank.ToString(CultureInfo.InvariantCulture),
candidate.Rank.ToString(CultureInfo.InvariantCulture));
}
}
}
private void RecordConflict(
string advisoryKey,
string conflictType,
string reason,
AdvisoryEntry primary,
AdvisoryEntry suppressed,
string? primaryValue,
string? suppressedValue)
var reason = string.IsNullOrEmpty(primarySeverity) ? "primary_missing" : "mismatch";
if (string.IsNullOrEmpty(primarySeverity) || !string.Equals(primarySeverity, candidateSeverity, StringComparison.OrdinalIgnoreCase))
{
RecordConflict(
advisoryKey,
"severity",
reason,
primary,
candidate,
primarySeverity ?? "(none)",
candidateSeverity,
conflicts);
}
}
if (candidate.Rank == primary.Rank)
{
RecordConflict(
advisoryKey,
"precedence_tie",
"equal_rank",
primary,
candidate,
primary.Rank.ToString(CultureInfo.InvariantCulture),
candidate.Rank.ToString(CultureInfo.InvariantCulture),
conflicts);
}
}
}
private void RecordConflict(
string advisoryKey,
string conflictType,
string reason,
AdvisoryEntry primary,
AdvisoryEntry suppressed,
string? primaryValue,
string? suppressedValue,
List<MergeConflictDetail> conflicts)
{
var tags = new KeyValuePair<string, object?>[]
{
@@ -445,18 +451,30 @@ public sealed class AdvisoryPrecedenceMerger
ConflictCounter.Add(1, tags);
var audit = new MergeFieldConflictAudit(
advisoryKey,
conflictType,
reason,
primary.Sources,
primary.Rank,
suppressed.Sources,
suppressed.Rank,
primaryValue,
suppressedValue);
ConflictLogged(_logger, audit, null);
var audit = new MergeFieldConflictAudit(
advisoryKey,
conflictType,
reason,
primary.Sources,
primary.Rank,
suppressed.Sources,
suppressed.Rank,
primaryValue,
suppressedValue);
ConflictLogged(_logger, audit, null);
conflicts.Add(new MergeConflictDetail(
primary.Advisory,
suppressed.Advisory,
conflictType,
reason,
primary.Sources.ToArray(),
primary.Rank,
suppressed.Sources.ToArray(),
suppressed.Rank,
primaryValue,
suppressedValue));
}
private readonly record struct AdvisoryEntry(Advisory Advisory, int Rank)

Some files were not shown because too many files have changed in this diff Show More