Add tests and implement StubBearer authentication for Signer endpoints
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Created SignerEndpointsTests to validate the SignDsse and VerifyReferrers endpoints.
- Implemented StubBearerAuthenticationDefaults and StubBearerAuthenticationHandler for token-based authentication.
- Developed ConcelierExporterClient for managing Trivy DB settings and export operations.
- Added TrivyDbSettingsPageComponent for UI interactions with Trivy DB settings, including form handling and export triggering.
- Implemented styles and HTML structure for Trivy DB settings page.
- Created NotifySmokeCheck tool for validating Redis event streams and Notify deliveries.
This commit is contained in:
2025-10-21 09:37:07 +03:00
parent 2b6304c9c3
commit 791e12baab
298 changed files with 20490 additions and 5751 deletions

View File

@@ -7,6 +7,10 @@
<IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Connector.'))">true</IsConcelierPlugin>
<IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Exporter.'))">true</IsConcelierPlugin>
<IsAuthorityPlugin Condition="'$(IsAuthorityPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Authority.Plugin.'))">true</IsAuthorityPlugin>
<NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' != ''">$(SolutionDir)plugins\notify</NotifyPluginOutputRoot>
<NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\notify\'))</NotifyPluginOutputRoot>
<IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Notify.Connectors.')) and !$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests'))">true</IsNotifyPlugin>
<IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == 'true' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Notify.Connectors.Shared'">false</IsNotifyPlugin>
<ScannerBuildxPluginOutputRoot Condition="'$(ScannerBuildxPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\buildx\'))</ScannerBuildxPluginOutputRoot>
<IsScannerBuildxPlugin Condition="'$(IsScannerBuildxPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Scanner.Sbomer.BuildXPlugin'">true</IsScannerBuildxPlugin>
<ScannerOsAnalyzerPluginOutputRoot Condition="'$(ScannerOsAnalyzerPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\os\'))</ScannerOsAnalyzerPluginOutputRoot>

View File

@@ -31,6 +31,24 @@
<Copy SourceFiles="@(AuthorityPluginArtifacts)" DestinationFolder="$(AuthorityPluginOutputDirectory)" SkipUnchangedFiles="true" />
</Target>
<Target Name="NotifyCopyPluginArtifacts" AfterTargets="Build" Condition="'$(IsNotifyPlugin)' == 'true'">
<PropertyGroup>
<NotifyPluginDirectoryName>$([System.String]::Copy('$(MSBuildProjectName)').Replace('StellaOps.Notify.Connectors.', '').ToLowerInvariant())</NotifyPluginDirectoryName>
<NotifyPluginOutputDirectory>$(NotifyPluginOutputRoot)\$(NotifyPluginDirectoryName)</NotifyPluginOutputDirectory>
</PropertyGroup>
<MakeDir Directories="$(NotifyPluginOutputDirectory)" />
<ItemGroup>
<NotifyPluginArtifacts Include="$(TargetPath)" />
<NotifyPluginArtifacts Include="$(TargetPath).deps.json" Condition="Exists('$(TargetPath).deps.json')" />
<NotifyPluginArtifacts Include="$(TargetDir)$(TargetName).pdb" Condition="Exists('$(TargetDir)$(TargetName).pdb')" />
<NotifyPluginArtifacts Include="$(ProjectDir)notify-plugin.json" Condition="Exists('$(ProjectDir)notify-plugin.json')" />
</ItemGroup>
<Copy SourceFiles="@(NotifyPluginArtifacts)" DestinationFolder="$(NotifyPluginOutputDirectory)" SkipUnchangedFiles="true" />
</Target>
<Target Name="ScannerCopyBuildxPluginArtifacts" AfterTargets="Build" Condition="'$(IsScannerBuildxPlugin)' == 'true'">
<PropertyGroup>
<ScannerBuildxPluginOutputDirectory>$(ScannerBuildxPluginOutputRoot)\$(MSBuildProjectName)</ScannerBuildxPluginOutputDirectory>

View File

@@ -78,7 +78,7 @@ public class StandardPluginRegistrarTests
var registrar = new StandardPluginRegistrar();
registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
var provider = services.BuildServiceProvider();
using var provider = services.BuildServiceProvider();
var hostedServices = provider.GetServices<IHostedService>();
foreach (var hosted in hostedServices)
{
@@ -88,7 +88,8 @@ public class StandardPluginRegistrarTests
}
}
var plugin = provider.GetRequiredService<IIdentityProviderPlugin>();
using var scope = provider.CreateScope();
var plugin = scope.ServiceProvider.GetRequiredService<IIdentityProviderPlugin>();
Assert.Equal("standard", plugin.Type);
Assert.True(plugin.Capabilities.SupportsPassword);
Assert.False(plugin.Capabilities.SupportsMfa);
@@ -138,7 +139,8 @@ public class StandardPluginRegistrarTests
registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
using var provider = services.BuildServiceProvider();
_ = provider.GetRequiredService<StandardUserCredentialStore>();
using var scope = provider.CreateScope();
_ = scope.ServiceProvider.GetRequiredService<StandardUserCredentialStore>();
Assert.Contains(loggerProvider.Entries, entry =>
entry.Level == LogLevel.Warning &&
@@ -176,7 +178,8 @@ public class StandardPluginRegistrarTests
registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
using var provider = services.BuildServiceProvider();
var plugin = provider.GetRequiredService<IIdentityProviderPlugin>();
using var scope = provider.CreateScope();
var plugin = scope.ServiceProvider.GetRequiredService<IIdentityProviderPlugin>();
Assert.True(plugin.Capabilities.SupportsPassword);
}
@@ -215,7 +218,8 @@ public class StandardPluginRegistrarTests
registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
using var provider = services.BuildServiceProvider();
Assert.Throws<InvalidOperationException>(() => provider.GetRequiredService<IIdentityProviderPlugin>());
using var scope = provider.CreateScope();
Assert.Throws<InvalidOperationException>(() => scope.ServiceProvider.GetRequiredService<IIdentityProviderPlugin>());
}
[Fact]

View File

@@ -1,5 +1,6 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -10,24 +11,25 @@ namespace StellaOps.Authority.Plugin.Standard.Bootstrap;
internal sealed class StandardPluginBootstrapper : IHostedService
{
private readonly string pluginName;
private readonly IOptionsMonitor<StandardPluginOptions> optionsMonitor;
private readonly StandardUserCredentialStore credentialStore;
private readonly IServiceScopeFactory scopeFactory;
private readonly ILogger<StandardPluginBootstrapper> logger;
public StandardPluginBootstrapper(
string pluginName,
IOptionsMonitor<StandardPluginOptions> optionsMonitor,
StandardUserCredentialStore credentialStore,
IServiceScopeFactory scopeFactory,
ILogger<StandardPluginBootstrapper> logger)
{
this.pluginName = pluginName;
this.optionsMonitor = optionsMonitor;
this.credentialStore = credentialStore;
this.scopeFactory = scopeFactory;
this.logger = logger;
}
public async Task StartAsync(CancellationToken cancellationToken)
{
using var scope = scopeFactory.CreateScope();
var optionsMonitor = scope.ServiceProvider.GetRequiredService<IOptionsMonitor<StandardPluginOptions>>();
var credentialStore = scope.ServiceProvider.GetRequiredService<StandardUserCredentialStore>();
var options = optionsMonitor.Get(pluginName);
if (options.BootstrapUser is null || !options.BootstrapUser.IsConfigured)
{

View File

@@ -43,7 +43,7 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
})
.ValidateOnStart();
context.Services.AddSingleton(sp =>
context.Services.AddScoped(sp =>
{
var database = sp.GetRequiredService<IMongoDatabase>();
var optionsMonitor = sp.GetRequiredService<IOptionsMonitor<StandardPluginOptions>>();
@@ -79,7 +79,7 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
loggerFactory.CreateLogger<StandardUserCredentialStore>());
});
context.Services.AddSingleton(sp =>
context.Services.AddScoped(sp =>
{
var clientStore = sp.GetRequiredService<IAuthorityClientStore>();
var revocationStore = sp.GetRequiredService<IAuthorityRevocationStore>();
@@ -87,7 +87,7 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
return new StandardClientProvisioningStore(pluginName, clientStore, revocationStore, timeProvider);
});
context.Services.AddSingleton<IIdentityProviderPlugin>(sp =>
context.Services.AddScoped<IIdentityProviderPlugin>(sp =>
{
var store = sp.GetRequiredService<StandardUserCredentialStore>();
var clientProvisioningStore = sp.GetRequiredService<StandardClientProvisioningStore>();
@@ -100,14 +100,13 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar
loggerFactory.CreateLogger<StandardIdentityProviderPlugin>());
});
context.Services.AddSingleton<IClientProvisioningStore>(sp =>
context.Services.AddScoped<IClientProvisioningStore>(sp =>
sp.GetRequiredService<StandardClientProvisioningStore>());
context.Services.AddSingleton<IHostedService>(sp =>
new StandardPluginBootstrapper(
pluginName,
sp.GetRequiredService<IOptionsMonitor<StandardPluginOptions>>(),
sp.GetRequiredService<StandardUserCredentialStore>(),
sp.GetRequiredService<IServiceScopeFactory>(),
sp.GetRequiredService<ILogger<StandardPluginBootstrapper>>()));
}
}

View File

@@ -5,10 +5,10 @@
| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. |
| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. |
| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. |
| SEC2.PLG | DOING (2025-10-14) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. <br>⏳ Awaiting AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 completion to unlock Wave0B verification paths. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
| SEC3.PLG | DOING (2025-10-14) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). <br> Pending AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 so limiter telemetry contract matches final authority surface. | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
| SEC2.PLG | BLOCKED (2025-10-21) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. <br>⛔ Waiting on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 to stabilise Authority auth surfaces before final verification + publish. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. |
| SEC3.PLG | BLOCKED (2025-10-21) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). <br> Pending AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 so limiter telemetry contract matches final authority surface. | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. |
| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. |
| SEC5.PLG | DOING (2025-10-14) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. <br> Final documentation depends on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 outcomes. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
| SEC5.PLG | BLOCKED (2025-10-21) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. <br> Final documentation depends on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 outcomes. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. |
| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. <br>⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. |
| PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. |
| PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. |

View File

@@ -1,7 +1,10 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace StellaOps.Authority.Plugins.Abstractions;
@@ -95,24 +98,24 @@ public interface IAuthorityPluginRegistry
public interface IAuthorityIdentityProviderRegistry
{
/// <summary>
/// Gets all registered identity provider plugins keyed by logical name.
/// Gets metadata for all registered identity provider plugins.
/// </summary>
IReadOnlyCollection<IIdentityProviderPlugin> Providers { get; }
IReadOnlyCollection<AuthorityIdentityProviderMetadata> Providers { get; }
/// <summary>
/// Gets identity providers that advertise password support.
/// Gets metadata for identity providers that advertise password support.
/// </summary>
IReadOnlyCollection<IIdentityProviderPlugin> PasswordProviders { get; }
IReadOnlyCollection<AuthorityIdentityProviderMetadata> PasswordProviders { get; }
/// <summary>
/// Gets identity providers that advertise multi-factor authentication support.
/// Gets metadata for identity providers that advertise multi-factor authentication support.
/// </summary>
IReadOnlyCollection<IIdentityProviderPlugin> MfaProviders { get; }
IReadOnlyCollection<AuthorityIdentityProviderMetadata> MfaProviders { get; }
/// <summary>
/// Gets identity providers that advertise client provisioning support.
/// Gets metadata for identity providers that advertise client provisioning support.
/// </summary>
IReadOnlyCollection<IIdentityProviderPlugin> ClientProvisioningProviders { get; }
IReadOnlyCollection<AuthorityIdentityProviderMetadata> ClientProvisioningProviders { get; }
/// <summary>
/// Aggregate capability flags across all registered providers.
@@ -120,20 +123,89 @@ public interface IAuthorityIdentityProviderRegistry
AuthorityIdentityProviderCapabilities AggregateCapabilities { get; }
/// <summary>
/// Attempts to resolve an identity provider by name.
/// Attempts to resolve identity provider metadata by name.
/// </summary>
bool TryGet(string name, [NotNullWhen(true)] out IIdentityProviderPlugin? provider);
bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? metadata);
/// <summary>
/// Resolves an identity provider by name or throws when not found.
/// Resolves identity provider metadata by name or throws when not found.
/// </summary>
IIdentityProviderPlugin GetRequired(string name)
AuthorityIdentityProviderMetadata GetRequired(string name)
{
if (TryGet(name, out var provider))
if (TryGet(name, out var metadata))
{
return provider;
return metadata;
}
throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered.");
}
/// <summary>
/// Acquires a scoped handle to the specified identity provider.
/// </summary>
/// <param name="name">Logical provider name.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Handle managing the provider instance lifetime.</returns>
ValueTask<AuthorityIdentityProviderHandle> AcquireAsync(string name, CancellationToken cancellationToken);
}
/// <summary>
/// Immutable metadata describing a registered identity provider.
/// Records give value equality, so two metadata instances with the same
/// name/type/capabilities compare equal.
/// </summary>
/// <param name="Name">Logical provider name from the manifest.</param>
/// <param name="Type">Provider type identifier.</param>
/// <param name="Capabilities">Capability flags advertised by the provider.</param>
public sealed record AuthorityIdentityProviderMetadata(
    string Name,
    string Type,
    AuthorityIdentityProviderCapabilities Capabilities);
/// <summary>
/// Represents a scoped identity provider instance and manages its disposal.
/// </summary>
public sealed class AuthorityIdentityProviderHandle : IAsyncDisposable, IDisposable
{
    // Owns the DI scope the provider was resolved from; disposing the handle
    // disposes the scope (and therefore the scoped provider instance).
    private readonly AsyncServiceScope scope;
    private bool disposed;

    /// <summary>
    /// Creates a handle that ties the provider instance to its DI scope.
    /// </summary>
    /// <param name="scope">Service scope that owns the provider instance.</param>
    /// <param name="metadata">Metadata describing the provider; must not be null.</param>
    /// <param name="provider">Resolved provider instance; must not be null.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="metadata"/> or <paramref name="provider"/> is null.</exception>
    public AuthorityIdentityProviderHandle(AsyncServiceScope scope, AuthorityIdentityProviderMetadata metadata, IIdentityProviderPlugin provider)
    {
        this.scope = scope;
        Metadata = metadata ?? throw new ArgumentNullException(nameof(metadata));
        Provider = provider ?? throw new ArgumentNullException(nameof(provider));
    }

    /// <summary>
    /// Gets the metadata associated with the provider instance.
    /// </summary>
    public AuthorityIdentityProviderMetadata Metadata { get; }

    /// <summary>
    /// Gets the active provider instance.
    /// </summary>
    public IIdentityProviderPlugin Provider { get; }

    /// <inheritdoc />
    public void Dispose()
    {
        // Idempotent: a second Dispose/DisposeAsync is a no-op.
        if (disposed)
        {
            return;
        }
        disposed = true;
        scope.Dispose();
    }

    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        // Shares the disposed flag with Dispose(), so mixing the sync and
        // async paths still disposes the scope exactly once.
        if (disposed)
        {
            return;
        }
        disposed = true;
        await scope.DisposeAsync().ConfigureAwait(false);
    }
}

View File

@@ -1,15 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Authority.Plugins.Abstractions;
using Xunit;
using System.Linq;
namespace StellaOps.Authority.Tests.Identity;
public class AuthorityIdentityProviderRegistryTests
{
[Fact]
public void RegistryIndexesProvidersAndAggregatesCapabilities()
public async Task RegistryIndexesProvidersAndAggregatesCapabilities()
{
var providers = new[]
{
@@ -17,21 +22,25 @@ public class AuthorityIdentityProviderRegistryTests
CreateProvider("sso", type: "saml", supportsPassword: false, supportsMfa: true, supportsClientProvisioning: true)
};
var registry = new AuthorityIdentityProviderRegistry(providers, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
using var serviceProvider = BuildServiceProvider(providers);
var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
Assert.Equal(2, registry.Providers.Count);
Assert.True(registry.TryGet("standard", out var standard));
Assert.Same(providers[0], standard);
Assert.Equal("standard", standard!.Name);
Assert.Single(registry.PasswordProviders);
Assert.Single(registry.MfaProviders);
Assert.Single(registry.ClientProvisioningProviders);
Assert.True(registry.AggregateCapabilities.SupportsPassword);
Assert.True(registry.AggregateCapabilities.SupportsMfa);
Assert.True(registry.AggregateCapabilities.SupportsClientProvisioning);
await using var handle = await registry.AcquireAsync("standard", default);
Assert.Same(providers[0], handle.Provider);
}
[Fact]
public void RegistryIgnoresDuplicateNames()
public async Task RegistryIgnoresDuplicateNames()
{
var duplicate = CreateProvider("standard", "ldap", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false);
var providers = new[]
@@ -40,12 +49,56 @@ public class AuthorityIdentityProviderRegistryTests
duplicate
};
var registry = new AuthorityIdentityProviderRegistry(providers, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
using var serviceProvider = BuildServiceProvider(providers);
var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
Assert.Single(registry.Providers);
Assert.Same(providers[0], registry.Providers.First());
Assert.Equal("standard", registry.Providers.First().Name);
Assert.True(registry.TryGet("standard", out var provider));
Assert.Same(providers[0], provider);
await using var handle = await registry.AcquireAsync("standard", default);
Assert.Same(providers[0], handle.Provider);
Assert.Equal("standard", provider!.Name);
}
[Fact]
public async Task AcquireAsync_ReturnsScopedProviderInstances()
{
    // Arrange: register the plugin as Scoped so every AcquireAsync call
    // should resolve a fresh instance from a new DI scope.
    var configuration = new ConfigurationBuilder().Build();
    var manifest = new AuthorityPluginManifest(
        "scoped",
        "scoped",
        true,
        AssemblyName: null,
        AssemblyPath: null,
        Capabilities: new[] { AuthorityPluginCapabilities.Password },
        Metadata: new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase),
        ConfigPath: string.Empty);
    var context = new AuthorityPluginContext(manifest, configuration);
    var services = new ServiceCollection();
    services.AddScoped<IIdentityProviderPlugin>(_ => new ScopedIdentityProviderPlugin(context));
    using var serviceProvider = services.BuildServiceProvider();
    var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger<AuthorityIdentityProviderRegistry>.Instance);

    // Act: acquire the same logical provider twice.
    await using var first = await registry.AcquireAsync("scoped", default);
    await using var second = await registry.AcquireAsync("scoped", default);

    // Assert: distinct InstanceIds prove each handle wraps its own scoped instance.
    var firstPlugin = Assert.IsType<ScopedIdentityProviderPlugin>(first.Provider);
    var secondPlugin = Assert.IsType<ScopedIdentityProviderPlugin>(second.Provider);
    Assert.NotEqual(firstPlugin.InstanceId, secondPlugin.InstanceId);
}
// Builds a ServiceProvider with each supplied plugin registered as a
// singleton IIdentityProviderPlugin, mirroring how the registry consumes DI.
// Caller owns disposal of the returned provider.
private static ServiceProvider BuildServiceProvider(IEnumerable<IIdentityProviderPlugin> providers)
{
    var services = new ServiceCollection();
    foreach (var provider in providers)
    {
        services.AddSingleton<IIdentityProviderPlugin>(provider);
    }
    return services.BuildServiceProvider();
}
private static IIdentityProviderPlugin CreateProvider(
@@ -122,4 +175,36 @@ public class AuthorityIdentityProviderRegistryTests
public ValueTask<AuthorityPluginHealthResult> CheckHealthAsync(CancellationToken cancellationToken)
=> ValueTask.FromResult(AuthorityPluginHealthResult.Healthy());
}
// Test double registered with a Scoped lifetime; the per-construction
// InstanceId lets tests prove that each DI scope yields a distinct instance.
private sealed class ScopedIdentityProviderPlugin : IIdentityProviderPlugin
{
    public ScopedIdentityProviderPlugin(AuthorityPluginContext context)
    {
        Context = context;
        // Unique per instance — used by AcquireAsync_ReturnsScopedProviderInstances.
        InstanceId = Guid.NewGuid();
        Capabilities = new AuthorityIdentityProviderCapabilities(
            SupportsPassword: true,
            SupportsMfa: false,
            SupportsClientProvisioning: false);
    }

    // Identity of this particular instance (not part of IIdentityProviderPlugin).
    public Guid InstanceId { get; }

    // Name/Type come straight from the manifest supplied at construction.
    public string Name => Context.Manifest.Name;

    public string Type => Context.Manifest.Type;

    public AuthorityPluginContext Context { get; }

    // Credential/claims members are intentionally unimplemented: the scoping
    // tests never touch them, so throwing keeps accidental use loud.
    public IUserCredentialStore Credentials => throw new NotImplementedException();

    public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException();

    public IClientProvisioningStore? ClientProvisioning => null;

    public AuthorityIdentityProviderCapabilities Capabilities { get; }

    public ValueTask<AuthorityPluginHealthResult> CheckHealthAsync(CancellationToken cancellationToken)
        => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy());
}
}

View File

@@ -1,4 +1,5 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using OpenIddict.Abstractions;
using StellaOps.Authority.OpenIddict;
using StellaOps.Authority.Plugins.Abstractions;
@@ -67,8 +68,14 @@ public class AuthorityIdentityProviderSelectorTests
private static AuthorityIdentityProviderRegistry CreateRegistry(IEnumerable<IIdentityProviderPlugin> passwordProviders)
{
var providers = passwordProviders.ToList<IIdentityProviderPlugin>();
return new AuthorityIdentityProviderRegistry(providers, Microsoft.Extensions.Logging.Abstractions.NullLogger<AuthorityIdentityProviderRegistry>.Instance);
var services = new ServiceCollection();
foreach (var provider in passwordProviders)
{
services.AddSingleton<IIdentityProviderPlugin>(provider);
}
var serviceProvider = services.BuildServiceProvider();
return new AuthorityIdentityProviderRegistry(serviceProvider, Microsoft.Extensions.Logging.Abstractions.NullLogger<AuthorityIdentityProviderRegistry>.Instance);
}
private static IIdentityProviderPlugin CreateProvider(string name, bool supportsPassword)

View File

@@ -10,6 +10,7 @@ using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Primitives;
using Microsoft.IdentityModel.Tokens;
@@ -350,6 +351,7 @@ public class ClientCredentialsHandlersTests
};
options.Security.SenderConstraints.Mtls.Enabled = true;
options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear();
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
@@ -394,7 +396,7 @@ public class ClientCredentialsHandlersTests
await handler.HandleAsync(context);
Assert.False(context.IsRejected);
Assert.False(context.IsRejected, context.ErrorDescription ?? context.Error);
Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]);
var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256));
@@ -581,7 +583,7 @@ public class TokenValidationHandlersTests
descriptor: CreateDescriptor(clientDocument),
user: userDescriptor);
var registry = new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
var registry = CreateRegistryFromPlugins(plugin);
var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor();
var auditSinkSuccess = new TestAuthEventSink();
@@ -1073,7 +1075,7 @@ internal static class TestHelpers
descriptor: clientDescriptor,
user: null);
return new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
return CreateRegistryFromPlugins(plugin);
}
public static TestIdentityProviderPlugin CreatePlugin(
@@ -1109,6 +1111,19 @@ internal static class TestHelpers
SupportsClientProvisioning: supportsClientProvisioning));
}
// Shared test helper: wraps the given plugins in a DI container (singleton
// registrations plus logging) and constructs the registry over it, matching
// the registry's new service-provider-based constructor.
// NOTE(review): the built ServiceProvider is not disposed here — acceptable
// for test lifetime, but callers should not rely on scope cleanup.
public static AuthorityIdentityProviderRegistry CreateRegistryFromPlugins(params IIdentityProviderPlugin[] plugins)
{
    var services = new ServiceCollection();
    services.AddLogging();
    foreach (var plugin in plugins)
    {
        services.AddSingleton<IIdentityProviderPlugin>(plugin);
    }
    var provider = services.BuildServiceProvider();
    return new AuthorityIdentityProviderRegistry(provider, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
}
public static OpenIddictServerTransaction CreateTokenTransaction(string clientId, string? secret, string? scope)
{
var request = new OpenIddictRequest

View File

@@ -7,6 +7,7 @@ using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.DependencyInjection;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using OpenIddict.Server.AspNetCore;
@@ -97,7 +98,13 @@ public class PasswordGrantHandlersTests
private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store)
{
var plugin = new StubIdentityProviderPlugin("stub", store);
return new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton<IIdentityProviderPlugin>(plugin);
var provider = services.BuildServiceProvider();
return new AuthorityIdentityProviderRegistry(provider, NullLogger<AuthorityIdentityProviderRegistry>.Instance);
}
private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password)

View File

@@ -131,9 +131,7 @@ public sealed class TokenPersistenceIntegrationTests
descriptor,
userDescriptor);
var registry = new AuthorityIdentityProviderRegistry(
new[] { plugin },
NullLogger<AuthorityIdentityProviderRegistry>.Instance);
var registry = TestHelpers.CreateRegistryFromPlugins(plugin);
const string revokedTokenId = "refresh-token-1";
var refreshToken = new AuthorityTokenDocument

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Authority.Plugins;
using StellaOps.Authority.Plugins.Abstractions;
@@ -67,6 +68,7 @@ public class AuthorityPluginLoaderTests
public void RegisterPlugins_RegistersEnabledPlugin_WhenRegistrarAvailable()
{
var services = new ServiceCollection();
services.AddLogging();
var hostConfiguration = new ConfigurationBuilder().Build();
var manifest = new AuthorityPluginManifest(
@@ -99,6 +101,46 @@ public class AuthorityPluginLoaderTests
Assert.NotNull(provider.GetRequiredService<TestMarkerService>());
}
[Fact]
public void RegisterPlugins_ActivatesRegistrarUsingDependencyInjection()
{
    // Arrange: host services expose ILogger<> and TimeProvider so the
    // DI-activated registrar's constructor dependencies can be satisfied.
    var services = new ServiceCollection();
    services.AddLogging();
    services.AddSingleton(TimeProvider.System);
    var hostConfiguration = new ConfigurationBuilder().Build();
    var manifest = new AuthorityPluginManifest(
        "di-test",
        DiAuthorityPluginRegistrar.PluginTypeIdentifier,
        true,
        typeof(DiAuthorityPluginRegistrar).Assembly.GetName().Name,
        typeof(DiAuthorityPluginRegistrar).Assembly.Location,
        Array.Empty<string>(),
        new Dictionary<string, string?>(),
        "di-test.yaml");
    var pluginContext = new AuthorityPluginContext(manifest, hostConfiguration);
    var descriptor = new AuthorityPluginLoader.LoadedPluginDescriptor(
        typeof(DiAuthorityPluginRegistrar).Assembly,
        typeof(DiAuthorityPluginRegistrar).Assembly.Location);

    // Act: run the loader core directly against the in-memory context/descriptor.
    var summary = AuthorityPluginLoader.RegisterPluginsCore(
        services,
        hostConfiguration,
        new[] { pluginContext },
        new[] { descriptor },
        Array.Empty<string>(),
        NullLogger.Instance);

    // Assert: plugin was registered, and the registrar received real
    // (non-null) constructor dependencies from the host container.
    Assert.Contains("di-test", summary.RegisteredPlugins);
    var provider = services.BuildServiceProvider();
    var dependent = provider.GetRequiredService<DependentService>();
    Assert.True(dependent.LoggerWasResolved);
    Assert.True(dependent.TimeProviderResolved);
}
private sealed class TestAuthorityPluginRegistrar : IAuthorityPluginRegistrar
{
public const string PluginTypeIdentifier = "test-plugin";
@@ -114,4 +156,38 @@ public class AuthorityPluginLoaderTests
private sealed class TestMarkerService
{
}
private sealed class DiAuthorityPluginRegistrar : IAuthorityPluginRegistrar
{
public const string PluginTypeIdentifier = "test-plugin-di";
private readonly ILogger<DiAuthorityPluginRegistrar> logger;
private readonly TimeProvider timeProvider;
public DiAuthorityPluginRegistrar(ILogger<DiAuthorityPluginRegistrar> logger, TimeProvider timeProvider)
{
this.logger = logger;
this.timeProvider = timeProvider;
}
public string PluginType => PluginTypeIdentifier;
public void Register(AuthorityPluginRegistrationContext context)
{
context.Services.AddSingleton(new DependentService(logger != null, timeProvider != null));
}
}
private sealed class DependentService
{
public DependentService(bool loggerResolved, bool timeProviderResolved)
{
LoggerWasResolved = loggerResolved;
TimeProviderResolved = timeProviderResolved;
}
public bool LoggerWasResolved { get; }
public bool TimeProviderResolved { get; }
}
}

View File

@@ -1,5 +1,9 @@
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Plugins.Abstractions;
@@ -7,29 +11,34 @@ namespace StellaOps.Authority;
internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProviderRegistry
{
private readonly IReadOnlyDictionary<string, IIdentityProviderPlugin> providersByName;
private readonly ReadOnlyCollection<IIdentityProviderPlugin> providers;
private readonly ReadOnlyCollection<IIdentityProviderPlugin> passwordProviders;
private readonly ReadOnlyCollection<IIdentityProviderPlugin> mfaProviders;
private readonly ReadOnlyCollection<IIdentityProviderPlugin> clientProvisioningProviders;
private readonly IServiceProvider serviceProvider;
private readonly IReadOnlyDictionary<string, AuthorityIdentityProviderMetadata> providersByName;
private readonly ReadOnlyCollection<AuthorityIdentityProviderMetadata> providers;
private readonly ReadOnlyCollection<AuthorityIdentityProviderMetadata> passwordProviders;
private readonly ReadOnlyCollection<AuthorityIdentityProviderMetadata> mfaProviders;
private readonly ReadOnlyCollection<AuthorityIdentityProviderMetadata> clientProvisioningProviders;
public AuthorityIdentityProviderRegistry(
IEnumerable<IIdentityProviderPlugin> providerInstances,
IServiceProvider serviceProvider,
ILogger<AuthorityIdentityProviderRegistry> logger)
{
this.serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider));
logger = logger ?? throw new ArgumentNullException(nameof(logger));
using var scope = serviceProvider.CreateScope();
var providerInstances = scope.ServiceProvider.GetServices<IIdentityProviderPlugin>();
var orderedProviders = providerInstances?
.Where(static p => p is not null)
.OrderBy(static p => p.Name, StringComparer.OrdinalIgnoreCase)
.ToList() ?? new List<IIdentityProviderPlugin>();
var uniqueProviders = new List<IIdentityProviderPlugin>(orderedProviders.Count);
var password = new List<IIdentityProviderPlugin>();
var mfa = new List<IIdentityProviderPlugin>();
var clientProvisioning = new List<IIdentityProviderPlugin>();
var uniqueProviders = new List<AuthorityIdentityProviderMetadata>(orderedProviders.Count);
var password = new List<AuthorityIdentityProviderMetadata>();
var mfa = new List<AuthorityIdentityProviderMetadata>();
var clientProvisioning = new List<AuthorityIdentityProviderMetadata>();
var dictionary = new Dictionary<string, IIdentityProviderPlugin>(StringComparer.OrdinalIgnoreCase);
var dictionary = new Dictionary<string, AuthorityIdentityProviderMetadata>(StringComparer.OrdinalIgnoreCase);
foreach (var provider in orderedProviders)
{
@@ -41,7 +50,9 @@ internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProv
continue;
}
if (!dictionary.TryAdd(provider.Name, provider))
var metadata = new AuthorityIdentityProviderMetadata(provider.Name, provider.Type, provider.Capabilities);
if (!dictionary.TryAdd(provider.Name, metadata))
{
logger.LogWarning(
"Duplicate identity provider name '{PluginName}' detected; ignoring additional registration for type '{PluginType}'.",
@@ -50,29 +61,29 @@ internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProv
continue;
}
uniqueProviders.Add(provider);
uniqueProviders.Add(metadata);
if (provider.Capabilities.SupportsPassword)
if (metadata.Capabilities.SupportsPassword)
{
password.Add(provider);
password.Add(metadata);
}
if (provider.Capabilities.SupportsMfa)
if (metadata.Capabilities.SupportsMfa)
{
mfa.Add(provider);
mfa.Add(metadata);
}
if (provider.Capabilities.SupportsClientProvisioning)
if (metadata.Capabilities.SupportsClientProvisioning)
{
clientProvisioning.Add(provider);
clientProvisioning.Add(metadata);
}
}
providersByName = dictionary;
providers = new ReadOnlyCollection<IIdentityProviderPlugin>(uniqueProviders);
passwordProviders = new ReadOnlyCollection<IIdentityProviderPlugin>(password);
mfaProviders = new ReadOnlyCollection<IIdentityProviderPlugin>(mfa);
clientProvisioningProviders = new ReadOnlyCollection<IIdentityProviderPlugin>(clientProvisioning);
providers = new ReadOnlyCollection<AuthorityIdentityProviderMetadata>(uniqueProviders);
passwordProviders = new ReadOnlyCollection<AuthorityIdentityProviderMetadata>(password);
mfaProviders = new ReadOnlyCollection<AuthorityIdentityProviderMetadata>(mfa);
clientProvisioningProviders = new ReadOnlyCollection<AuthorityIdentityProviderMetadata>(clientProvisioning);
AggregateCapabilities = new AuthorityIdentityProviderCapabilities(
SupportsPassword: passwordProviders.Count > 0,
@@ -80,24 +91,56 @@ internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProv
SupportsClientProvisioning: clientProvisioningProviders.Count > 0);
}
public IReadOnlyCollection<IIdentityProviderPlugin> Providers => providers;
public IReadOnlyCollection<AuthorityIdentityProviderMetadata> Providers => providers;
public IReadOnlyCollection<IIdentityProviderPlugin> PasswordProviders => passwordProviders;
public IReadOnlyCollection<AuthorityIdentityProviderMetadata> PasswordProviders => passwordProviders;
public IReadOnlyCollection<IIdentityProviderPlugin> MfaProviders => mfaProviders;
public IReadOnlyCollection<AuthorityIdentityProviderMetadata> MfaProviders => mfaProviders;
public IReadOnlyCollection<IIdentityProviderPlugin> ClientProvisioningProviders => clientProvisioningProviders;
public IReadOnlyCollection<AuthorityIdentityProviderMetadata> ClientProvisioningProviders => clientProvisioningProviders;
public AuthorityIdentityProviderCapabilities AggregateCapabilities { get; }
public bool TryGet(string name, [NotNullWhen(true)] out IIdentityProviderPlugin? provider)
public bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? metadata)
{
if (string.IsNullOrWhiteSpace(name))
{
provider = null;
metadata = null;
return false;
}
return providersByName.TryGetValue(name, out provider);
return providersByName.TryGetValue(name, out metadata);
}
public async ValueTask<AuthorityIdentityProviderHandle> AcquireAsync(string name, CancellationToken cancellationToken)
{
if (!providersByName.TryGetValue(name, out var metadata))
{
throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered.");
}
cancellationToken.ThrowIfCancellationRequested();
var scope = serviceProvider.CreateAsyncScope();
try
{
var provider = scope.ServiceProvider
.GetServices<IIdentityProviderPlugin>()
.FirstOrDefault(p => string.Equals(p.Name, metadata.Name, StringComparison.OrdinalIgnoreCase));
if (provider is null)
{
await scope.DisposeAsync().ConfigureAwait(false);
throw new InvalidOperationException($"Identity provider plugin '{metadata.Name}' could not be resolved.");
}
cancellationToken.ThrowIfCancellationRequested();
return new AuthorityIdentityProviderHandle(scope, metadata, provider);
}
catch
{
await scope.DisposeAsync().ConfigureAwait(false);
throw;
}
}
}

View File

@@ -1,3 +1,4 @@
using System.Linq;
using OpenIddict.Abstractions;
using StellaOps.Authority.Plugins.Abstractions;
@@ -50,11 +51,11 @@ internal static class AuthorityIdentityProviderSelector
internal sealed record ProviderSelectionResult(
bool Succeeded,
IIdentityProviderPlugin? Provider,
AuthorityIdentityProviderMetadata? Provider,
string? Error,
string? Description)
{
public static ProviderSelectionResult Success(IIdentityProviderPlugin provider)
public static ProviderSelectionResult Success(AuthorityIdentityProviderMetadata provider)
=> new(true, provider, null, null);
public static ProviderSelectionResult Failure(string error, string description)

View File

@@ -1,5 +1,5 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
@@ -159,25 +159,28 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditConfidentialProperty] =
string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);
IIdentityProviderPlugin? provider = null;
if (!string.IsNullOrWhiteSpace(document.Plugin))
{
if (!registry.TryGet(document.Plugin, out provider))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Configured identity provider is unavailable.");
logger.LogWarning("Client credentials validation failed for {ClientId}: provider {Provider} unavailable.", context.ClientId, document.Plugin);
return;
}
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditProviderProperty] = provider.Name;
if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null)
{
context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Associated identity provider does not support client provisioning.");
logger.LogWarning("Client credentials validation failed for {ClientId}: provider {Provider} lacks client provisioning capabilities.", context.ClientId, provider.Name);
return;
}
}
AuthorityIdentityProviderMetadata? providerMetadata = null;
if (!string.IsNullOrWhiteSpace(document.Plugin))
{
if (!registry.TryGet(document.Plugin, out providerMetadata))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Configured identity provider is unavailable.");
logger.LogWarning("Client credentials validation failed for {ClientId}: provider {Provider} unavailable.", context.ClientId, document.Plugin);
return;
}
await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false);
var providerInstance = providerHandle.Provider;
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditProviderProperty] = providerMetadata.Name;
if (!providerMetadata.Capabilities.SupportsClientProvisioning || providerInstance.ClientProvisioning is null)
{
context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Associated identity provider does not support client provisioning.");
logger.LogWarning("Client credentials validation failed for {ClientId}: provider {Provider} lacks client provisioning capabilities.", context.ClientId, providerMetadata.Name);
return;
}
}
var allowedGrantTypes = ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.AllowedGrantTypes);
if (allowedGrantTypes.Count > 0 &&
@@ -191,28 +194,28 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
var requiresSecret = string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase);
if (requiresSecret)
{
if (string.IsNullOrWhiteSpace(document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client secret is not configured.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret not configured.", document.ClientId);
return;
}
if (string.IsNullOrWhiteSpace(context.ClientSecret) ||
!ClientCredentialHandlerHelpers.VerifySecret(context.ClientSecret, document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Invalid client credentials.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret verification failed.", document.ClientId);
return;
}
}
else if (!string.IsNullOrWhiteSpace(context.ClientSecret) && !string.IsNullOrWhiteSpace(document.SecretHash) &&
!ClientCredentialHandlerHelpers.VerifySecret(context.ClientSecret, document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Invalid client credentials.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret verification failed.", document.ClientId);
return;
}
if (string.IsNullOrWhiteSpace(document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client secret is not configured.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret not configured.", document.ClientId);
return;
}
if (string.IsNullOrWhiteSpace(context.ClientSecret) ||
!ClientCredentialHandlerHelpers.VerifySecret(context.ClientSecret, document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Invalid client credentials.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret verification failed.", document.ClientId);
return;
}
}
else if (!string.IsNullOrWhiteSpace(context.ClientSecret) && !string.IsNullOrWhiteSpace(document.SecretHash) &&
!ClientCredentialHandlerHelpers.VerifySecret(context.ClientSecret, document.SecretHash))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Invalid client credentials.");
logger.LogWarning("Client credentials validation failed for {ClientId}: secret verification failed.", document.ClientId);
return;
}
var allowedScopes = ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.AllowedScopes);
var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(
@@ -230,11 +233,11 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle
context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes;
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = document;
if (provider is not null)
{
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty] = provider.Name;
activity?.SetTag("authority.identity_provider", provider.Name);
}
if (providerMetadata is not null)
{
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty] = providerMetadata.Name;
activity?.SetTag("authority.identity_provider", providerMetadata.Name);
}
context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes;
logger.LogInformation("Client credentials validated for {ClientId}.", document.ClientId);
@@ -373,70 +376,88 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
_ => new[] { OpenIddictConstants.Destinations.AccessToken }
});
var (provider, descriptor) = await ResolveProviderAsync(context, document).ConfigureAwait(false);
if (context.IsRejected)
{
logger.LogWarning("Client credentials request rejected for {ClientId} during provider resolution.", document.ClientId);
return;
}
if (provider is null)
{
if (!string.IsNullOrWhiteSpace(document.Plugin))
{
identity.SetClaim(StellaOpsClaimTypes.IdentityProvider, document.Plugin);
activity?.SetTag("authority.identity_provider", document.Plugin);
}
}
else
{
identity.SetClaim(StellaOpsClaimTypes.IdentityProvider, provider.Name);
activity?.SetTag("authority.identity_provider", provider.Name);
}
ApplySenderConstraintClaims(context, identity, document);
var principal = new ClaimsPrincipal(identity);
var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var scopesValue) &&
scopesValue is IReadOnlyList<string> resolvedScopes
? resolvedScopes
: ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.AllowedScopes);
if (grantedScopes.Count > 0)
{
principal.SetScopes(grantedScopes);
}
else
{
principal.SetScopes(Array.Empty<string>());
}
if (configuredAudiences.Count > 0)
{
principal.SetAudiences(configuredAudiences);
}
if (provider is not null && descriptor is not null)
{
var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user: null, descriptor);
await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false);
}
var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false);
await PersistTokenAsync(context, document, tokenId, grantedScopes, session, activity).ConfigureAwait(false);
context.Principal = principal;
context.HandleRequest();
logger.LogInformation("Issued client credentials access token for {ClientId} with scopes {Scopes}.", document.ClientId, grantedScopes);
}
private async ValueTask<(IIdentityProviderPlugin? Provider, AuthorityClientDescriptor? Descriptor)> ResolveProviderAsync(
OpenIddictServerEvents.HandleTokenRequestContext context,
AuthorityClientDocument document)
{
string? providerName = null;
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientProviderTransactionProperty, out var providerValue) &&
var (providerHandle, descriptor) = await ResolveProviderAsync(context, document).ConfigureAwait(false);
if (context.IsRejected)
{
if (providerHandle is not null)
{
await providerHandle.DisposeAsync().ConfigureAwait(false);
}
logger.LogWarning("Client credentials request rejected for {ClientId} during provider resolution.", document.ClientId);
return;
}
AuthorityIdentityProviderHandle? handle = providerHandle;
try
{
var provider = handle?.Provider;
if (provider is null)
{
if (!string.IsNullOrWhiteSpace(document.Plugin))
{
identity.SetClaim(StellaOpsClaimTypes.IdentityProvider, document.Plugin);
activity?.SetTag("authority.identity_provider", document.Plugin);
}
}
else
{
identity.SetClaim(StellaOpsClaimTypes.IdentityProvider, provider.Name);
activity?.SetTag("authority.identity_provider", provider.Name);
}
ApplySenderConstraintClaims(context, identity, document);
var principal = new ClaimsPrincipal(identity);
var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var scopesValue) &&
scopesValue is IReadOnlyList<string> resolvedScopes
? resolvedScopes
: ClientCredentialHandlerHelpers.Split(document.Properties, AuthorityClientMetadataKeys.AllowedScopes);
if (grantedScopes.Count > 0)
{
principal.SetScopes(grantedScopes);
}
else
{
principal.SetScopes(Array.Empty<string>());
}
if (configuredAudiences.Count > 0)
{
principal.SetAudiences(configuredAudiences);
}
if (provider is not null && descriptor is not null)
{
var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user: null, descriptor);
await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false);
}
var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false);
await PersistTokenAsync(context, document, tokenId, grantedScopes, session, activity).ConfigureAwait(false);
context.Principal = principal;
context.HandleRequest();
logger.LogInformation("Issued client credentials access token for {ClientId} with scopes {Scopes}.", document.ClientId, grantedScopes);
}
finally
{
if (handle is not null)
{
await handle.DisposeAsync().ConfigureAwait(false);
}
}
}
private async ValueTask<(AuthorityIdentityProviderHandle? Handle, AuthorityClientDescriptor? Descriptor)> ResolveProviderAsync(
OpenIddictServerEvents.HandleTokenRequestContext context,
AuthorityClientDocument document)
{
string? providerName = null;
if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientProviderTransactionProperty, out var providerValue) &&
providerValue is string storedProvider)
{
providerName = storedProvider;
@@ -446,27 +467,46 @@ internal sealed class HandleClientCredentialsHandler : IOpenIddictServerHandler<
providerName = document.Plugin;
}
if (string.IsNullOrWhiteSpace(providerName))
{
return (null, null);
}
if (!registry.TryGet(providerName, out var provider) || provider.ClientProvisioning is null)
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Configured identity provider is unavailable.");
return (null, null);
}
var descriptor = await provider.ClientProvisioning.FindByClientIdAsync(document.ClientId, context.CancellationToken).ConfigureAwait(false);
if (descriptor is null)
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client registration was not found.");
return (null, null);
}
return (provider, descriptor);
}
if (string.IsNullOrWhiteSpace(providerName))
{
return (null, null);
}
if (!registry.TryGet(providerName, out var metadata))
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Configured identity provider is unavailable.");
return (null, null);
}
var handle = await registry.AcquireAsync(metadata.Name, context.CancellationToken).ConfigureAwait(false);
try
{
var provider = handle.Provider;
if (provider.ClientProvisioning is null)
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Associated identity provider does not support client provisioning.");
await handle.DisposeAsync().ConfigureAwait(false);
return (null, null);
}
var descriptor = await provider.ClientProvisioning.FindByClientIdAsync(document.ClientId, context.CancellationToken).ConfigureAwait(false);
if (descriptor is null)
{
context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client registration was not found.");
await handle.DisposeAsync().ConfigureAwait(false);
return (null, null);
}
return (handle, descriptor);
}
catch
{
await handle.DisposeAsync().ConfigureAwait(false);
throw;
}
}
private async ValueTask PersistTokenAsync(
OpenIddictServerEvents.HandleTokenRequestContext context,

View File

@@ -367,66 +367,79 @@ internal sealed class ValidateDpopProofHandler : IOpenIddictServerHandler<OpenId
return new Uri(url, UriKind.Absolute);
}
private static string? ResolveNonceAudience(OpenIddictRequest request, AuthorityDpopNonceOptions nonceOptions, IReadOnlyList<string> configuredAudiences)
{
if (!nonceOptions.Enabled || request is null)
{
return null;
}
if (request.Resources is not null)
{
foreach (var resource in request.Resources)
{
if (string.IsNullOrWhiteSpace(resource))
{
continue;
}
var normalized = resource.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
if (request.Audiences is not null)
{
foreach (var audience in request.Audiences)
{
if (string.IsNullOrWhiteSpace(audience))
{
continue;
}
var normalized = audience.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
if (configuredAudiences is { Count: > 0 })
{
foreach (var audience in configuredAudiences)
{
if (string.IsNullOrWhiteSpace(audience))
{
continue;
}
var normalized = audience.Trim();
if (nonceOptions.RequiredAudiences.Contains(normalized))
{
return normalized;
}
}
}
return null;
}
private static string? ResolveNonceAudience(
OpenIddictRequest request,
AuthorityDpopNonceOptions nonceOptions,
IReadOnlyList<string> configuredAudiences)
{
if (!nonceOptions.Enabled || request is null)
{
return null;
}
var normalizedAudiences = nonceOptions.NormalizedAudiences;
IReadOnlySet<string> effectiveAudiences;
if (normalizedAudiences.Count > 0)
{
effectiveAudiences = normalizedAudiences;
}
else if (nonceOptions.RequiredAudiences.Count > 0)
{
effectiveAudiences = nonceOptions.RequiredAudiences.ToHashSet(StringComparer.OrdinalIgnoreCase);
}
else
{
return null;
}
bool TryMatch(string? candidate, out string normalized)
{
normalized = string.Empty;
if (string.IsNullOrWhiteSpace(candidate))
{
return false;
}
normalized = candidate.Trim();
return effectiveAudiences.Contains(normalized);
}
if (request.Resources is not null)
{
foreach (var resource in request.Resources)
{
if (TryMatch(resource, out var normalized))
{
return normalized;
}
}
}
if (request.Audiences is not null)
{
foreach (var audience in request.Audiences)
{
if (TryMatch(audience, out var normalized))
{
return normalized;
}
}
}
if (configuredAudiences is { Count: > 0 })
{
foreach (var audience in configuredAudiences)
{
if (TryMatch(audience, out var normalized))
{
return normalized;
}
}
}
return null;
}
private async ValueTask ChallengeNonceAsync(
OpenIddictServerEvents.ValidateTokenRequestContext context,

View File

@@ -110,6 +110,8 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
return;
}
var selectedProvider = selection.Provider!;
if (string.IsNullOrWhiteSpace(context.Request.Username) || string.IsNullOrEmpty(context.Request.Password))
{
var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
@@ -119,7 +121,7 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
AuthEventOutcome.Failure,
"Both username and password must be provided.",
clientId,
providerName: selection.Provider?.Name,
providerName: selectedProvider.Name,
user: null,
username: context.Request.Username,
scopes: requestedScopes,
@@ -134,9 +136,9 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler<Op
return;
}
context.Transaction.Properties[AuthorityOpenIddictConstants.ProviderTransactionProperty] = selection.Provider!.Name;
activity?.SetTag("authority.identity_provider", selection.Provider.Name);
logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selection.Provider.Name);
context.Transaction.Properties[AuthorityOpenIddictConstants.ProviderTransactionProperty] = selectedProvider.Name;
activity?.SetTag("authority.identity_provider", selectedProvider.Name);
logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selectedProvider.Name);
}
}
@@ -195,10 +197,10 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
? value as string
: null;
IIdentityProviderPlugin? resolvedProvider;
AuthorityIdentityProviderMetadata? providerMetadata = null;
if (!string.IsNullOrWhiteSpace(providerName))
{
if (!registry.TryGet(providerName!, out var explicitProvider))
if (!registry.TryGet(providerName!, out providerMetadata))
{
var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord(
timeProvider,
@@ -221,8 +223,6 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username);
return;
}
resolvedProvider = explicitProvider;
}
else
{
@@ -251,11 +251,17 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
return;
}
resolvedProvider = selection.Provider;
providerName = selection.Provider?.Name;
providerMetadata = selection.Provider;
providerName = providerMetadata?.Name;
}
var provider = resolvedProvider ?? throw new InvalidOperationException("No identity provider resolved for password grant.");
if (providerMetadata is null)
{
throw new InvalidOperationException("No identity provider metadata resolved for password grant.");
}
await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false);
var provider = providerHandle.Provider;
var username = context.Request.Username;
var password = context.Request.Password;
@@ -268,7 +274,7 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
AuthEventOutcome.Failure,
"Both username and password must be provided.",
clientId,
provider.Name,
providerMetadata.Name,
user: null,
username: username,
scopes: requestedScopes,
@@ -301,7 +307,7 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
outcome,
verification.Message,
clientId,
provider.Name,
providerMetadata.Name,
verification.User,
username,
scopes: requestedScopes,
@@ -360,7 +366,7 @@ internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler<Open
AuthEventOutcome.Success,
verification.Message,
clientId,
provider.Name,
providerMetadata.Name,
verification.User,
username,
scopes: requestedScopes,

View File

@@ -141,13 +141,16 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler<Open
return;
}
if (!registry.TryGet(providerName, out var provider))
if (!registry.TryGet(providerName, out var providerMetadata))
{
context.Reject(OpenIddictConstants.Errors.InvalidToken, "The identity provider associated with the token is unavailable.");
logger.LogWarning("Access token validation failed: provider {Provider} unavailable for subject {Subject}.", providerName, context.Principal.GetClaim(OpenIddictConstants.Claims.Subject));
return;
}
await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false);
var provider = providerHandle.Provider;
AuthorityUserDescriptor? user = null;
AuthorityClientDescriptor? client = null;

View File

@@ -6,6 +6,7 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Plugins.Abstractions;
using StellaOps.Plugin.DependencyInjection;
using StellaOps.Plugin.Hosting;
namespace StellaOps.Authority.Plugins;
@@ -51,7 +52,9 @@ internal static class AuthorityPluginLoader
IReadOnlyCollection<string> missingOrdered,
ILogger? logger)
{
var registrarLookup = DiscoverRegistrars(loadedAssemblies, logger);
var registrarCandidates = DiscoverRegistrars(loadedAssemblies);
var pluginTypeLookup = new Dictionary<string, Type>(StringComparer.OrdinalIgnoreCase);
var registrarTypeLookup = new Dictionary<Type, string>();
var registered = new List<string>();
var failures = new List<AuthorityPluginRegistrationFailure>();
@@ -79,7 +82,16 @@ internal static class AuthorityPluginLoader
continue;
}
if (!registrarLookup.TryGetValue(manifest.Type, out var registrar))
var activation = TryResolveActivationForManifest(
services,
manifest.Type,
registrarCandidates,
pluginTypeLookup,
registrarTypeLookup,
logger,
out var registrarType);
if (activation is null || registrarType is null)
{
var reason = $"No registrar found for plugin type '{manifest.Type}'.";
logger?.LogError(
@@ -92,7 +104,9 @@ internal static class AuthorityPluginLoader
try
{
registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
PluginServiceRegistration.RegisterAssemblyMetadata(services, registrarType.Assembly, logger);
activation.Registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration));
registered.Add(manifest.Name);
logger?.LogInformation(
@@ -109,6 +123,10 @@ internal static class AuthorityPluginLoader
manifest.Name);
failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason));
}
finally
{
activation.Dispose();
}
}
if (missingOrdered.Count > 0)
@@ -124,11 +142,9 @@ internal static class AuthorityPluginLoader
return new AuthorityPluginRegistrationSummary(registered, failures, missingOrdered);
}
private static Dictionary<string, IAuthorityPluginRegistrar> DiscoverRegistrars(
IReadOnlyCollection<LoadedPluginDescriptor> loadedAssemblies,
ILogger? logger)
private static IReadOnlyList<Type> DiscoverRegistrars(IReadOnlyCollection<LoadedPluginDescriptor> loadedAssemblies)
{
var lookup = new Dictionary<string, IAuthorityPluginRegistrar>(StringComparer.OrdinalIgnoreCase);
var registrars = new List<Type>();
foreach (var descriptor in loadedAssemblies)
{
@@ -139,43 +155,144 @@ internal static class AuthorityPluginLoader
continue;
}
try
{
if (Activator.CreateInstance(type) is not IAuthorityPluginRegistrar registrar)
{
continue;
}
if (string.IsNullOrWhiteSpace(registrar.PluginType))
{
logger?.LogWarning(
"Authority plugin registrar '{RegistrarType}' returned an empty plugin type and will be ignored.",
type.FullName);
continue;
}
if (lookup.TryGetValue(registrar.PluginType, out var existing))
{
logger?.LogWarning(
"Multiple registrars detected for plugin type '{PluginType}'. Replacing '{ExistingType}' with '{RegistrarType}'.",
registrar.PluginType,
existing.GetType().FullName,
type.FullName);
}
lookup[registrar.PluginType] = registrar;
}
catch (Exception ex)
{
logger?.LogError(
ex,
"Failed to instantiate Authority plugin registrar '{RegistrarType}'.",
type.FullName);
}
registrars.Add(type);
}
}
return lookup;
return registrars;
}
/// <summary>
/// Resolves a registrar activation for the plugin type declared in a manifest.
/// Registrar candidate types are probed lazily: a candidate is activated once to read its
/// <c>PluginType</c>, and the result is memoised in the two lookup dictionaries so later
/// manifests do not re-activate unrelated registrars.
/// </summary>
/// <param name="services">Service collection used to build a throw-away provider per activation.</param>
/// <param name="pluginType">Plugin type string from the manifest being registered.</param>
/// <param name="registrarCandidates">All registrar types discovered from loaded plugin assemblies.</param>
/// <param name="pluginTypeLookup">Cache: plugin-type string → registrar <see cref="Type"/> (successful probes only).</param>
/// <param name="registrarTypeLookup">Cache: registrar <see cref="Type"/> → reported plugin type; <see cref="string.Empty"/> marks a candidate that failed activation or reported an empty plugin type, so it is skipped on later calls.</param>
/// <param name="logger">Optional logger for activation warnings/errors.</param>
/// <param name="resolvedType">The registrar type that matched, or null when resolution fails.</param>
/// <returns>A live activation the caller must dispose, or null when no registrar matches.</returns>
private static RegistrarActivation? TryResolveActivationForManifest(
IServiceCollection services,
string pluginType,
IReadOnlyList<Type> registrarCandidates,
IDictionary<string, Type> pluginTypeLookup,
IDictionary<Type, string> registrarTypeLookup,
ILogger? logger,
out Type? resolvedType)
{
resolvedType = null;
// Fast path: a previous manifest already mapped this plugin type to a registrar type.
if (pluginTypeLookup.TryGetValue(pluginType, out var cachedType))
{
var cachedActivation = CreateRegistrarActivation(services, cachedType, logger);
if (cachedActivation is null)
{
// The cached registrar no longer activates; evict both cache entries so a
// later call can fall through to the full candidate scan again.
pluginTypeLookup.Remove(pluginType);
registrarTypeLookup.Remove(cachedType);
return null;
}
resolvedType = cachedType;
return cachedActivation;
}
foreach (var candidate in registrarCandidates)
{
if (registrarTypeLookup.TryGetValue(candidate, out var knownType))
{
// string.Empty sentinel: candidate previously failed to activate or
// reported an empty plugin type — skip without re-activating.
if (string.IsNullOrWhiteSpace(knownType))
{
continue;
}
if (string.Equals(knownType, pluginType, StringComparison.OrdinalIgnoreCase))
{
var activation = CreateRegistrarActivation(services, candidate, logger);
if (activation is null)
{
// Previously-working registrar now fails; drop its cache entries.
registrarTypeLookup.Remove(candidate);
pluginTypeLookup.Remove(knownType);
return null;
}
resolvedType = candidate;
return activation;
}
continue;
}
// Unknown candidate: activate it once to discover which plugin type it serves.
var attempt = CreateRegistrarActivation(services, candidate, logger);
if (attempt is null)
{
registrarTypeLookup[candidate] = string.Empty;
continue;
}
var candidateType = attempt.Registrar.PluginType;
if (string.IsNullOrWhiteSpace(candidateType))
{
logger?.LogWarning(
"Authority plugin registrar '{RegistrarType}' reported an empty plugin type and will be ignored.",
candidate.FullName);
registrarTypeLookup[candidate] = string.Empty;
attempt.Dispose();
continue;
}
// Memoise the probe result even when it does not match the current manifest,
// so subsequent manifests hit the caches instead of re-activating.
registrarTypeLookup[candidate] = candidateType;
pluginTypeLookup[candidateType] = candidate;
if (string.Equals(candidateType, pluginType, StringComparison.OrdinalIgnoreCase))
{
resolvedType = candidate;
return attempt;
}
attempt.Dispose();
}
return null;
}
/// <summary>
/// Builds a throw-away service provider (with scope validation enabled), opens a scope,
/// and instantiates <paramref name="registrarType"/> inside it. Ownership of the provider
/// and scope transfers to the returned <see cref="RegistrarActivation"/>; on any failure
/// the partially created resources are disposed, the error is logged, and null is returned.
/// </summary>
private static RegistrarActivation? CreateRegistrarActivation(IServiceCollection services, Type registrarType, ILogger? logger)
{
    ServiceProvider? builtProvider = null;
    IServiceScope? activationScope = null;
    try
    {
        var providerOptions = new ServiceProviderOptions
        {
            ValidateScopes = true
        };
        builtProvider = services.BuildServiceProvider(providerOptions);
        activationScope = builtProvider.CreateScope();
        var instance = ActivatorUtilities.GetServiceOrCreateInstance(activationScope.ServiceProvider, registrarType);
        return new RegistrarActivation(builtProvider, activationScope, (IAuthorityPluginRegistrar)instance);
    }
    catch (Exception ex)
    {
        logger?.LogError(
            ex,
            "Failed to activate Authority plugin registrar '{RegistrarType}'.",
            registrarType.FullName);
        activationScope?.Dispose();
        builtProvider?.Dispose();
        return null;
    }
}
/// <summary>
/// Pairs a registrar instance with the throw-away provider/scope it was resolved from,
/// so the caller can release both once registration for the manifest completes.
/// </summary>
private sealed class RegistrarActivation : IDisposable
{
    private readonly ServiceProvider ownedProvider;
    private readonly IServiceScope ownedScope;

    public RegistrarActivation(ServiceProvider provider, IServiceScope scope, IAuthorityPluginRegistrar registrar)
    {
        ownedProvider = provider;
        ownedScope = scope;
        Registrar = registrar;
    }

    /// <summary>The registrar resolved from the owned scope.</summary>
    public IAuthorityPluginRegistrar Registrar { get; }

    public void Dispose()
    {
        // Dispose the scope before the provider that created it.
        ownedScope.Dispose();
        ownedProvider.Dispose();
    }
}
private static bool IsAssemblyLoaded(

View File

@@ -416,24 +416,24 @@ if (authorityOptions.Bootstrap.Enabled)
return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." });
}
if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata))
{
await ReleaseInviteAsync("Specified identity provider was not found.");
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
}
if (!provider.Capabilities.SupportsPassword)
if (!providerMetadata.Capabilities.SupportsPassword)
{
await ReleaseInviteAsync("Selected provider does not support password provisioning.");
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." });
}
if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password))
{
await ReleaseInviteAsync("Username and password are required.");
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty<string>(), inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." });
}
@@ -458,6 +458,9 @@ if (authorityOptions.Bootstrap.Enabled)
roles,
attributes);
await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false);
var provider = providerHandle.Provider;
try
{
var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false);
@@ -465,7 +468,7 @@ if (authorityOptions.Bootstrap.Enabled)
if (!result.Succeeded || result.Value is null)
{
await ReleaseInviteAsync(result.Message ?? "User provisioning failed.");
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles, inviteToken).ConfigureAwait(false);
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." });
}
@@ -478,11 +481,11 @@ if (authorityOptions.Bootstrap.Enabled)
}
}
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles, inviteToken).ConfigureAwait(false);
await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false);
return Results.Ok(new
{
provider = provider.Name,
provider = providerMetadata.Name,
subjectId = result.Value.SubjectId,
username = result.Value.Username
});
@@ -701,24 +704,34 @@ if (authorityOptions.Bootstrap.Enabled)
return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." });
}
if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider))
if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata))
{
await ReleaseInviteAsync("Specified identity provider was not found.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty<string>(), request?.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." });
}
if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null)
if (!providerMetadata.Capabilities.SupportsClientProvisioning)
{
await ReleaseInviteAsync("Selected provider does not support client provisioning.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." });
}
await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false);
var provider = providerHandle.Provider;
if (provider.ClientProvisioning is null)
{
await ReleaseInviteAsync("Selected provider does not support client provisioning.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." });
}
if (string.IsNullOrWhiteSpace(request.ClientId))
{
await ReleaseInviteAsync("ClientId is required.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." });
}
@@ -732,7 +745,7 @@ if (authorityOptions.Bootstrap.Enabled)
if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret))
{
await ReleaseInviteAsync("Confidential clients require a client secret.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." });
}
@@ -740,7 +753,7 @@ if (authorityOptions.Bootstrap.Enabled)
{
var errorMessage = redirectError ?? "Redirect URI validation failed.";
await ReleaseInviteAsync(errorMessage);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = errorMessage });
}
@@ -748,7 +761,7 @@ if (authorityOptions.Bootstrap.Enabled)
{
var errorMessage = postLogoutError ?? "Post-logout redirect URI validation failed.";
await ReleaseInviteAsync(errorMessage);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = errorMessage });
}
@@ -765,7 +778,7 @@ if (authorityOptions.Bootstrap.Enabled)
if (binding is null || string.IsNullOrWhiteSpace(binding.Thumbprint))
{
await ReleaseInviteAsync("Certificate binding thumbprint is required.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Certificate binding thumbprint is required.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Certificate binding thumbprint is required.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = "invalid_request", message = "Certificate binding thumbprint is required." });
}
@@ -801,7 +814,7 @@ if (authorityOptions.Bootstrap.Enabled)
if (!result.Succeeded || result.Value is null)
{
await ReleaseInviteAsync(result.Message ?? "Client provisioning failed.");
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." });
}
@@ -814,11 +827,11 @@ if (authorityOptions.Bootstrap.Enabled)
}
}
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty<string>(), request.Confidential, inviteToken).ConfigureAwait(false);
return Results.Ok(new
{
provider = provider.Name,
provider = providerMetadata.Name,
clientId = result.Value.ClientId,
confidential = result.Value.Confidential
});
@@ -1169,12 +1182,13 @@ app.UseAuthorization();
app.MapGet("/health", async (IAuthorityIdentityProviderRegistry registry, CancellationToken cancellationToken) =>
{
var pluginHealth = new List<object>();
foreach (var provider in registry.Providers)
foreach (var providerMetadata in registry.Providers)
{
var health = await provider.CheckHealthAsync(cancellationToken).ConfigureAwait(false);
await using var handle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false);
var health = await handle.Provider.CheckHealthAsync(cancellationToken).ConfigureAwait(false);
pluginHealth.Add(new
{
provider = provider.Name,
provider = providerMetadata.Name,
status = health.Status.ToString().ToLowerInvariant(),
message = health.Message
});

View File

@@ -20,13 +20,13 @@
| AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. |
| AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. |
| AUTHSTORAGE-MONGO-08-001 | DONE (2025-10-19) | Authority Core & Storage Guild | — | Harden Mongo session usage with causal consistency for mutations and follow-up reads. | • Scoped middleware/service creates `IClientSessionHandle` with causal consistency + majority read/write concerns<br>• Stores accept optional session parameter and reuse it for write + immediate reads<br>• GraphQL/HTTP pipelines updated to flow session through post-mutation queries<br>• Replica-set integration test exercises primary election and verifies read-your-write guarantees |
| AUTH-PLUGIN-COORD-08-002 | DOING (2025-10-19) | Authority Core, Plugin Platform Guild | PLUGIN-DI-08-001 | Coordinate scoped-service adoption for Authority plug-in registrars and background jobs ahead of PLUGIN-DI-08-002 implementation. | ✅ Workshop locked for 2025-10-20 15:00–16:00 UTC; ✅ Pre-read checklist in `docs/dev/authority-plugin-di-coordination.md`; ✅ Follow-up tasks captured in module backlogs before code changes begin. |
| AUTH-DPOP-11-001 | DOING (2025-10-19) | Authority Core & Security Guild | — | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | • Proof handler validates method/uri/hash + replay; nonce issuing/consumption implemented for in-memory + Redis stores<br>• Client credential path stamps `cnf.jkt` and persists sender metadata<br>• Remaining: finalize Redis configuration surface (docs/sample config), unskip nonce-challenge regression once HTTP pipeline emits high-value audiences, refresh operator docs |
> Remark (2025-10-19): DPoP handler now seeds request resources/audiences from client metadata; nonce challenge integration test re-enabled (still requires full suite once Concelier build restored).
| AUTH-PLUGIN-COORD-08-002 | DONE (2025-10-20) | Authority Core, Plugin Platform Guild | PLUGIN-DI-08-001 | Coordinate scoped-service adoption for Authority plug-in registrars and background jobs ahead of PLUGIN-DI-08-002 implementation. | ✅ Workshop completed 2025-10-20 15:00–16:05 UTC with notes/action log in `docs/dev/authority-plugin-di-coordination.md`; ✅ Follow-up backlog updates assigned via documented action items ahead of PLUGIN-DI-08-002 delivery. |
| AUTH-DPOP-11-001 | DONE (2025-10-20) | Authority Core & Security Guild | — | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | ✅ Redis-configurable nonce store surfaced via `security.senderConstraints.dpop.nonce` with sample YAML and architecture docs refreshed<br>✅ High-value audience enforcement uses normalised required audiences to avoid whitespace/case drift<br>✅ Operator guide updated with Redis-backed nonce snippet and env-var override guidance; integration test already covers nonce challenge |
> Remark (2025-10-20): `etc/authority.yaml.sample` gains senderConstraint sections (rate limits, DPoP, mTLS), docs (`docs/ARCHITECTURE_AUTHORITY.md`, `docs/11_AUTHORITY.md`, plan) refreshed. `ResolveNonceAudience` now relies on `NormalizedAudiences` and options trim persisted values. `dotnet test StellaOps.Authority.sln` attempted (2025-10-20 15:12UTC) but failed on `NU1900` because the mirrored NuGet service index `https://mirrors.ablera.dev/nuget/nuget-mirror/v3/index.json` was unreachable; no project build executed.
| AUTH-MTLS-11-002 | DOING (2025-10-19) | Authority Core & Security Guild | — | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | • Certificate validator scaffold plus cnf stamping present; tokens persist sender thumbprints<br>• Remaining: provisioning/storage for certificate bindings, SAN/CA validation, introspection propagation, integration tests/docs before marking DONE |
> Remark (2025-10-19): Client provisioning accepts certificate bindings; validator enforces SAN types/CA allow-list with rotation grace; mtls integration tests updated (full suite still blocked by upstream build).
> Remark (2025-10-19, AUTHSTORAGE-MONGO-08-001): Prerequisites re-checked (none outstanding). Session accessor wired through Authority pipeline; stores accept optional sessions; added replica-set election regression test for read-your-write.
> Remark (2025-10-19, AUTH-DPOP-11-001): Handler, nonce store, and persistence hooks merged; Redis-backed configuration + end-to-end nonce enforcement still open. Full solution test blocked by `StellaOps.Concelier.Storage.Mongo` compile errors.
> Remark (2025-10-19, AUTH-DPOP-11-001): Handler, nonce store, and persistence hooks merged; Redis-backed configuration + end-to-end nonce enforcement still open. (Superseded by 2025-10-20 update above.)
> Remark (2025-10-19, AUTH-MTLS-11-002): Certificate validator + cnf stamping delivered; binding storage, CA/SAN validation, integration suites outstanding before status can move to DONE.
> Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic.

View File

@@ -951,6 +951,15 @@ public sealed class CommandHandlersTests
// Returns the canned runtime-policy result configured on the stub.
public Task<RuntimePolicyEvaluationResult> EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
=> Task.FromResult(RuntimePolicyResult);
// Offline-kit operations are not exercised by these command-handler tests;
// the stub fails fast if a handler unexpectedly reaches them.
public Task<OfflineKitDownloadResult> DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken)
=> throw new NotSupportedException();
public Task<OfflineKitImportResult> ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken)
=> throw new NotSupportedException();
public Task<OfflineKitStatus> GetOfflineKitStatusAsync(CancellationToken cancellationToken)
=> throw new NotSupportedException();
}
private sealed class StubExecutor : IScannerExecutor

View File

@@ -1,24 +1,25 @@
using System;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.Transport;
using StellaOps.Cli.Tests.Testing;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.Transport;
using StellaOps.Cli.Tests.Testing;
using System.Linq;
namespace StellaOps.Cli.Tests.Services;
@@ -481,7 +482,352 @@ public sealed class BackendOperationsClientTests
Assert.Equal("manual-override", Assert.IsType<string>(secondary.AdditionalProperties["quietedBy"]));
}
private sealed class StubTokenClient : IStellaOpsTokenClient
[Fact]
public async Task DownloadOfflineKitAsync_DownloadsBundleAndWritesMetadata()
{
// Arrange: fabricate bundle/manifest payloads and compute the SHA-256 digests the
// backend metadata document advertises (with the "sha256:" prefix).
using var temp = new TempDirectory();
var bundleBytes = Encoding.UTF8.GetBytes("bundle-data");
var manifestBytes = Encoding.UTF8.GetBytes("{\"artifacts\":[]}");
var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();
var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant();
var metadataPayload = JsonSerializer.Serialize(new
{
bundleId = "2025-10-20-full",
bundleName = "stella-ops-offline-kit-2025-10-20.tgz",
bundleSha256 = $"sha256:{bundleDigest}",
bundleSize = (long)bundleBytes.Length,
bundleUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz",
bundleSignatureName = "stella-ops-offline-kit-2025-10-20.tgz.sig",
bundleSignatureUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz.sig",
manifestName = "offline-manifest-2025-10-20.json",
manifestSha256 = $"sha256:{manifestDigest}",
manifestUrl = "https://mirror.example/offline-manifest-2025-10-20.json",
manifestSignatureName = "offline-manifest-2025-10-20.json.jws",
manifestSignatureUrl = "https://mirror.example/offline-manifest-2025-10-20.json.jws",
capturedAt = DateTimeOffset.UtcNow
}, new JsonSerializerOptions(JsonSerializerDefaults.Web));
// First handler answers the metadata lookup; the second serves the artifact
// downloads, dispatching on the requested file extension.
var handler = new StubHttpMessageHandler(
(request, _) =>
{
Assert.Equal("https://backend.example/api/offline-kit/bundles/latest", request.RequestUri!.ToString());
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(metadataPayload)
};
},
(request, _) =>
{
var absolute = request.RequestUri!.AbsoluteUri;
if (absolute.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
{
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(bundleBytes)
};
}
if (absolute.EndsWith(".json", StringComparison.OrdinalIgnoreCase))
{
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(manifestBytes)
};
}
// Signature artifacts (.sig/.jws) — empty body is sufficient for this test.
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(Array.Empty<byte>())
};
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://backend.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://backend.example",
Offline = new StellaOpsCliOfflineOptions
{
KitsDirectory = temp.Path
}
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
// Act: fresh download (no cache, no resume).
var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: false, CancellationToken.None);
// Assert: all artifacts land on disk and the sidecar metadata file records the
// bundle id and the digest (normalised without the "sha256:" prefix).
Assert.False(result.FromCache);
Assert.True(File.Exists(result.BundlePath));
Assert.True(File.Exists(result.ManifestPath));
Assert.NotNull(result.BundleSignaturePath);
Assert.NotNull(result.ManifestSignaturePath);
Assert.True(File.Exists(result.MetadataPath));
using var metadata = JsonDocument.Parse(File.ReadAllText(result.MetadataPath));
Assert.Equal("2025-10-20-full", metadata.RootElement.GetProperty("bundleId").GetString());
Assert.Equal(bundleDigest, metadata.RootElement.GetProperty("bundleSha256").GetString());
}
[Fact]
public async Task DownloadOfflineKitAsync_ResumesPartialDownload()
{
// Arrange: seed a ".partial" file containing the first half of the bundle so the
// client should issue a ranged request for the remainder.
using var temp = new TempDirectory();
var bundleBytes = Encoding.UTF8.GetBytes("partial-download-data");
var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}");
var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();
var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant();
var metadataJson = JsonSerializer.Serialize(new
{
bundleId = "2025-10-21-full",
bundleName = "kit.tgz",
bundleSha256 = bundleDigest,
bundleSize = (long)bundleBytes.Length,
bundleUrl = "https://mirror.example/kit.tgz",
manifestName = "offline-manifest.json",
manifestSha256 = manifestDigest,
manifestUrl = "https://mirror.example/offline-manifest.json",
capturedAt = DateTimeOffset.UtcNow
}, new JsonSerializerOptions(JsonSerializerDefaults.Web));
var partialPath = Path.Combine(temp.Path, "kit.tgz.partial");
await File.WriteAllBytesAsync(partialPath, bundleBytes.AsSpan(0, bundleBytes.Length / 2).ToArray());
var handler = new StubHttpMessageHandler(
(request, _) => new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(metadataJson)
},
(request, _) =>
{
if (request.RequestUri!.AbsoluteUri.EndsWith("kit.tgz", StringComparison.OrdinalIgnoreCase))
{
// The client must request only the missing half via a Range header,
// and we answer with 206 Partial Content.
Assert.NotNull(request.Headers.Range);
Assert.Equal(bundleBytes.Length / 2, request.Headers.Range!.Ranges.Single().From);
return new HttpResponseMessage(HttpStatusCode.PartialContent)
{
Content = new ByteArrayContent(bundleBytes.AsSpan(bundleBytes.Length / 2).ToArray())
};
}
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(manifestBytes)
};
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://backend.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://backend.example",
Offline = new StellaOpsCliOfflineOptions
{
KitsDirectory = temp.Path
}
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
// Act: resume enabled — the partial file should be completed, not restarted.
var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: true, CancellationToken.None);
// Assert: the reassembled bundle matches the advertised digest and full length.
Assert.Equal(bundleDigest, result.Descriptor.BundleSha256);
Assert.Equal(bundleBytes.Length, new FileInfo(result.BundlePath).Length);
}
[Fact]
public async Task ImportOfflineKitAsync_SendsMultipartPayload()
{
// Arrange: write bundle + manifest to disk alongside a ".metadata.json" sidecar,
// mirroring the layout a prior download would have produced.
using var temp = new TempDirectory();
var bundlePath = Path.Combine(temp.Path, "kit.tgz");
var manifestPath = Path.Combine(temp.Path, "offline-manifest.json");
var bundleBytes = Encoding.UTF8.GetBytes("bundle-content");
var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}");
await File.WriteAllBytesAsync(bundlePath, bundleBytes);
await File.WriteAllBytesAsync(manifestPath, manifestBytes);
var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();
var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant();
var metadata = new OfflineKitMetadataDocument
{
BundleId = "2025-10-21-full",
BundleName = "kit.tgz",
BundleSha256 = bundleDigest,
BundleSize = bundleBytes.Length,
BundlePath = bundlePath,
CapturedAt = DateTimeOffset.UtcNow,
DownloadedAt = DateTimeOffset.UtcNow,
Channel = "stable",
Kind = "full",
ManifestName = "offline-manifest.json",
ManifestSha256 = manifestDigest,
ManifestSize = manifestBytes.Length,
ManifestPath = manifestPath,
IsDelta = false,
BaseBundleId = null
};
await File.WriteAllTextAsync(bundlePath + ".metadata.json", JsonSerializer.Serialize(metadata, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }));
// Recording handler captures the multipart sections the client uploads.
var recordingHandler = new ImportRecordingHandler();
var httpClient = new HttpClient(recordingHandler)
{
BaseAddress = new Uri("https://backend.example")
};
var options = new StellaOpsCliOptions
{
BackendUrl = "https://backend.example"
};
var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
var request = new OfflineKitImportRequest(
bundlePath,
manifestPath,
null,
null,
metadata.BundleId,
metadata.BundleSha256,
metadata.BundleSize,
metadata.CapturedAt,
metadata.Channel,
metadata.Kind,
metadata.IsDelta,
metadata.BaseBundleId,
metadata.ManifestSha256,
metadata.ManifestSize);
// Act.
var result = await client.ImportOfflineKitAsync(request, CancellationToken.None);
// Assert: the import id comes from the recorded response, and the multipart
// payload contained metadata JSON plus both file parts with matching digests.
Assert.Equal("imp-1", result.ImportId);
Assert.NotNull(recordingHandler.MetadataJson);
Assert.NotNull(recordingHandler.BundlePayload);
Assert.NotNull(recordingHandler.ManifestPayload);
using var metadataJson = JsonDocument.Parse(recordingHandler.MetadataJson!);
Assert.Equal(bundleDigest, metadataJson.RootElement.GetProperty("bundleSha256").GetString());
Assert.Equal(manifestDigest, metadataJson.RootElement.GetProperty("manifestSha256").GetString());
}
[Fact]
// Verifies that GetOfflineKitStatusAsync calls the status endpoint and maps the
// "current" bundle fields and the per-component entries of the JSON response.
public async Task GetOfflineKitStatusAsync_ParsesResponse()
{
    var captured = DateTimeOffset.UtcNow;
    var imported = captured.AddMinutes(5);
    // Canned backend response shaped like the offline-kit status contract.
    var statusJson = JsonSerializer.Serialize(new
    {
        current = new
        {
            bundleId = "2025-10-22-full",
            channel = "stable",
            kind = "full",
            isDelta = false,
            baseBundleId = (string?)null,
            bundleSha256 = "sha256:abc123",
            bundleSize = 42,
            capturedAt = captured,
            importedAt = imported
        },
        components = new[]
        {
            new
            {
                name = "concelier-json",
                version = "2025-10-22",
                digest = "sha256:def456",
                capturedAt = captured,
                sizeBytes = 1234
            }
        }
    }, new JsonSerializerOptions(JsonSerializerDefaults.Web));
    var handler = new StubHttpMessageHandler(
        (request, _) =>
        {
            // The client must hit the fixed status route.
            Assert.Equal("https://backend.example/api/offline-kit/status", request.RequestUri!.ToString());
            return new HttpResponseMessage(HttpStatusCode.OK)
            {
                Content = new StringContent(statusJson)
            };
        });
    var httpClient = new HttpClient(handler)
    {
        BaseAddress = new Uri("https://backend.example")
    };
    var options = new StellaOpsCliOptions
    {
        BackendUrl = "https://backend.example"
    };
    var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug));
    var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger<BackendOperationsClient>());
    var status = await client.GetOfflineKitStatusAsync(CancellationToken.None);
    Assert.Equal("2025-10-22-full", status.BundleId);
    Assert.Equal("stable", status.Channel);
    Assert.Equal("full", status.Kind);
    Assert.False(status.IsDelta);
    Assert.Equal(42, status.BundleSize);
    Assert.Single(status.Components);
    Assert.Equal("concelier-json", status.Components[0].Name);
}
// Test double that captures the multipart sections posted to the offline-kit
// import endpoint so the test can assert on them after the call completes.
// Every request receives a canned "queued" import response.
private sealed class ImportRecordingHandler : HttpMessageHandler
{
    // Raw JSON of the "metadata" part, when one was sent.
    public string? MetadataJson { get; private set; }

    // Bytes of the "bundle" part, when one was sent.
    public byte[]? BundlePayload { get; private set; }

    // Bytes of the "manifest" part, when one was sent.
    public byte[]? ManifestPayload { get; private set; }

    protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        if (request.RequestUri!.AbsoluteUri.EndsWith("/api/offline-kit/import", StringComparison.OrdinalIgnoreCase))
        {
            // Fails the test immediately if the client did not send multipart content.
            var multipart = Assert.IsType<MultipartFormDataContent>(request.Content);
            foreach (var section in multipart)
            {
                var sectionName = section.Headers.ContentDisposition?.Name?.Trim('"');
                if (sectionName == "metadata")
                {
                    MetadataJson = await section.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
                }
                else if (sectionName == "bundle")
                {
                    BundlePayload = await section.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
                }
                else if (sectionName == "manifest")
                {
                    ManifestPayload = await section.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
                }
            }
        }

        return new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StringContent("{\"importId\":\"imp-1\",\"status\":\"queued\",\"submittedAt\":\"2025-10-21T00:00:00Z\"}")
        };
    }
}
private sealed class StubTokenClient : IStellaOpsTokenClient
{
private readonly StellaOpsTokenResult _tokenResult;

View File

@@ -27,6 +27,7 @@ internal static class CommandFactory
root.Add(BuildExcititorCommand(services, verboseOption, cancellationToken));
root.Add(BuildRuntimeCommand(services, verboseOption, cancellationToken));
root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildOfflineCommand(services, verboseOption, cancellationToken));
root.Add(BuildConfigCommand(options));
return root;
@@ -606,11 +607,102 @@ internal static class CommandFactory
return auth;
}
private static Command BuildConfigCommand(StellaOpsCliOptions options)
{
var config = new Command("config", "Inspect CLI configuration state.");
var show = new Command("show", "Display resolved configuration values.");
// Builds the `offline kit` command tree (pull / import / status) and wires each
// verb to its handler in CommandHandlers. Option parsing is deferred to the
// SetAction callbacks so the shared verbose flag is honored per invocation.
private static Command BuildOfflineCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
    var offline = new Command("offline", "Offline kit workflows and utilities.");
    var kit = new Command("kit", "Manage offline kit bundles.");

    // offline kit pull — download the newest (or a specific) bundle.
    var pull = new Command("pull", "Download the latest offline kit bundle.");
    var bundleIdOption = new Option<string?>("--bundle-id")
    {
        Description = "Optional bundle identifier. Defaults to the latest available."
    };
    var destinationOption = new Option<string?>("--destination")
    {
        Description = "Directory to store downloaded bundles (defaults to the configured offline kits directory)."
    };
    var overwriteOption = new Option<bool>("--overwrite")
    {
        Description = "Overwrite existing files even if checksums match."
    };
    var noResumeOption = new Option<bool>("--no-resume")
    {
        Description = "Disable resuming partial downloads."
    };
    pull.Add(bundleIdOption);
    pull.Add(destinationOption);
    pull.Add(overwriteOption);
    pull.Add(noResumeOption);
    pull.SetAction((parseResult, _) =>
    {
        var bundleId = parseResult.GetValue(bundleIdOption);
        var destination = parseResult.GetValue(destinationOption);
        var overwrite = parseResult.GetValue(overwriteOption);
        // CLI flag is the negative form; the handler takes the positive "resume".
        var resume = !parseResult.GetValue(noResumeOption);
        var verbose = parseResult.GetValue(verboseOption);
        return CommandHandlers.HandleOfflineKitPullAsync(services, bundleId, destination, overwrite, resume, verbose, cancellationToken);
    });

    // offline kit import — upload a previously downloaded bundle to the backend.
    var import = new Command("import", "Upload an offline kit bundle to the backend.");
    var bundleArgument = new Argument<string>("bundle")
    {
        Description = "Path to the offline kit tarball (.tgz)."
    };
    var manifestOption = new Option<string?>("--manifest")
    {
        Description = "Offline manifest JSON path (defaults to metadata or sibling file)."
    };
    var bundleSignatureOption = new Option<string?>("--bundle-signature")
    {
        Description = "Detached signature for the offline bundle (e.g. .sig)."
    };
    var manifestSignatureOption = new Option<string?>("--manifest-signature")
    {
        Description = "Detached signature for the offline manifest (e.g. .jws)."
    };
    import.Add(bundleArgument);
    import.Add(manifestOption);
    import.Add(bundleSignatureOption);
    import.Add(manifestSignatureOption);
    import.SetAction((parseResult, _) =>
    {
        var bundlePath = parseResult.GetValue(bundleArgument) ?? string.Empty;
        var manifest = parseResult.GetValue(manifestOption);
        var bundleSignature = parseResult.GetValue(bundleSignatureOption);
        var manifestSignature = parseResult.GetValue(manifestSignatureOption);
        var verbose = parseResult.GetValue(verboseOption);
        return CommandHandlers.HandleOfflineKitImportAsync(services, bundlePath, manifest, bundleSignature, manifestSignature, verbose, cancellationToken);
    });

    // offline kit status — show the currently imported bundle and components.
    var status = new Command("status", "Display offline kit installation status.");
    var jsonOption = new Option<bool>("--json")
    {
        Description = "Emit status as JSON."
    };
    status.Add(jsonOption);
    status.SetAction((parseResult, _) =>
    {
        var asJson = parseResult.GetValue(jsonOption);
        var verbose = parseResult.GetValue(verboseOption);
        return CommandHandlers.HandleOfflineKitStatusAsync(services, asJson, verbose, cancellationToken);
    });

    kit.Add(pull);
    kit.Add(import);
    kit.Add(status);
    offline.Add(kit);
    return offline;
}
private static Command BuildConfigCommand(StellaOpsCliOptions options)
{
var config = new Command("config", "Inspect CLI configuration state.");
var show = new Command("show", "Display resolved configuration values.");
show.SetAction((_, _) =>
{
var authority = options.Authority ?? new StellaOpsCliAuthorityOptions();

View File

@@ -1448,17 +1448,415 @@ internal static class CommandHandlers
{
logger.LogError(ex, "Failed to verify revocation bundle.");
Environment.ExitCode = 1;
}
finally
{
loggerFactory.Dispose();
}
}
private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature)
{
encodedHeader = string.Empty;
encodedSignature = string.Empty;
}
finally
{
loggerFactory.Dispose();
}
}
/// <summary>
/// Handles `offline kit pull`: downloads an offline kit bundle (plus manifest and
/// any detached signatures) into the target directory and logs where each artifact
/// was written. Sets <see cref="Environment.ExitCode"/> to 0 on success, 1 on failure.
/// </summary>
/// <param name="bundleId">Bundle to fetch; null/blank means "latest".</param>
/// <param name="destinationDirectory">Override output directory; falls back to configured kits directory, then ./offline-kits.</param>
/// <param name="overwrite">Re-download even when a matching file already exists.</param>
/// <param name="resume">Allow resuming partial downloads.</param>
/// <param name="verbose">Lower the log level to Debug for this command.</param>
public static async Task HandleOfflineKitPullAsync(
    IServiceProvider services,
    string? bundleId,
    string? destinationDirectory,
    bool overwrite,
    bool resume,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-pull");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    // Verbosity is process-shared state; remember the old level and restore in finally.
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId);
    using var duration = CliMetrics.MeasureCommandDuration("offline kit pull");
    try
    {
        // Resolution order: explicit --destination, configured kits directory,
        // then ./offline-kits relative to the current working directory.
        var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory)
            ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits")
            : destinationDirectory;
        targetDirectory = Path.GetFullPath(targetDirectory);
        Directory.CreateDirectory(targetDirectory);
        var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false);
        logger.LogInformation(
            "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).",
            result.Descriptor.BundleId,
            result.BundlePath,
            result.Descriptor.CapturedAt,
            result.Descriptor.BundleSha256);
        logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath);
        if (!string.IsNullOrWhiteSpace(result.MetadataPath))
        {
            logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath);
        }
        // Signature files are optional; only report the ones that were downloaded.
        if (result.BundleSignaturePath is not null)
        {
            logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath);
        }
        if (result.ManifestSignaturePath is not null)
        {
            logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath);
        }
        CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? "unknown", result.FromCache);
        activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache);
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to download offline kit bundle.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles `offline kit import`: validates the bundle (and optional manifest /
/// detached signatures) against the sidecar metadata file when present, then
/// uploads everything to the backend. Sets <see cref="Environment.ExitCode"/>
/// to 0 on success, 1 on any validation or transport failure.
/// </summary>
/// <param name="bundlePath">Path to the offline kit tarball; required.</param>
/// <param name="manifestPath">Optional manifest path; falls back to metadata, then to a sibling offline-manifest*.json.</param>
/// <param name="bundleSignaturePath">Optional detached bundle signature; skipped with a warning if missing on disk.</param>
/// <param name="manifestSignaturePath">Optional detached manifest signature; skipped with a warning if missing on disk.</param>
/// <param name="verbose">Lower the log level to Debug for this command.</param>
public static async Task HandleOfflineKitImportAsync(
    IServiceProvider services,
    string bundlePath,
    string? manifestPath,
    string? bundleSignaturePath,
    string? manifestSignaturePath,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var options = scope.ServiceProvider.GetRequiredService<StellaOpsCliOptions>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-import");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    // Verbosity is process-shared state; remember the old level and restore in finally.
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client);
    using var duration = CliMetrics.MeasureCommandDuration("offline kit import");
    try
    {
        if (string.IsNullOrWhiteSpace(bundlePath))
        {
            logger.LogError("Bundle path is required.");
            Environment.ExitCode = 1;
            return;
        }
        bundlePath = Path.GetFullPath(bundlePath);
        if (!File.Exists(bundlePath))
        {
            logger.LogError("Bundle file {Path} not found.", bundlePath);
            Environment.ExitCode = 1;
            return;
        }
        // Sidecar metadata (written during `offline kit pull`) supplies defaults
        // for paths and expected digests; it is best-effort and may be null.
        var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false);
        if (metadata is not null)
        {
            manifestPath ??= metadata.ManifestPath;
            bundleSignaturePath ??= metadata.BundleSignaturePath;
            manifestSignaturePath ??= metadata.ManifestSignaturePath;
        }
        manifestPath = NormalizeFilePath(manifestPath);
        bundleSignaturePath = NormalizeFilePath(bundleSignaturePath);
        manifestSignaturePath = NormalizeFilePath(manifestSignaturePath);
        if (manifestPath is null)
        {
            manifestPath = TryInferManifestPath(bundlePath);
            if (manifestPath is not null)
            {
                logger.LogDebug("Using inferred manifest path {Path}.", manifestPath);
            }
        }
        // An explicitly requested (or metadata-declared) manifest must exist;
        // missing signatures are merely downgraded to warnings.
        if (manifestPath is not null && !File.Exists(manifestPath))
        {
            logger.LogError("Manifest file {Path} not found.", manifestPath);
            Environment.ExitCode = 1;
            return;
        }
        if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath))
        {
            logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath);
            bundleSignaturePath = null;
        }
        if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath))
        {
            logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath);
            manifestSignaturePath = null;
        }
        // When metadata declares digests, verify the files on disk before uploading.
        if (metadata is not null)
        {
            var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false);
            if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256))
            {
                logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest);
                Environment.ExitCode = 1;
                return;
            }
            if (manifestPath is not null)
            {
                var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false);
                if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256))
                {
                    logger.LogError("Manifest digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest);
                    Environment.ExitCode = 1;
                    return;
                }
            }
        }
        var request = new OfflineKitImportRequest(
            bundlePath,
            manifestPath,
            bundleSignaturePath,
            manifestSignaturePath,
            metadata?.BundleId,
            metadata?.BundleSha256,
            metadata?.BundleSize,
            metadata?.CapturedAt,
            metadata?.Channel,
            metadata?.Kind,
            metadata?.IsDelta,
            metadata?.BaseBundleId,
            metadata?.ManifestSha256,
            metadata?.ManifestSize);
        var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false);
        CliMetrics.RecordOfflineKitImport(result.Status);
        logger.LogInformation(
            "Import {ImportId} submitted at {Submitted:u} with status {Status}.",
            string.IsNullOrWhiteSpace(result.ImportId) ? "<pending>" : result.ImportId,
            result.SubmittedAt,
            string.IsNullOrWhiteSpace(result.Status) ? "queued" : result.Status);
        if (!string.IsNullOrWhiteSpace(result.Message))
        {
            // Pass the server-supplied message as a value, not as the log template:
            // a message containing '{' or '}' would otherwise throw at format time (CA2254).
            logger.LogInformation("{Message}", result.Message);
        }
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Offline kit import failed.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}
/// <summary>
/// Handles `offline kit status`: fetches the backend's offline kit state and
/// renders it either as indented JSON (<paramref name="asJson"/>) or as log lines
/// plus a component table. Sets <see cref="Environment.ExitCode"/> to 0 on success,
/// 1 on failure.
/// </summary>
public static async Task HandleOfflineKitStatusAsync(
    IServiceProvider services,
    bool asJson,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("offline-kit-status");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    // Verbosity is process-shared state; remember the old level and restore in finally.
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client);
    using var duration = CliMetrics.MeasureCommandDuration("offline kit status");
    try
    {
        var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false);
        if (asJson)
        {
            // Shape mirrors the human-readable output; written to stdout so it can be piped.
            var payload = new
            {
                bundleId = status.BundleId,
                channel = status.Channel,
                kind = status.Kind,
                isDelta = status.IsDelta,
                baseBundleId = status.BaseBundleId,
                capturedAt = status.CapturedAt,
                importedAt = status.ImportedAt,
                sha256 = status.BundleSha256,
                sizeBytes = status.BundleSize,
                components = status.Components.Select(component => new
                {
                    component.Name,
                    component.Version,
                    component.Digest,
                    component.CapturedAt,
                    component.SizeBytes
                })
            };
            var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
            Console.WriteLine(json);
        }
        else
        {
            // A blank bundle id means nothing has been imported yet.
            if (string.IsNullOrWhiteSpace(status.BundleId))
            {
                logger.LogInformation("No offline kit bundle has been imported yet.");
            }
            else
            {
                logger.LogInformation(
                    "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.",
                    status.BundleId,
                    status.Kind ?? "unknown",
                    status.CapturedAt ?? default,
                    status.ImportedAt ?? default,
                    status.BundleSha256 ?? "<n/a>",
                    status.BundleSize.HasValue ? status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : "<n/a>");
            }
            if (status.Components.Count > 0)
            {
                // Spectre.Console table of per-component details; "-" marks absent values.
                var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)");
                foreach (var component in status.Components)
                {
                    table.AddRow(
                        component.Name,
                        string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!,
                        string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}",
                        component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-",
                        component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-");
                }
                AnsiConsole.Write(table);
            }
        }
        Environment.ExitCode = 0;
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "Failed to read offline kit status.");
        Environment.ExitCode = 1;
    }
    finally
    {
        verbosity.MinimumLevel = previousLevel;
    }
}
// Attempts to read the "<bundle>.metadata.json" sidecar written at download time.
// Returns null when the sidecar is absent or unreadable; callers treat the
// metadata as best-effort, so no error is surfaced here.
private static async Task<OfflineKitMetadataDocument?> LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken)
{
    var sidecarPath = bundlePath + ".metadata.json";
    if (File.Exists(sidecarPath))
    {
        try
        {
            await using var sidecarStream = File.OpenRead(sidecarPath);
            return await JsonSerializer.DeserializeAsync<OfflineKitMetadataDocument>(sidecarStream, cancellationToken: cancellationToken).ConfigureAwait(false);
        }
        catch
        {
            // Corrupt or partially written sidecar: fall through to "no metadata".
        }
    }

    return null;
}
// Expands a possibly-relative path to an absolute one; null/blank input maps to null.
private static string? NormalizeFilePath(string? path)
    => string.IsNullOrWhiteSpace(path) ? null : Path.GetFullPath(path);
// Infers the offline-manifest JSON path that accompanies a bundle when the caller
// did not supply one. Search order, all within the bundle's directory:
//   1. offline-manifest-<bundle-base-name>.json
//   2. offline-manifest.json
//   3. any other offline-manifest*.json — ordinally first, so the choice is
//      deterministic (Directory.EnumerateFiles has no guaranteed ordering)
// Returns an absolute path, or null when nothing matches. The fallback branch is
// now normalized via Path.GetFullPath for parity with the candidate branch.
private static string? TryInferManifestPath(string bundlePath)
{
    var directory = Path.GetDirectoryName(bundlePath);
    if (string.IsNullOrWhiteSpace(directory))
    {
        return null;
    }

    var baseName = Path.GetFileName(bundlePath);
    if (string.IsNullOrWhiteSpace(baseName))
    {
        return null;
    }

    // Strip the extension; handle double extensions such as ".tar.gz" -> base name.
    baseName = Path.GetFileNameWithoutExtension(baseName);
    if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase))
    {
        baseName = Path.GetFileNameWithoutExtension(baseName);
    }

    var candidates = new[]
    {
        Path.Combine(directory, $"offline-manifest-{baseName}.json"),
        Path.Combine(directory, "offline-manifest.json")
    };

    foreach (var candidate in candidates)
    {
        if (File.Exists(candidate))
        {
            return Path.GetFullPath(candidate);
        }
    }

    var fallback = Directory.EnumerateFiles(directory, "offline-manifest*.json")
        .OrderBy(static file => file, StringComparer.Ordinal)
        .FirstOrDefault();
    return fallback is null ? null : Path.GetFullPath(fallback);
}
// Compares a computed digest against an expected one, ignoring case and an
// optional "sha256:" prefix. A missing/blank expectation counts as a match
// (no expectation means nothing to verify).
private static bool DigestsEqual(string computed, string? expected)
    => string.IsNullOrWhiteSpace(expected)
        || string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase);
// Canonicalizes a digest string: trims surrounding whitespace, drops a leading
// "sha256:" prefix (any casing), and lowercases the remaining hex payload.
private static string NormalizeDigest(string digest)
{
    const string Prefix = "sha256:";
    var trimmed = digest.Trim();
    if (trimmed.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase))
    {
        trimmed = trimmed[Prefix.Length..];
    }

    return trimmed.ToLowerInvariant();
}
// Streams the file at <paramref name="path"/> through SHA-256 and returns the
// digest as lowercase hexadecimal.
private static async Task<string> ComputeSha256Async(string path, CancellationToken cancellationToken)
{
    await using FileStream input = File.OpenRead(path);
    byte[] digest = await SHA256.HashDataAsync(input, cancellationToken).ConfigureAwait(false);
    return Convert.ToHexString(digest).ToLowerInvariant();
}
private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature)
{
encodedHeader = string.Empty;
encodedSignature = string.Empty;
if (string.IsNullOrWhiteSpace(value))
{

View File

@@ -200,6 +200,40 @@ public static class CliBootstrapper
{
authority.TokenCacheDirectory = Path.GetFullPath(authority.TokenCacheDirectory);
}
cliOptions.Offline ??= new StellaOpsCliOfflineOptions();
var offline = cliOptions.Offline;
var kitsDirectory = ResolveWithFallback(
string.Empty,
configuration,
"STELLAOPS_OFFLINE_KITS_DIRECTORY",
"STELLAOPS_OFFLINE_KITS_DIR",
"StellaOps:Offline:KitsDirectory",
"StellaOps:Offline:KitDirectory",
"Offline:KitsDirectory",
"Offline:KitDirectory");
if (string.IsNullOrWhiteSpace(kitsDirectory))
{
kitsDirectory = offline.KitsDirectory ?? "offline-kits";
}
offline.KitsDirectory = Path.GetFullPath(kitsDirectory);
if (!Directory.Exists(offline.KitsDirectory))
{
Directory.CreateDirectory(offline.KitsDirectory);
}
var mirror = ResolveWithFallback(
string.Empty,
configuration,
"STELLAOPS_OFFLINE_MIRROR_URL",
"StellaOps:Offline:KitMirror",
"Offline:KitMirror",
"Offline:MirrorUrl");
offline.MirrorUrl = string.IsNullOrWhiteSpace(mirror) ? null : mirror.Trim();
};
});

View File

@@ -9,7 +9,7 @@ public sealed class StellaOpsCliOptions
public string ApiKey { get; set; } = string.Empty;
public string BackendUrl { get; set; } = string.Empty;
public string ScannerCacheDirectory { get; set; } = "scanners";
public string ResultsDirectory { get; set; } = "results";
@@ -23,6 +23,8 @@ public sealed class StellaOpsCliOptions
public int ScanUploadAttempts { get; set; } = 3;
public StellaOpsCliAuthorityOptions Authority { get; set; } = new();
public StellaOpsCliOfflineOptions Offline { get; set; } = new();
}
public sealed class StellaOpsCliAuthorityOptions
@@ -54,3 +56,10 @@ public sealed class StellaOpsCliAuthorityResilienceOptions
public TimeSpan? OfflineCacheTolerance { get; set; }
}
/// <summary>Offline-kit settings for the CLI.</summary>
public sealed class StellaOpsCliOfflineOptions
{
    /// <summary>Directory where downloaded offline kit bundles are stored. Defaults to "offline-kits".</summary>
    public string KitsDirectory { get; set; } = "offline-kits";

    /// <summary>Optional mirror URL for offline kit downloads; null when no mirror is configured.</summary>
    public string? MirrorUrl { get; set; }
}

View File

@@ -535,7 +535,687 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
return list;
}
private static List<string> NormalizeImages(IReadOnlyList<string> images)
/// <summary>
/// Downloads an offline kit bundle plus its manifest, optional detached signatures,
/// and writes a "<bundle>.metadata.json" sidecar describing what was fetched.
/// An existing bundle whose SHA-256 matches the descriptor is reused (cache hit)
/// unless <paramref name="overwrite"/> is set.
/// </summary>
/// <param name="bundleId">Bundle to fetch; null/blank means "latest".</param>
/// <param name="destinationDirectory">Output directory override; falls back to configured kits directory.</param>
/// <param name="overwrite">Force a re-download even when the existing file's digest matches.</param>
/// <param name="resume">Allow resuming a partial bundle download.</param>
public async Task<OfflineKitDownloadResult> DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken)
{
    EnsureBackendConfigured();
    var rootDirectory = ResolveOfflineDirectory(destinationDirectory);
    Directory.CreateDirectory(rootDirectory);
    var descriptor = await FetchOfflineKitDescriptorAsync(bundleId, cancellationToken).ConfigureAwait(false);
    var bundlePath = Path.Combine(rootDirectory, descriptor.BundleName);
    var metadataPath = bundlePath + ".metadata.json";
    var manifestPath = Path.Combine(rootDirectory, descriptor.ManifestName);
    var bundleSignaturePath = descriptor.BundleSignatureName is not null ? Path.Combine(rootDirectory, descriptor.BundleSignatureName) : null;
    var manifestSignaturePath = descriptor.ManifestSignatureName is not null ? Path.Combine(rootDirectory, descriptor.ManifestSignatureName) : null;
    var fromCache = false;
    if (!overwrite && File.Exists(bundlePath))
    {
        // Digest match -> reuse the existing file. Mismatch -> either keep the bytes
        // as a ".partial" for resumption, or discard them entirely.
        var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false);
        if (string.Equals(digest, descriptor.BundleSha256, StringComparison.OrdinalIgnoreCase))
        {
            fromCache = true;
        }
        else if (resume)
        {
            var partial = bundlePath + ".partial";
            File.Move(bundlePath, partial, overwrite: true);
        }
        else
        {
            File.Delete(bundlePath);
        }
    }
    if (!fromCache)
    {
        await DownloadFileWithResumeAsync(descriptor.BundleDownloadUri, bundlePath, descriptor.BundleSha256, descriptor.BundleSize, resume, cancellationToken).ConfigureAwait(false);
    }
    // Manifest is always re-fetched; it is small and never resumed.
    await DownloadFileWithResumeAsync(descriptor.ManifestDownloadUri, manifestPath, descriptor.ManifestSha256, descriptor.ManifestSize ?? 0, resume: false, cancellationToken).ConfigureAwait(false);
    // Signatures are optional; download only those the descriptor advertises.
    if (descriptor.BundleSignatureDownloadUri is not null && bundleSignaturePath is not null)
    {
        await DownloadAuxiliaryFileAsync(descriptor.BundleSignatureDownloadUri, bundleSignaturePath, cancellationToken).ConfigureAwait(false);
    }
    if (descriptor.ManifestSignatureDownloadUri is not null && manifestSignaturePath is not null)
    {
        await DownloadAuxiliaryFileAsync(descriptor.ManifestSignatureDownloadUri, manifestSignaturePath, cancellationToken).ConfigureAwait(false);
    }
    await WriteOfflineKitMetadataAsync(metadataPath, descriptor, bundlePath, manifestPath, bundleSignaturePath, manifestSignaturePath, cancellationToken).ConfigureAwait(false);
    return new OfflineKitDownloadResult(
        descriptor,
        bundlePath,
        manifestPath,
        bundleSignaturePath,
        manifestSignaturePath,
        metadataPath,
        fromCache);
}
/// <summary>
/// Uploads an offline kit bundle (plus optional manifest and detached signatures)
/// to the backend as a multipart/form-data POST to api/offline-kit/import.
/// Missing digests/sizes in the request are computed from the files on disk.
/// </summary>
/// <exception cref="FileNotFoundException">Any referenced file does not exist.</exception>
/// <exception cref="InvalidOperationException">The backend rejects the upload or returns an unparsable response.</exception>
public async Task<OfflineKitImportResult> ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken)
{
    EnsureBackendConfigured();
    if (request is null)
    {
        throw new ArgumentNullException(nameof(request));
    }
    var bundlePath = Path.GetFullPath(request.BundlePath);
    if (!File.Exists(bundlePath))
    {
        throw new FileNotFoundException("Offline kit bundle not found.", bundlePath);
    }
    // Optional companion files: resolve to full paths and fail fast if declared but missing.
    string? manifestPath = null;
    if (!string.IsNullOrWhiteSpace(request.ManifestPath))
    {
        manifestPath = Path.GetFullPath(request.ManifestPath);
        if (!File.Exists(manifestPath))
        {
            throw new FileNotFoundException("Offline kit manifest not found.", manifestPath);
        }
    }
    string? bundleSignaturePath = null;
    if (!string.IsNullOrWhiteSpace(request.BundleSignaturePath))
    {
        bundleSignaturePath = Path.GetFullPath(request.BundleSignaturePath);
        if (!File.Exists(bundleSignaturePath))
        {
            throw new FileNotFoundException("Offline kit bundle signature not found.", bundleSignaturePath);
        }
    }
    string? manifestSignaturePath = null;
    if (!string.IsNullOrWhiteSpace(request.ManifestSignaturePath))
    {
        manifestSignaturePath = Path.GetFullPath(request.ManifestSignaturePath);
        if (!File.Exists(manifestSignaturePath))
        {
            throw new FileNotFoundException("Offline kit manifest signature not found.", manifestSignaturePath);
        }
    }
    // Prefer caller-supplied size/digest; otherwise measure/hash the file itself.
    var bundleSize = request.BundleSize ?? new FileInfo(bundlePath).Length;
    var bundleSha = string.IsNullOrWhiteSpace(request.BundleSha256)
        ? await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false)
        : NormalizeSha(request.BundleSha256) ?? throw new InvalidOperationException("Bundle digest must not be empty.");
    string? manifestSha = null;
    long? manifestSize = null;
    if (manifestPath is not null)
    {
        manifestSize = request.ManifestSize ?? new FileInfo(manifestPath).Length;
        manifestSha = string.IsNullOrWhiteSpace(request.ManifestSha256)
            ? await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false)
            : NormalizeSha(request.ManifestSha256);
    }
    var metadata = new OfflineKitImportMetadataPayload
    {
        BundleId = request.BundleId,
        BundleSha256 = bundleSha,
        BundleSize = bundleSize,
        CapturedAt = request.CapturedAt,
        Channel = request.Channel,
        Kind = request.Kind,
        IsDelta = request.IsDelta,
        BaseBundleId = request.BaseBundleId,
        ManifestSha256 = manifestSha,
        ManifestSize = manifestSize
    };
    using var message = CreateRequest(HttpMethod.Post, "api/offline-kit/import");
    await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false);
    // NOTE: the multipart content owns every part; disposing `content` disposes the
    // StreamContent parts, which in turn close the underlying FileStreams.
    using var content = new MultipartFormDataContent();
    var metadataOptions = new JsonSerializerOptions(SerializerOptions)
    {
        WriteIndented = false
    };
    var metadataJson = JsonSerializer.Serialize(metadata, metadataOptions);
    var metadataContent = new StringContent(metadataJson, Encoding.UTF8, "application/json");
    content.Add(metadataContent, "metadata");
    var bundleStream = File.OpenRead(bundlePath);
    var bundleContent = new StreamContent(bundleStream);
    bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/gzip");
    content.Add(bundleContent, "bundle", Path.GetFileName(bundlePath));
    if (manifestPath is not null)
    {
        var manifestStream = File.OpenRead(manifestPath);
        var manifestContent = new StreamContent(manifestStream);
        manifestContent.Headers.ContentType = new MediaTypeHeaderValue("application/json");
        content.Add(manifestContent, "manifest", Path.GetFileName(manifestPath));
    }
    if (bundleSignaturePath is not null)
    {
        var signatureStream = File.OpenRead(bundleSignaturePath);
        var signatureContent = new StreamContent(signatureStream);
        signatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
        content.Add(signatureContent, "bundleSignature", Path.GetFileName(bundleSignaturePath));
    }
    if (manifestSignaturePath is not null)
    {
        var manifestSignatureStream = File.OpenRead(manifestSignaturePath);
        var manifestSignatureContent = new StreamContent(manifestSignatureStream);
        manifestSignatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
        content.Add(manifestSignatureContent, "manifestSignature", Path.GetFileName(manifestSignaturePath));
    }
    message.Content = content;
    using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false);
    if (!response.IsSuccessStatusCode)
    {
        var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException(failure);
    }
    OfflineKitImportResponseTransport? document;
    try
    {
        document = await response.Content.ReadFromJsonAsync<OfflineKitImportResponseTransport>(SerializerOptions, cancellationToken).ConfigureAwait(false);
    }
    catch (JsonException ex)
    {
        // Attach the raw payload for diagnostics without logging it here.
        var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException($"Failed to parse offline kit import response. {ex.Message}", ex)
        {
            Data = { ["payload"] = raw }
        };
    }
    var submittedAt = document?.SubmittedAt ?? DateTimeOffset.UtcNow;
    return new OfflineKitImportResult(
        document?.ImportId,
        document?.Status,
        submittedAt,
        document?.Message);
}
/// <summary>
/// Fetches api/offline-kit/status and maps it to an <see cref="OfflineKitStatus"/>.
/// An empty response body, or a response without a "current" bundle, yields a
/// status with null bundle fields (and any components that were reported).
/// </summary>
/// <exception cref="InvalidOperationException">Non-success status code or unparsable JSON.</exception>
public async Task<OfflineKitStatus> GetOfflineKitStatusAsync(CancellationToken cancellationToken)
{
    EnsureBackendConfigured();
    using var request = CreateRequest(HttpMethod.Get, "api/offline-kit/status");
    await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
    using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
    if (!response.IsSuccessStatusCode)
    {
        var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException(failure);
    }
    // No body at all -> "nothing imported yet" status.
    if (response.Content is null || response.Content.Headers.ContentLength is 0)
    {
        return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, Array.Empty<OfflineKitComponentStatus>());
    }
    OfflineKitStatusTransport? document;
    try
    {
        document = await response.Content.ReadFromJsonAsync<OfflineKitStatusTransport>(SerializerOptions, cancellationToken).ConfigureAwait(false);
    }
    catch (JsonException ex)
    {
        // Attach the raw payload for diagnostics without logging it here.
        var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException($"Failed to parse offline kit status response. {ex.Message}", ex)
        {
            Data = { ["payload"] = raw }
        };
    }
    var current = document?.Current;
    var components = MapOfflineComponents(document?.Components);
    if (current is null)
    {
        return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, components);
    }
    // Normalize strings/digests and coerce timestamps to UTC before returning.
    return new OfflineKitStatus(
        NormalizeOptionalString(current.BundleId),
        NormalizeOptionalString(current.Channel),
        NormalizeOptionalString(current.Kind),
        current.IsDelta ?? false,
        NormalizeOptionalString(current.BaseBundleId),
        current.CapturedAt?.ToUniversalTime(),
        current.ImportedAt?.ToUniversalTime(),
        NormalizeSha(current.BundleSha256),
        current.BundleSize,
        components);
}
// Resolves the directory used for offline kit artifacts, as an absolute path.
// Precedence: explicit destination argument, configured Offline.KitsDirectory,
// then "offline-kits" under the current working directory.
private string ResolveOfflineDirectory(string destinationDirectory)
{
    if (string.IsNullOrWhiteSpace(destinationDirectory))
    {
        var configuredDirectory = _options.Offline?.KitsDirectory;
        return string.IsNullOrWhiteSpace(configuredDirectory)
            ? Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, "offline-kits"))
            : Path.GetFullPath(configuredDirectory);
    }

    return Path.GetFullPath(destinationDirectory);
}
/// <summary>
/// Fetches offline kit bundle metadata from the backend — the latest bundle when
/// <paramref name="bundleId"/> is not supplied — and maps it to the internal descriptor.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown on a failure response, unparsable payload, or empty body.</exception>
private async Task<OfflineKitBundleDescriptor> FetchOfflineKitDescriptorAsync(string? bundleId, CancellationToken cancellationToken)
{
    var route = string.IsNullOrWhiteSpace(bundleId)
        ? "api/offline-kit/bundles/latest"
        : $"api/offline-kit/bundles/{Uri.EscapeDataString(bundleId)}";

    using var request = CreateRequest(HttpMethod.Get, route);
    await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
    using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

    if (!response.IsSuccessStatusCode)
    {
        throw new InvalidOperationException(
            await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false));
    }

    OfflineKitBundleDescriptorTransport? payload;
    try
    {
        payload = await response.Content.ReadFromJsonAsync<OfflineKitBundleDescriptorTransport>(SerializerOptions, cancellationToken).ConfigureAwait(false);
    }
    catch (JsonException ex)
    {
        // Attach the raw body so diagnostics can inspect what the server actually sent.
        var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException($"Failed to parse offline kit metadata. {ex.Message}", ex)
        {
            Data = { ["payload"] = raw }
        };
    }

    return payload is null
        ? throw new InvalidOperationException("Offline kit metadata response was empty.")
        : MapOfflineKitDescriptor(payload);
}
/// <summary>
/// Converts the transport-layer bundle descriptor into the internal <see cref="OfflineKitBundleDescriptor"/>,
/// validating required fields and resolving all artefact download URIs.
/// </summary>
/// <param name="transport">Raw metadata as returned by the backend.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="transport"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when mandatory metadata (name, digests, size, URLs) is missing.</exception>
private OfflineKitBundleDescriptor MapOfflineKitDescriptor(OfflineKitBundleDescriptorTransport transport)
{
    if (transport is null)
    {
        throw new ArgumentNullException(nameof(transport));
    }

    // Bundle name is mandatory; the bundle id falls back to the name when absent.
    var bundleName = string.IsNullOrWhiteSpace(transport.BundleName)
        ? throw new InvalidOperationException("Offline kit metadata missing bundleName.")
        : transport.BundleName!.Trim();
    var bundleId = string.IsNullOrWhiteSpace(transport.BundleId) ? bundleName : transport.BundleId!.Trim();
    var bundleSha = NormalizeSha(transport.BundleSha256) ?? throw new InvalidOperationException("Offline kit metadata missing bundleSha256.");
    var bundleSize = transport.BundleSize;
    if (bundleSize <= 0)
    {
        throw new InvalidOperationException("Offline kit metadata missing bundle size.");
    }

    // Manifest name has a conventional default; its digest is still required.
    var manifestName = string.IsNullOrWhiteSpace(transport.ManifestName) ? "offline-manifest.json" : transport.ManifestName!.Trim();
    var manifestSha = NormalizeSha(transport.ManifestSha256) ?? throw new InvalidOperationException("Offline kit metadata missing manifestSha256.");
    // When no capture timestamp is supplied, stamp "now" so downstream consumers always have a value.
    var capturedAt = transport.CapturedAt?.ToUniversalTime() ?? DateTimeOffset.UtcNow;

    // Bundle and manifest URIs are required; signature artefacts are optional (may resolve to null).
    var bundleDownloadUri = ResolveDownloadUri(transport.BundleUrl, transport.BundlePath, bundleName);
    var manifestDownloadUri = ResolveDownloadUri(transport.ManifestUrl, transport.ManifestPath, manifestName);
    var bundleSignatureUri = ResolveOptionalDownloadUri(transport.BundleSignatureUrl, transport.BundleSignaturePath, transport.BundleSignatureName);
    var manifestSignatureUri = ResolveOptionalDownloadUri(transport.ManifestSignatureUrl, transport.ManifestSignaturePath, transport.ManifestSignatureName);
    var bundleSignatureName = ResolveArtifactName(transport.BundleSignatureName, bundleSignatureUri);
    var manifestSignatureName = ResolveArtifactName(transport.ManifestSignatureName, manifestSignatureUri);

    return new OfflineKitBundleDescriptor(
        bundleId,
        bundleName,
        bundleSha,
        bundleSize,
        bundleDownloadUri,
        manifestName,
        manifestSha,
        manifestDownloadUri,
        capturedAt,
        NormalizeOptionalString(transport.Channel),
        NormalizeOptionalString(transport.Kind),
        transport.IsDelta ?? false,
        NormalizeOptionalString(transport.BaseBundleId),
        bundleSignatureName,
        bundleSignatureUri,
        manifestSignatureName,
        manifestSignatureUri,
        transport.ManifestSize);
}
/// <summary>
/// Picks a file name for a downloaded artefact: an explicitly supplied name (trimmed) wins,
/// otherwise the file-name segment of the download URI is used, otherwise null.
/// </summary>
private static string? ResolveArtifactName(string? explicitName, Uri? uri)
{
    if (!string.IsNullOrWhiteSpace(explicitName))
    {
        return explicitName.Trim();
    }

    var fromUri = uri is null ? null : Path.GetFileName(uri.LocalPath);
    return string.IsNullOrWhiteSpace(fromUri) ? null : fromUri;
}
/// <summary>
/// Resolves a download URI for a required artefact. Absolute URLs are used as-is; relative URLs
/// are resolved against the backend base address (or the mirror); otherwise the relative path or
/// fallback file name is resolved against the mirror/backend.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when no location hint yields a URI.</exception>
private Uri ResolveDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string fallbackFileName)
{
    if (!string.IsNullOrWhiteSpace(absoluteOrRelativeUrl))
    {
        var candidate = new Uri(absoluteOrRelativeUrl, UriKind.RelativeOrAbsolute);
        if (candidate.IsAbsoluteUri)
        {
            return candidate;
        }

        return _httpClient.BaseAddress is null
            ? BuildUriFromRelative(candidate.ToString())
            : new Uri(_httpClient.BaseAddress, candidate);
    }

    // No URL supplied: fall back to a relative path, then to a bare file name.
    var relative = !string.IsNullOrWhiteSpace(relativePath)
        ? relativePath
        : !string.IsNullOrWhiteSpace(fallbackFileName) ? fallbackFileName : null;

    return relative is null
        ? throw new InvalidOperationException("Offline kit metadata did not include a download URL.")
        : BuildUriFromRelative(relative);
}
/// <summary>
/// Resolves a relative artefact path against the configured offline mirror, falling back to the
/// backend base address when no mirror is configured.
/// </summary>
/// <param name="relative">Relative path or file name; leading slashes are stripped before resolution.</param>
/// <exception cref="InvalidOperationException">Thrown when neither a mirror nor a backend base address is available.</exception>
private Uri BuildUriFromRelative(string relative)
{
    var normalized = relative.TrimStart('/');

    if (!string.IsNullOrWhiteSpace(_options.Offline?.MirrorUrl) &&
        Uri.TryCreate(_options.Offline.MirrorUrl, UriKind.Absolute, out var mirrorBase))
    {
        // Ensure the base ends with '/' so Uri composition appends the segment instead of replacing
        // the last one. The char overload is ordinal by definition, avoiding the culture-sensitive
        // string EndsWith overload (CA1310).
        if (!mirrorBase.AbsoluteUri.EndsWith('/'))
        {
            mirrorBase = new Uri(mirrorBase.AbsoluteUri + "/");
        }

        return new Uri(mirrorBase, normalized);
    }

    if (_httpClient.BaseAddress is not null)
    {
        return new Uri(_httpClient.BaseAddress, normalized);
    }

    throw new InvalidOperationException($"Cannot resolve offline kit URI for '{relative}' because no mirror or backend base address is configured.");
}
/// <summary>
/// Like <see cref="ResolveDownloadUri"/> but for optional artefacts (signatures): returns null
/// when no location hints are present or when resolution fails.
/// </summary>
private Uri? ResolveOptionalDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string? fallbackName)
{
    if (string.IsNullOrWhiteSpace(absoluteOrRelativeUrl) &&
        string.IsNullOrWhiteSpace(relativePath) &&
        string.IsNullOrWhiteSpace(fallbackName))
    {
        return null;
    }

    try
    {
        return ResolveDownloadUri(absoluteOrRelativeUrl, relativePath, fallbackName ?? string.Empty);
    }
    catch
    {
        // Best-effort: an optional artefact that cannot be resolved is simply "not available".
        return null;
    }
}
/// <summary>
/// Downloads an artefact to <paramref name="targetPath"/>, optionally resuming a partial download
/// via HTTP Range requests, then verifies its SHA-256 digest and (when known) its size.
/// </summary>
/// <param name="downloadUri">Absolute source URI.</param>
/// <param name="targetPath">Final on-disk destination; parent directories are created as needed.</param>
/// <param name="expectedSha256">Expected hex digest; compared case-insensitively.</param>
/// <param name="expectedSize">Expected byte count; 0 or negative disables the size check and range resume.</param>
/// <param name="resume">When true, an existing partial (or prior full) file is continued instead of restarted.</param>
/// <exception cref="InvalidOperationException">Thrown on a failure response, digest mismatch, or size mismatch.</exception>
private async Task DownloadFileWithResumeAsync(Uri downloadUri, string targetPath, string expectedSha256, long expectedSize, bool resume, CancellationToken cancellationToken)
{
    var directory = Path.GetDirectoryName(targetPath);
    if (!string.IsNullOrEmpty(directory))
    {
        Directory.CreateDirectory(directory);
    }

    // Resumable downloads accumulate in ".partial"; non-resumable ones stream into ".tmp"
    // (note the non-resume branch writes directly to targetPath below).
    var partialPath = resume ? targetPath + ".partial" : targetPath + ".tmp";
    if (!resume && File.Exists(targetPath))
    {
        File.Delete(targetPath);
    }

    // A previously completed (possibly corrupt) file is demoted to a partial so it can be re-verified/extended.
    if (resume && File.Exists(targetPath))
    {
        File.Move(targetPath, partialPath, overwrite: true);
    }

    long existingLength = 0;
    if (resume && File.Exists(partialPath))
    {
        existingLength = new FileInfo(partialPath).Length;
        if (expectedSize > 0 && existingLength >= expectedSize)
        {
            existingLength = expectedSize;
        }
    }

    while (true)
    {
        using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri);
        // Ask for the remaining bytes only when we have a verified-length prefix and know the total size.
        if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize)
        {
            request.Headers.Range = new RangeHeaderValue(existingLength, null);
        }

        using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);

        // Server ignored the Range header (plain 200 OK): discard the partial and retry from scratch.
        if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize && response.StatusCode == HttpStatusCode.OK)
        {
            existingLength = 0;
            if (File.Exists(partialPath))
            {
                File.Delete(partialPath);
            }
            continue;
        }

        if (!response.IsSuccessStatusCode &&
            !(resume && existingLength > 0 && response.StatusCode == HttpStatusCode.PartialContent))
        {
            var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException(failure);
        }

        // Append to the partial when continuing; otherwise create/truncate the destination.
        var destination = resume ? partialPath : targetPath;
        var mode = resume && existingLength > 0 ? FileMode.Append : FileMode.Create;
        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
        await using (var file = new FileStream(destination, mode, FileAccess.Write, FileShare.None, 81920, useAsync: true))
        {
            await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
        }

        break;
    }

    if (resume && File.Exists(partialPath))
    {
        File.Move(partialPath, targetPath, overwrite: true);
    }

    // Integrity gate: any mismatch deletes the file so a later attempt starts clean.
    var digest = await ComputeSha256Async(targetPath, cancellationToken).ConfigureAwait(false);
    if (!string.Equals(digest, expectedSha256, StringComparison.OrdinalIgnoreCase))
    {
        File.Delete(targetPath);
        throw new InvalidOperationException($"Digest mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSha256} but computed {digest}.");
    }

    if (expectedSize > 0)
    {
        var actualSize = new FileInfo(targetPath).Length;
        if (actualSize != expectedSize)
        {
            File.Delete(targetPath);
            throw new InvalidOperationException($"Size mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSize:N0} bytes but downloaded {actualSize:N0} bytes.");
        }
    }
}
/// <summary>
/// Downloads a small auxiliary artefact (manifest, signature) straight to disk —
/// no resume support and no digest verification.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when the server responds with a failure status.</exception>
private async Task DownloadAuxiliaryFileAsync(Uri downloadUri, string targetPath, CancellationToken cancellationToken)
{
    var directory = Path.GetDirectoryName(targetPath);
    if (!string.IsNullOrEmpty(directory))
    {
        Directory.CreateDirectory(directory);
    }

    using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri);
    using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
    if (!response.IsSuccessStatusCode)
    {
        throw new InvalidOperationException(
            await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false));
    }

    await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
    await using var file = new FileStream(targetPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true);
    await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Persists a local metadata document describing a downloaded offline kit so later runs can
/// detect cache hits and report provenance. Paths are stored fully qualified.
/// </summary>
/// <param name="metadataPath">Destination for the JSON metadata file.</param>
/// <param name="descriptor">Descriptor of the downloaded bundle.</param>
/// <param name="bundlePath">Local path of the bundle archive.</param>
/// <param name="manifestPath">Local path of the manifest file.</param>
/// <param name="bundleSignaturePath">Local bundle signature path, or null if none was downloaded.</param>
/// <param name="manifestSignaturePath">Local manifest signature path, or null if none was downloaded.</param>
private static async Task WriteOfflineKitMetadataAsync(
    string metadataPath,
    OfflineKitBundleDescriptor descriptor,
    string bundlePath,
    string manifestPath,
    string? bundleSignaturePath,
    string? manifestSignaturePath,
    CancellationToken cancellationToken)
{
    var document = new OfflineKitMetadataDocument
    {
        BundleId = descriptor.BundleId,
        BundleName = descriptor.BundleName,
        BundleSha256 = descriptor.BundleSha256,
        BundleSize = descriptor.BundleSize,
        BundlePath = Path.GetFullPath(bundlePath),
        CapturedAt = descriptor.CapturedAt,
        DownloadedAt = DateTimeOffset.UtcNow,
        Channel = descriptor.Channel,
        Kind = descriptor.Kind,
        IsDelta = descriptor.IsDelta,
        BaseBundleId = descriptor.BaseBundleId,
        ManifestName = descriptor.ManifestName,
        ManifestSha256 = descriptor.ManifestSha256,
        ManifestSize = descriptor.ManifestSize,
        ManifestPath = Path.GetFullPath(manifestPath),
        BundleSignaturePath = bundleSignaturePath is null ? null : Path.GetFullPath(bundleSignaturePath),
        ManifestSignaturePath = manifestSignaturePath is null ? null : Path.GetFullPath(manifestSignaturePath)
    };

    // Indented output keeps the metadata file human-readable for offline debugging.
    var options = new JsonSerializerOptions(SerializerOptions)
    {
        WriteIndented = true
    };
    var payload = JsonSerializer.Serialize(document, options);
    await File.WriteAllTextAsync(metadataPath, payload, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Maps transport component entries to internal status records, skipping null entries and
/// entries without a name. Always returns a non-null list (empty when nothing maps).
/// </summary>
private static IReadOnlyList<OfflineKitComponentStatus> MapOfflineComponents(List<OfflineKitComponentStatusTransport>? transports)
{
    if (transports is null || transports.Count == 0)
    {
        return Array.Empty<OfflineKitComponentStatus>();
    }

    var mapped = new List<OfflineKitComponentStatus>(transports.Count);
    foreach (var transport in transports)
    {
        var name = transport?.Name;
        if (string.IsNullOrWhiteSpace(name))
        {
            continue;
        }

        mapped.Add(new OfflineKitComponentStatus(
            name.Trim(),
            NormalizeOptionalString(transport!.Version),
            NormalizeSha(transport.Digest),
            transport.CapturedAt?.ToUniversalTime(),
            transport.SizeBytes));
    }

    return mapped.Count == 0 ? Array.Empty<OfflineKitComponentStatus>() : mapped;
}
/// <summary>
/// Normalizes a digest string: trims whitespace, strips an optional "sha256:" prefix
/// (case-insensitive), and lower-cases the remainder. Returns null for missing input.
/// </summary>
private static string? NormalizeSha(string? digest)
{
    if (string.IsNullOrWhiteSpace(digest))
    {
        return null;
    }

    const string prefix = "sha256:";
    var value = digest.Trim();
    if (value.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
    {
        value = value[prefix.Length..];
    }

    return value.ToLowerInvariant();
}
/// <summary>
/// JSON payload sent alongside an offline kit import, carrying the caller-supplied
/// bundle/manifest metadata for server-side validation.
/// </summary>
private sealed class OfflineKitImportMetadataPayload
{
    public string? BundleId { get; set; }
    public string BundleSha256 { get; set; } = string.Empty;
    public long BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    public string? ManifestSha256 { get; set; }
    public long? ManifestSize { get; set; }
}
private static List<string> NormalizeImages(IReadOnlyList<string> images)
{
var normalized = new List<string>();
if (images is null)

View File

@@ -22,4 +22,10 @@ internal interface IBackendOperationsClient
Task<IReadOnlyList<ExcititorProviderSummary>> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken);
Task<RuntimePolicyEvaluationResult> EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken);
Task<OfflineKitDownloadResult> DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken);
Task<OfflineKitImportResult> ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken);
Task<OfflineKitStatus> GetOfflineKitStatusAsync(CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,111 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Cli.Services.Models;
/// <summary>
/// Describes a downloadable offline kit bundle and its companion artefacts
/// (manifest plus optional detached signatures), including resolved download URIs.
/// </summary>
internal sealed record OfflineKitBundleDescriptor(
    string BundleId,
    string BundleName,
    string BundleSha256,
    long BundleSize,
    Uri BundleDownloadUri,
    string ManifestName,
    string ManifestSha256,
    Uri ManifestDownloadUri,
    DateTimeOffset CapturedAt,
    string? Channel,
    string? Kind,
    bool IsDelta,
    string? BaseBundleId,
    string? BundleSignatureName,
    Uri? BundleSignatureDownloadUri,
    string? ManifestSignatureName,
    Uri? ManifestSignatureDownloadUri,
    long? ManifestSize);
/// <summary>
/// Result of an offline kit download: the descriptor plus local paths of the written files.
/// <c>FromCache</c> is true when an existing verified download was reused.
/// </summary>
internal sealed record OfflineKitDownloadResult(
    OfflineKitBundleDescriptor Descriptor,
    string BundlePath,
    string ManifestPath,
    string? BundleSignaturePath,
    string? ManifestSignaturePath,
    string MetadataPath,
    bool FromCache);
/// <summary>
/// Parameters for submitting a local offline kit bundle to the backend import endpoint;
/// only <c>BundlePath</c> is required, the rest is optional metadata.
/// </summary>
internal sealed record OfflineKitImportRequest(
    string BundlePath,
    string? ManifestPath,
    string? BundleSignaturePath,
    string? ManifestSignaturePath,
    string? BundleId,
    string? BundleSha256,
    long? BundleSize,
    DateTimeOffset? CapturedAt,
    string? Channel,
    string? Kind,
    bool? IsDelta,
    string? BaseBundleId,
    string? ManifestSha256,
    long? ManifestSize);
/// <summary>Outcome returned by the backend after an offline kit import is submitted.</summary>
internal sealed record OfflineKitImportResult(
    string? ImportId,
    string? Status,
    DateTimeOffset SubmittedAt,
    string? Message);
/// <summary>
/// Current offline kit state reported by the backend: the active bundle (all fields null/false
/// when none is imported) plus the per-component inventory.
/// </summary>
internal sealed record OfflineKitStatus(
    string? BundleId,
    string? Channel,
    string? Kind,
    bool IsDelta,
    string? BaseBundleId,
    DateTimeOffset? CapturedAt,
    DateTimeOffset? ImportedAt,
    string? BundleSha256,
    long? BundleSize,
    IReadOnlyList<OfflineKitComponentStatus> Components);
/// <summary>Status of a single component contained in the imported offline kit.</summary>
internal sealed record OfflineKitComponentStatus(
    string Name,
    string? Version,
    string? Digest,
    DateTimeOffset? CapturedAt,
    long? SizeBytes);
/// <summary>
/// On-disk JSON document written next to a downloaded offline kit, recording digests,
/// fully-qualified local paths, and the download timestamp for cache-hit detection.
/// </summary>
internal sealed record OfflineKitMetadataDocument
{
    public string? BundleId { get; init; }
    public string BundleName { get; init; } = string.Empty;
    public string BundleSha256 { get; init; } = string.Empty;
    public long BundleSize { get; init; }
    public string BundlePath { get; init; } = string.Empty;
    public DateTimeOffset CapturedAt { get; init; }
    public DateTimeOffset DownloadedAt { get; init; }
    public string? Channel { get; init; }
    public string? Kind { get; init; }
    public bool IsDelta { get; init; }
    public string? BaseBundleId { get; init; }
    public string ManifestName { get; init; } = string.Empty;
    public string ManifestSha256 { get; init; } = string.Empty;
    public long? ManifestSize { get; init; }
    public string ManifestPath { get; init; } = string.Empty;
    public string? BundleSignaturePath { get; init; }
    public string? ManifestSignaturePath { get; init; }
}

View File

@@ -0,0 +1,103 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Cli.Services.Models.Transport;
/// <summary>
/// Wire-format (JSON) shape of offline kit bundle metadata as returned by the backend;
/// all fields are nullable so parsing never fails on partial payloads.
/// </summary>
internal sealed class OfflineKitBundleDescriptorTransport
{
    public string? BundleId { get; set; }
    public string? BundleName { get; set; }
    public string? BundleSha256 { get; set; }
    public long BundleSize { get; set; }
    public string? BundleUrl { get; set; }
    public string? BundlePath { get; set; }
    public string? BundleSignatureName { get; set; }
    public string? BundleSignatureUrl { get; set; }
    public string? BundleSignaturePath { get; set; }
    public string? ManifestName { get; set; }
    public string? ManifestSha256 { get; set; }
    public long? ManifestSize { get; set; }
    public string? ManifestUrl { get; set; }
    public string? ManifestPath { get; set; }
    public string? ManifestSignatureName { get; set; }
    public string? ManifestSignatureUrl { get; set; }
    public string? ManifestSignaturePath { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
}
/// <summary>Wire-format shape of the currently imported bundle inside a status response.</summary>
internal sealed class OfflineKitStatusBundleTransport
{
    public string? BundleId { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    public string? BundleSha256 { get; set; }
    public long? BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public DateTimeOffset? ImportedAt { get; set; }
}
/// <summary>Wire-format shape of the offline kit status endpoint response.</summary>
internal sealed class OfflineKitStatusTransport
{
    public OfflineKitStatusBundleTransport? Current { get; set; }
    public List<OfflineKitComponentStatusTransport>? Components { get; set; }
}
/// <summary>Wire-format shape of a single kit component entry in the status response.</summary>
internal sealed class OfflineKitComponentStatusTransport
{
    public string? Name { get; set; }
    public string? Version { get; set; }
    public string? Digest { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public long? SizeBytes { get; set; }
}
/// <summary>Wire-format shape of the backend's response to an offline kit import submission.</summary>
internal sealed class OfflineKitImportResponseTransport
{
    public string? ImportId { get; set; }
    public string? Status { get; set; }
    public DateTimeOffset? SubmittedAt { get; set; }
    public string? Message { get; set; }
}

View File

@@ -18,7 +18,7 @@ If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md
|EXCITITOR-CLI-01-002 Export download & attestation UX|DevEx/CLI|EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001|DONE (2025-10-19) CLI export prints digest/size/Rekor metadata, `--output` downloads with SHA-256 verification + cache reuse, and unit coverage validated via `dotnet test src/StellaOps.Cli.Tests`.|
|EXCITITOR-CLI-01-003 CLI docs & examples for Excititor|Docs/CLI|EXCITITOR-CLI-01-001|**DOING (2025-10-19)** Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow.|
|CLI-RUNTIME-13-005 Runtime policy test verbs|DevEx/CLI|SCANNER-RUNTIME-12-302, ZASTAVA-WEBHOOK-12-102|**DONE (2025-10-19)** Added `runtime policy test` command (stdin/file support, JSON output), backend client method + typed models, verdict table output, docs/tests updated (`dotnet test src/StellaOps.Cli.Tests`).|
|CLI-OFFLINE-13-006 Offline kit workflows|DevEx/CLI|DEVOPS-OFFLINE-14-002|TODO Implement `offline kit pull/import/status` commands with integrity checks, resumable downloads, and doc updates.|
|CLI-OFFLINE-13-006 Offline kit workflows|DevEx/CLI|DEVOPS-OFFLINE-14-002|**DONE (2025-10-21)** Added `offline kit pull/import/status` commands with resumable downloads, digest/metadata validation, metrics, docs updates, and regression coverage (`dotnet test src/StellaOps.Cli.Tests`).|
|CLI-PLUGIN-13-007 Plugin packaging|DevEx/CLI|CLI-RUNTIME-13-005, CLI-OFFLINE-13-006|TODO Package non-core verbs as restart-time plug-ins (manifest + loader updates, tests ensuring no hot reload).|
|CLI-RUNTIME-13-008 Runtime policy contract sync|DevEx/CLI, Scanner WebService Guild|SCANNER-RUNTIME-12-302|**DONE (2025-10-19)** CLI runtime table/JSON now align with SCANNER-RUNTIME-12-302 (SBOM referrers, quieted provenance, confidence, verified Rekor); docs/09 updated with joint sign-off note.|
|CLI-RUNTIME-13-009 Runtime policy smoke fixture|DevEx/CLI, QA Guild|CLI-RUNTIME-13-005|**DONE (2025-10-19)** Spectre console harness + regression tests cover table and `--json` output paths for `runtime policy test`, using stubbed backend and integrated into `dotnet test` suite.|

View File

@@ -7,14 +7,16 @@ internal static class CliMetrics
{
private static readonly Meter Meter = new("StellaOps.Cli", "1.0.0");
private static readonly Counter<long> ScannerDownloadCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.download.count");
private static readonly Counter<long> ScannerInstallCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.install.count");
private static readonly Counter<long> ScanRunCounter = Meter.CreateCounter<long>("stellaops.cli.scan.run.count");
private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");
public static void RecordScannerDownload(string channel, bool fromCache)
=> ScannerDownloadCounter.Add(1, new KeyValuePair<string, object?>[]
{
private static readonly Counter<long> ScannerDownloadCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.download.count");
private static readonly Counter<long> ScannerInstallCounter = Meter.CreateCounter<long>("stellaops.cli.scanner.install.count");
private static readonly Counter<long> ScanRunCounter = Meter.CreateCounter<long>("stellaops.cli.scan.run.count");
private static readonly Counter<long> OfflineKitDownloadCounter = Meter.CreateCounter<long>("stellaops.cli.offline.kit.download.count");
private static readonly Counter<long> OfflineKitImportCounter = Meter.CreateCounter<long>("stellaops.cli.offline.kit.import.count");
private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");
public static void RecordScannerDownload(string channel, bool fromCache)
=> ScannerDownloadCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("channel", channel),
new("cache", fromCache ? "hit" : "miss")
});
@@ -23,16 +25,29 @@ internal static class CliMetrics
=> ScannerInstallCounter.Add(1, new KeyValuePair<string, object?>[] { new("channel", channel) });
public static void RecordScanRun(string runner, int exitCode)
=> ScanRunCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("runner", runner),
new("exit_code", exitCode)
});
public static IDisposable MeasureCommandDuration(string command)
{
var start = DateTime.UtcNow;
return new DurationScope(command, start);
=> ScanRunCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("runner", runner),
new("exit_code", exitCode)
});
public static void RecordOfflineKitDownload(string kind, bool fromCache)
=> OfflineKitDownloadCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("kind", string.IsNullOrWhiteSpace(kind) ? "unknown" : kind),
new("cache", fromCache ? "hit" : "miss")
});
public static void RecordOfflineKitImport(string? status)
=> OfflineKitImportCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("status", string.IsNullOrWhiteSpace(status) ? "queued" : status)
});
public static IDisposable MeasureCommandDuration(string command)
{
var start = DateTime.UtcNow;
return new DurationScope(command, start);
}
private sealed class DurationScope : IDisposable

View File

@@ -0,0 +1,33 @@
using System;
using System.IO;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
/// <summary>
/// Loads test fixture files from the "Fixtures" folder next to the test binaries,
/// normalizing Windows line endings so string comparisons are platform-independent.
/// </summary>
internal static class FixtureLoader
{
    private static readonly string FixturesRoot = Path.Combine(AppContext.BaseDirectory, "Fixtures");

    /// <summary>
    /// Reads a fixture by relative path (either slash style accepted) and returns its content
    /// with "\r\n" collapsed to "\n".
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="relativePath"/> is null or whitespace.</exception>
    /// <exception cref="FileNotFoundException">Thrown when the fixture does not exist on disk.</exception>
    public static string Read(string relativePath)
    {
        if (string.IsNullOrWhiteSpace(relativePath))
        {
            throw new ArgumentException("Fixture path must be provided.", nameof(relativePath));
        }

        // Accept both '/' and '\\' separators regardless of host OS.
        var normalized = relativePath.Replace('\\', Path.DirectorySeparatorChar).Replace('/', Path.DirectorySeparatorChar);
        var path = Path.Combine(FixturesRoot, normalized);
        if (!File.Exists(path))
        {
            throw new FileNotFoundException($"Fixture '{relativePath}' not found at '{path}'.", path);
        }

        var content = File.ReadAllText(path);
        return NormalizeLineEndings(content);
    }

    /// <summary>Applies the same line-ending normalization used by <see cref="Read"/> to an arbitrary string.</summary>
    public static string Normalize(string value) => NormalizeLineEndings(value);

    // NOTE(review): only "\r\n" is normalized; lone '\r' line endings pass through — confirm fixtures never use them.
    private static string NormalizeLineEndings(string value)
        => value.Replace("\r\n", "\n", StringComparison.Ordinal);
}

View File

@@ -0,0 +1,212 @@
{
"advisoryKey": "CVE-2025-1111",
"affectedPackages": [
{
"type": "semver",
"identifier": "pkg:npm/example@1.0.0",
"platform": null,
"versionRanges": [
{
"fixedVersion": "1.2.0",
"introducedVersion": "1.0.0",
"lastAffectedVersion": null,
"primitives": {
"evr": null,
"hasVendorExtensions": false,
"nevra": null,
"semVer": {
"constraintExpression": ">=1.0.0,<1.2.0",
"exactValue": null,
"fixed": "1.2.0",
"fixedInclusive": false,
"introduced": "1.0.0",
"introducedInclusive": true,
"lastAffected": null,
"lastAffectedInclusive": true,
"style": "range"
},
"vendorExtensions": null
},
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "range",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[].versionranges[]"
]
},
"rangeExpression": ">=1.0.0,<1.2.0",
"rangeKind": "semver"
}
],
"normalizedVersions": [
{
"scheme": "semver",
"type": "range",
"min": "1.0.0",
"minInclusive": true,
"max": "1.2.0",
"maxInclusive": false,
"value": null,
"notes": null
}
],
"statuses": [
{
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "status",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[].statuses[]"
]
},
"status": "fixed"
}
],
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "package",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[]"
]
},
{
"source": "stellaops-mirror",
"kind": "map",
"value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00;package=pkg:npm/example@1.0.0",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[]",
"affectedpackages[].normalizedversions[]",
"affectedpackages[].statuses[]",
"affectedpackages[].versionranges[]"
]
}
]
}
],
"aliases": [
"CVE-2025-1111",
"GHSA-xxxx-xxxx-xxxx"
],
"canonicalMetricId": "cvss::ghsa::CVE-2025-1111",
"credits": [
{
"displayName": "Security Researcher",
"role": "reporter",
"contacts": [
"mailto:researcher@example.com"
],
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "credit",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"credits[]"
]
}
}
],
"cvssMetrics": [
{
"baseScore": 9.8,
"baseSeverity": "critical",
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "cvss",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"cvssmetrics[]"
]
},
"vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"version": "3.1"
}
],
"cwes": [
{
"taxonomy": "cwe",
"identifier": "CWE-79",
"name": "Cross-site Scripting",
"uri": "https://cwe.mitre.org/data/definitions/79.html",
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "cwe",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"cwes[]"
]
}
]
}
],
"description": "Deterministic test payload distributed via mirror.",
"exploitKnown": false,
"language": "en",
"modified": "2025-10-11T00:00:00+00:00",
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "advisory",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"advisory"
]
},
{
"source": "stellaops-mirror",
"kind": "map",
"value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"advisory",
"credits[]",
"cvssmetrics[]",
"cwes[]",
"references[]"
]
}
],
"published": "2025-10-10T00:00:00+00:00",
"references": [
{
"kind": "advisory",
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "reference",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"references[]"
]
},
"sourceTag": "vendor",
"summary": "Vendor bulletin",
"url": "https://example.com/advisory"
}
],
"severity": "high",
"summary": "Upstream advisory replicated through StellaOps mirror.",
"title": "Sample Mirror Advisory"
}

View File

@@ -0,0 +1,202 @@
{
"advisories": [
{
"advisoryKey": "CVE-2025-1111",
"affectedPackages": [
{
"type": "semver",
"identifier": "pkg:npm/example@1.0.0",
"platform": null,
"versionRanges": [
{
"fixedVersion": "1.2.0",
"introducedVersion": "1.0.0",
"lastAffectedVersion": null,
"primitives": {
"evr": null,
"hasVendorExtensions": false,
"nevra": null,
"semVer": {
"constraintExpression": ">=1.0.0,<1.2.0",
"exactValue": null,
"fixed": "1.2.0",
"fixedInclusive": false,
"introduced": "1.0.0",
"introducedInclusive": true,
"lastAffected": null,
"lastAffectedInclusive": true,
"style": "range"
},
"vendorExtensions": null
},
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "range",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[].versionranges[]"
]
},
"rangeExpression": ">=1.0.0,<1.2.0",
"rangeKind": "semver"
}
],
"normalizedVersions": [
{
"scheme": "semver",
"type": "range",
"min": "1.0.0",
"minInclusive": true,
"max": "1.2.0",
"maxInclusive": false,
"value": null,
"notes": null
}
],
"statuses": [
{
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "status",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[].statuses[]"
]
},
"status": "fixed"
}
],
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "package",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"affectedpackages[]"
]
}
]
}
],
"aliases": [
"GHSA-xxxx-xxxx-xxxx"
],
"canonicalMetricId": "cvss::ghsa::CVE-2025-1111",
"credits": [
{
"displayName": "Security Researcher",
"role": "reporter",
"contacts": [
"mailto:researcher@example.com"
],
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "credit",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"credits[]"
]
}
}
],
"cvssMetrics": [
{
"baseScore": 9.8,
"baseSeverity": "critical",
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "cvss",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"cvssmetrics[]"
]
},
"vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
"version": "3.1"
}
],
"cwes": [
{
"taxonomy": "cwe",
"identifier": "CWE-79",
"name": "Cross-site Scripting",
"uri": "https://cwe.mitre.org/data/definitions/79.html",
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "cwe",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"cwes[]"
]
}
]
}
],
"description": "Deterministic test payload distributed via mirror.",
"exploitKnown": false,
"language": "en",
"modified": "2025-10-11T00:00:00+00:00",
"provenance": [
{
"source": "ghsa",
"kind": "map",
"value": "advisory",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"advisory"
]
}
],
"published": "2025-10-10T00:00:00+00:00",
"references": [
{
"kind": "advisory",
"provenance": {
"source": "ghsa",
"kind": "map",
"value": "reference",
"decisionReason": null,
"recordedAt": "2025-10-19T12:00:00+00:00",
"fieldMask": [
"references[]"
]
},
"sourceTag": "vendor",
"summary": "Vendor bulletin",
"url": "https://example.com/advisory"
}
],
"severity": "high",
"summary": "Upstream advisory replicated through StellaOps mirror.",
"title": "Sample Mirror Advisory"
}
],
"advisoryCount": 1,
"displayName": "Primary Mirror",
"domainId": "primary",
"generatedAt": "2025-10-19T12:00:00+00:00",
"schemaVersion": 1,
"sources": [
{
"advisoryCount": 1,
"firstRecordedAt": "2025-10-19T12:00:00+00:00",
"lastRecordedAt": "2025-10-19T12:00:00+00:00",
"source": "ghsa"
}
],
"targetRepository": "mirror-primary"
}

View File

@@ -0,0 +1,47 @@
using System;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
using StellaOps.Concelier.Models;
using Xunit;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
public sealed class MirrorAdvisoryMapperTests
{
    [Fact]
    public void Map_ProducesCanonicalAdvisoryWithMirrorProvenance()
    {
        // Serialize the sample bundle and pin it against the checked-in fixture so the
        // mapper's input is known to be canonical before mapping is exercised.
        var bundle = SampleData.CreateBundle();
        var serializedBundle = CanonicalJsonSerializer.SerializeIndented(bundle);
        Assert.Equal(
            FixtureLoader.Read(SampleData.BundleFixture).TrimEnd(),
            FixtureLoader.Normalize(serializedBundle).TrimEnd());

        // Map the bundle; exactly one advisory is expected out.
        var advisories = MirrorAdvisoryMapper.Map(bundle);
        var advisory = Assert.Single(advisories);

        // Pin the hand-built expected advisory against its fixture, then compare the
        // mapper's output to it via canonical serialization.
        var expectedAdvisory = SampleData.CreateExpectedMappedAdvisory();
        var expectedJson = CanonicalJsonSerializer.SerializeIndented(expectedAdvisory);
        Assert.Equal(
            FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd(),
            FixtureLoader.Normalize(expectedJson).TrimEnd());

        var mappedJson = CanonicalJsonSerializer.SerializeIndented(advisory);
        Assert.Equal(
            FixtureLoader.Normalize(expectedJson).TrimEnd(),
            FixtureLoader.Normalize(mappedJson).TrimEnd());

        // The advisory key must be surfaced among its own aliases.
        Assert.Contains(advisory.Aliases, alias => string.Equals(alias, advisory.AdvisoryKey, StringComparison.OrdinalIgnoreCase));

        // Both the advisory and its single affected package must carry a mirror "map"
        // provenance entry stamped by the connector.
        static bool IsMirrorMapProvenance(AdvisoryProvenance provenance)
            => string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal)
               && string.Equals(provenance.Kind, "map", StringComparison.Ordinal);

        Assert.Contains(advisory.Provenance, IsMirrorMapProvenance);
        var package = Assert.Single(advisory.AffectedPackages);
        Assert.Contains(package.Provenance, IsMirrorMapProvenance);
    }
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Connector.StellaOpsMirror.Security;
using StellaOps.Cryptography;
@@ -18,7 +20,7 @@ public sealed class MirrorSignatureVerifierTests
provider.UpsertSigningKey(key);
var registry = new CryptoProviderRegistry(new[] { provider });
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions()));
var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() });
var payload = payloadText.ToUtf8Bytes();
@@ -35,13 +37,13 @@ public sealed class MirrorSignatureVerifierTests
provider.UpsertSigningKey(key);
var registry = new CryptoProviderRegistry(new[] { provider });
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions()));
var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() });
var payload = payloadText.ToUtf8Bytes();
var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload);
var tampered = signature.Replace('a', 'b', StringComparison.Ordinal);
var tampered = signature.Replace('a', 'b');
await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync(payload, tampered, CancellationToken.None));
}
@@ -54,7 +56,7 @@ public sealed class MirrorSignatureVerifierTests
provider.UpsertSigningKey(key);
var registry = new CryptoProviderRegistry(new[] { provider });
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions()));
var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() });
var payload = payloadText.ToUtf8Bytes();
@@ -65,6 +67,7 @@ public sealed class MirrorSignatureVerifierTests
signature,
expectedKeyId: "unexpected-key",
expectedProvider: null,
fallbackPublicKeyPath: null,
cancellationToken: CancellationToken.None));
}
@@ -76,7 +79,7 @@ public sealed class MirrorSignatureVerifierTests
provider.UpsertSigningKey(key);
var registry = new CryptoProviderRegistry(new[] { provider });
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance);
var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions()));
var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() });
var payload = payloadText.ToUtf8Bytes();
@@ -89,9 +92,42 @@ public sealed class MirrorSignatureVerifierTests
signature,
expectedKeyId: key.Reference.KeyId,
expectedProvider: provider.Name,
fallbackPublicKeyPath: null,
cancellationToken: CancellationToken.None));
}
[Fact]
public async Task VerifyAsync_UsesCachedPublicKeyWhenFileRemoved()
{
    // Sign while the key is registered in the provider, then remove it so verification
    // cannot resolve the key through the registry and must use the fallback PEM path.
    var provider = new DefaultCryptoProvider();
    var signingKey = CreateSigningKey("mirror-key");
    provider.UpsertSigningKey(signingKey);
    var registry = new CryptoProviderRegistry(new[] { provider });
    var memoryCache = new MemoryCache(new MemoryCacheOptions());
    var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, memoryCache);
    var payload = "{\"advisories\":[]}";
    var (signature, _) = await CreateDetachedJwsAsync(provider, signingKey.Reference.KeyId, payload.ToUtf8Bytes());
    provider.RemoveSigningKey(signingKey.Reference.KeyId);
    var pemPath = WritePublicKeyPem(signingKey);
    try
    {
        // First call reads the PEM from disk; the second call runs after the file is
        // deleted and still must succeed, proving the public key was served from the
        // memory cache rather than re-read from disk.
        await verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None);
        File.Delete(pemPath);
        await verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None);
    }
    finally
    {
        // Best-effort cleanup of the temp PEM regardless of test outcome.
        if (File.Exists(pemPath))
        {
            File.Delete(pemPath);
        }
    }
}
private static CryptoSigningKey CreateSigningKey(string keyId)
{
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
@@ -99,6 +135,16 @@ public sealed class MirrorSignatureVerifierTests
return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow);
}
// Writes the public half of the signing key to a temp file as a "PUBLIC KEY" PEM
// (SubjectPublicKeyInfo) and returns the file path; callers delete the file when done.
private static string WritePublicKeyPem(CryptoSigningKey signingKey)
{
    var pemPath = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem");
    using var exporter = ECDsa.Create(signingKey.PublicParameters);
    var encoded = PemEncoding.Write("PUBLIC KEY", exporter.ExportSubjectPublicKeyInfo());
    File.WriteAllText(pemPath, encoded);
    return pemPath;
}
private static async Task<(string Signature, DateTimeOffset SignedAt)> CreateDetachedJwsAsync(
DefaultCryptoProvider provider,
string keyId,

View File

@@ -0,0 +1,265 @@
using System;
using System.Globalization;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
/// <summary>
/// Deterministic fixtures shared by the mirror connector tests: a canonical source bundle
/// plus the advisory expected after the mapper stamps mirror provenance onto it.
/// All timestamps are fixed so serialized output is byte-for-byte reproducible.
/// </summary>
internal static class SampleData
{
    // Checked-in JSON fixture file names the tests compare serialized output against.
    public const string BundleFixture = "mirror-bundle.sample.json";
    public const string AdvisoryFixture = "mirror-advisory.expected.json";

    // Mirror identity embedded in the sample bundle.
    public const string TargetRepository = "mirror-primary";
    public const string DomainId = "primary";

    // Canonical advisory key and its upstream GHSA alias.
    public const string AdvisoryKey = "CVE-2025-1111";
    public const string GhsaAlias = "GHSA-xxxx-xxxx-xxxx";

    // Fixed bundle generation time (UTC) used for every provenance entry below.
    public static DateTimeOffset GeneratedAt { get; } = new(2025, 10, 19, 12, 0, 0, TimeSpan.Zero);

    /// <summary>Builds the sample mirror bundle containing a single GHSA-sourced advisory.</summary>
    public static MirrorBundleDocument CreateBundle()
        => new(
            SchemaVersion: 1,
            GeneratedAt: GeneratedAt,
            TargetRepository: TargetRepository,
            DomainId: DomainId,
            DisplayName: "Primary Mirror",
            AdvisoryCount: 1,
            Advisories: new[] { CreateSourceAdvisory() },
            Sources: new[]
            {
                new MirrorSourceSummary("ghsa", GeneratedAt, GeneratedAt, 1)
            });

    /// <summary>
    /// Returns the advisory the mapper is expected to emit for <see cref="CreateBundle"/>:
    /// the source advisory with a mirror provenance entry appended at the top level and on
    /// its affected package, and with <see cref="AdvisoryKey"/> ensured among the aliases.
    /// </summary>
    public static Advisory CreateExpectedMappedAdvisory()
    {
        var baseAdvisory = CreateSourceAdvisory();
        var recordedAt = GeneratedAt.ToUniversalTime();
        var mirrorValue = BuildMirrorValue(recordedAt);

        // Top-level mirror provenance mirrors the field masks MirrorAdvisoryMapper stamps.
        var topProvenance = baseAdvisory.Provenance.Add(new AdvisoryProvenance(
            StellaOpsMirrorConnector.Source,
            "map",
            mirrorValue,
            recordedAt,
            new[]
            {
                ProvenanceFieldMasks.Advisory,
                ProvenanceFieldMasks.References,
                ProvenanceFieldMasks.Credits,
                ProvenanceFieldMasks.CvssMetrics,
                ProvenanceFieldMasks.Weaknesses,
            }));

        // Package-level provenance carries the mirror value suffixed with the package identifier.
        var package = baseAdvisory.AffectedPackages[0];
        var packageProvenance = package.Provenance.Add(new AdvisoryProvenance(
            StellaOpsMirrorConnector.Source,
            "map",
            $"{mirrorValue};package={package.Identifier}",
            recordedAt,
            new[]
            {
                ProvenanceFieldMasks.AffectedPackages,
                ProvenanceFieldMasks.VersionRanges,
                ProvenanceFieldMasks.PackageStatuses,
                ProvenanceFieldMasks.NormalizedVersions,
            }));

        var updatedPackage = new AffectedPackage(
            package.Type,
            package.Identifier,
            package.Platform,
            package.VersionRanges,
            package.Statuses,
            packageProvenance,
            package.NormalizedVersions);

        // Aliases: advisory key first, then the GHSA alias — matches the mapper's coverage rule.
        return new Advisory(
            AdvisoryKey,
            baseAdvisory.Title,
            baseAdvisory.Summary,
            baseAdvisory.Language,
            baseAdvisory.Published,
            baseAdvisory.Modified,
            baseAdvisory.Severity,
            baseAdvisory.ExploitKnown,
            new[] { AdvisoryKey, GhsaAlias },
            baseAdvisory.Credits,
            baseAdvisory.References,
            new[] { updatedPackage },
            baseAdvisory.CvssMetrics,
            topProvenance,
            baseAdvisory.Description,
            baseAdvisory.Cwes,
            baseAdvisory.CanonicalMetricId);
    }

    /// <summary>
    /// Builds the advisory exactly as it appears inside the mirror bundle: GHSA-sourced
    /// provenance only (no mirror stamps), normalized through the canonical serializer so
    /// fixture comparisons are deterministic.
    /// </summary>
    private static Advisory CreateSourceAdvisory()
    {
        var recordedAt = GeneratedAt.ToUniversalTime();

        // Single vendor reference.
        var reference = new AdvisoryReference(
            "https://example.com/advisory",
            "advisory",
            "vendor",
            "Vendor bulletin",
            new AdvisoryProvenance(
                "ghsa",
                "map",
                "reference",
                recordedAt,
                new[]
                {
                    ProvenanceFieldMasks.References,
                }));

        // Single reporter credit.
        var credit = new AdvisoryCredit(
            "Security Researcher",
            "reporter",
            new[] { "mailto:researcher@example.com" },
            new AdvisoryProvenance(
                "ghsa",
                "map",
                "credit",
                recordedAt,
                new[]
                {
                    ProvenanceFieldMasks.Credits,
                }));

        // SemVer range [1.0.0, 1.2.0) in both primitive and range form.
        var semVerPrimitive = new SemVerPrimitive(
            Introduced: "1.0.0",
            IntroducedInclusive: true,
            Fixed: "1.2.0",
            FixedInclusive: false,
            LastAffected: null,
            LastAffectedInclusive: true,
            ConstraintExpression: ">=1.0.0,<1.2.0",
            ExactValue: null);

        var range = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "1.2.0",
            lastAffectedVersion: null,
            rangeExpression: ">=1.0.0,<1.2.0",
            provenance: new AdvisoryProvenance(
                "ghsa",
                "map",
                "range",
                recordedAt,
                new[]
                {
                    ProvenanceFieldMasks.VersionRanges,
                }),
            primitives: new RangePrimitives(semVerPrimitive, null, null, null));

        var status = new AffectedPackageStatus(
            "fixed",
            new AdvisoryProvenance(
                "ghsa",
                "map",
                "status",
                recordedAt,
                new[]
                {
                    ProvenanceFieldMasks.PackageStatuses,
                }));

        // Normalized form of the same [1.0.0, 1.2.0) range.
        var normalizedRule = new NormalizedVersionRule(
            scheme: "semver",
            type: "range",
            min: "1.0.0",
            minInclusive: true,
            max: "1.2.0",
            maxInclusive: false,
            value: null,
            notes: null);

        var package = new AffectedPackage(
            AffectedPackageTypes.SemVer,
            "pkg:npm/example@1.0.0",
            platform: null,
            versionRanges: new[] { range },
            statuses: new[] { status },
            provenance: new[]
            {
                new AdvisoryProvenance(
                    "ghsa",
                    "map",
                    "package",
                    recordedAt,
                    new[]
                    {
                        ProvenanceFieldMasks.AffectedPackages,
                    })
            },
            normalizedVersions: new[] { normalizedRule });

        var cvss = new CvssMetric(
            "3.1",
            "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
            9.8,
            "critical",
            new AdvisoryProvenance(
                "ghsa",
                "map",
                "cvss",
                recordedAt,
                new[]
                {
                    ProvenanceFieldMasks.CvssMetrics,
                }));

        var weakness = new AdvisoryWeakness(
            "cwe",
            "CWE-79",
            "Cross-site Scripting",
            "https://cwe.mitre.org/data/definitions/79.html",
            new[]
            {
                new AdvisoryProvenance(
                    "ghsa",
                    "map",
                    "cwe",
                    recordedAt,
                    new[]
                    {
                        ProvenanceFieldMasks.Weaknesses,
                    })
            });

        var advisory = new Advisory(
            AdvisoryKey,
            "Sample Mirror Advisory",
            "Upstream advisory replicated through StellaOps mirror.",
            "en",
            published: new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero),
            modified: new DateTimeOffset(2025, 10, 11, 0, 0, 0, TimeSpan.Zero),
            severity: "high",
            exploitKnown: false,
            aliases: new[] { GhsaAlias },
            credits: new[] { credit },
            references: new[] { reference },
            affectedPackages: new[] { package },
            cvssMetrics: new[] { cvss },
            provenance: new[]
            {
                new AdvisoryProvenance(
                    "ghsa",
                    "map",
                    "advisory",
                    recordedAt,
                    new[]
                    {
                        ProvenanceFieldMasks.Advisory,
                    })
            },
            description: "Deterministic test payload distributed via mirror.",
            cwes: new[] { weakness },
            canonicalMetricId: "cvss::ghsa::CVE-2025-1111");

        // Normalize so field ordering matches what the connector produces.
        return CanonicalJsonSerializer.Normalize(advisory);
    }

    // Provenance "value" string recording domain, repository and generation instant
    // (round-trip "O" format, invariant culture) — must match the mapper's format.
    private static string BuildMirrorValue(DateTimeOffset recordedAt)
        => $"domain={DomainId};repository={TargetRepository};generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}";
}

View File

@@ -4,8 +4,11 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj" />
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj" />
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
<None Include="Fixtures\**\*.json" CopyToOutputDirectory="Always" />
</ItemGroup>
</Project>

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
@@ -15,11 +16,15 @@ using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common.Testing;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Testing;
using StellaOps.Cryptography;
using StellaOps.Concelier.Models;
using Xunit;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests;
@@ -168,6 +173,95 @@ public sealed class StellaOpsMirrorConnectorTests : IAsyncLifetime
await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None));
}
[Fact]
public async Task FetchAsync_VerifiesSignatureUsingFallbackPublicKey()
{
    // Build an index advertising a signed bundle whose digests match the seeded content.
    var manifestContent = "{\"domain\":\"primary\"}";
    var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0004\"}]}";
    var manifestDigest = ComputeDigest(manifestContent);
    var bundleDigest = ComputeDigest(bundleContent);
    var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true);
    var signingKey = CreateSigningKey("mirror-key");
    var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent);
    var publicKeyPath = WritePublicKeyPem(signingKey);

    // Enable signature verification and point it at the exported PEM as the fallback
    // public key for "mirror-key".
    await using var provider = await BuildServiceProviderAsync(options =>
    {
        options.Signature.Enabled = true;
        options.Signature.KeyId = "mirror-key";
        options.Signature.Provider = "default";
        options.Signature.PublicKeyPath = publicKeyPath;
    });
    try
    {
        SeedResponses(index, manifestContent, bundleContent, signatureValue);
        var connector = provider.GetRequiredService<StellaOpsMirrorConnector>();
        await connector.FetchAsync(provider, CancellationToken.None);

        // A successful fetch must leave source state present with zero recorded failures.
        var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
        var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal(0, state!.FailCount);
    }
    finally
    {
        // Clean up the temp PEM regardless of outcome.
        if (File.Exists(publicKeyPath))
        {
            File.Delete(publicKeyPath);
        }
    }
}
[Fact]
public async Task FetchAsync_DigestMismatchMarksFailure()
{
    // The index records the digest of the expected manifest, but the server is seeded
    // with a tampered manifest body — the digest check must reject the fetch.
    var manifestExpected = "{\"domain\":\"primary\"}";
    var manifestTampered = "{\"domain\":\"tampered\"}";
    var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0005\"}]}";
    var manifestDigest = ComputeDigest(manifestExpected);
    var bundleDigest = ComputeDigest(bundleContent);
    var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestExpected), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false);
    await using var provider = await BuildServiceProviderAsync();
    SeedResponses(index, manifestTampered, bundleContent, signature: null);
    var connector = provider.GetRequiredService<StellaOpsMirrorConnector>();
    await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None));

    // The failure must be recorded in source state, and no bundle digest may be
    // persisted to the cursor for the rejected bundle.
    var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
    var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None);
    Assert.NotNull(state);
    var cursor = state!.Cursor ?? new BsonDocument();
    Assert.True(state.FailCount >= 1);
    Assert.False(cursor.Contains("bundleDigest"));
}
[Fact]
public void ParseAndMap_PersistAdvisoriesFromBundle()
{
    // Pin the sample bundle's canonical JSON to the checked-in fixture before mapping.
    var bundleDocument = SampleData.CreateBundle();
    var bundleJson = CanonicalJsonSerializer.SerializeIndented(bundleDocument);
    var normalizedFixture = FixtureLoader.Read(SampleData.BundleFixture).TrimEnd();
    Assert.Equal(normalizedFixture, FixtureLoader.Normalize(bundleJson).TrimEnd());

    // Mapping must yield exactly one advisory whose canonical serialization matches
    // the expected-advisory fixture.
    var advisories = MirrorAdvisoryMapper.Map(bundleDocument);
    Assert.Single(advisories);
    var advisory = advisories[0];
    var expectedAdvisoryJson = FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd();
    var mappedJson = CanonicalJsonSerializer.SerializeIndented(advisory);
    Assert.Equal(expectedAdvisoryJson, FixtureLoader.Normalize(mappedJson).TrimEnd());
    // AdvisoryStore integration validated elsewhere; ensure canonical serialization is stable.
}
public Task InitializeAsync() => Task.CompletedTask;
public Task DisposeAsync()
@@ -323,6 +417,17 @@ public sealed class StellaOpsMirrorConnectorTests : IAsyncLifetime
return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow);
}
/// <summary>
/// Exports the public half of <paramref name="signingKey"/> as a "PUBLIC KEY" PEM
/// (SubjectPublicKeyInfo) file in the temp directory and returns its path.
/// Callers are responsible for deleting the file.
/// </summary>
private static string WritePublicKeyPem(CryptoSigningKey signingKey)
{
    ArgumentNullException.ThrowIfNull(signingKey);

    using var publicKey = ECDsa.Create(signingKey.PublicParameters);
    var subjectPublicKeyInfo = publicKey.ExportSubjectPublicKeyInfo();
    var pem = PemEncoding.Write("PUBLIC KEY", subjectPublicKeyInfo);

    var pemPath = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem");
    File.WriteAllText(pemPath, pem);
    return pemPath;
}
private static (string Signature, DateTimeOffset SignedAt) CreateDetachedJws(CryptoSigningKey signingKey, string payload)
{
var provider = new DefaultCryptoProvider();

View File

@@ -0,0 +1,203 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Globalization;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
/// <summary>
/// Maps advisories embedded in a mirror bundle to canonical form: normalizes each
/// advisory, ensures its key appears among its aliases, and appends mirror provenance
/// at both the advisory and affected-package level.
/// </summary>
internal static class MirrorAdvisoryMapper
{
    // Provenance kind used for every mirror-applied provenance entry.
    private const string MirrorProvenanceKind = "map";

    // Field masks advertised by the single top-level mirror provenance entry.
    private static readonly string[] TopLevelFieldMask =
    {
        ProvenanceFieldMasks.Advisory,
        ProvenanceFieldMasks.References,
        ProvenanceFieldMasks.Credits,
        ProvenanceFieldMasks.CvssMetrics,
        ProvenanceFieldMasks.Weaknesses,
    };

    /// <summary>
    /// Maps every advisory in <paramref name="bundle"/>. Returns an empty array when the
    /// bundle is null or carries no advisories; null advisory entries are skipped.
    /// </summary>
    public static ImmutableArray<Advisory> Map(MirrorBundleDocument bundle)
    {
        if (bundle?.Advisories is null || bundle.Advisories.Count == 0)
        {
            return ImmutableArray<Advisory>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<Advisory>(bundle.Advisories.Count);
        var recordedAt = bundle.GeneratedAt.ToUniversalTime();
        var mirrorValue = BuildMirrorValue(bundle, recordedAt);

        // One shared top-level provenance entry describing this mirror bundle.
        var topLevelProvenance = new AdvisoryProvenance(
            StellaOpsMirrorConnector.Source,
            MirrorProvenanceKind,
            mirrorValue,
            recordedAt,
            TopLevelFieldMask);

        foreach (var advisory in bundle.Advisories)
        {
            if (advisory is null)
            {
                continue;
            }

            // Normalize first so the emitted advisory is canonical and deterministic.
            var normalized = CanonicalJsonSerializer.Normalize(advisory);
            var aliases = EnsureAliasCoverage(normalized);
            var provenance = EnsureProvenance(normalized.Provenance, topLevelProvenance);
            var packages = EnsurePackageProvenance(normalized.AffectedPackages, mirrorValue, recordedAt);

            // Rebuild the advisory with the adjusted aliases/provenance/packages;
            // every other field is carried over unchanged.
            var updated = new Advisory(
                normalized.AdvisoryKey,
                normalized.Title,
                normalized.Summary,
                normalized.Language,
                normalized.Published,
                normalized.Modified,
                normalized.Severity,
                normalized.ExploitKnown,
                aliases,
                normalized.Credits,
                normalized.References,
                packages,
                normalized.CvssMetrics,
                provenance,
                normalized.Description,
                normalized.Cwes,
                normalized.CanonicalMetricId);
            builder.Add(updated);
        }

        return builder.ToImmutable();
    }

    // Returns the advisory's aliases de-duplicated case-insensitively (first occurrence
    // wins, order preserved), with the advisory key appended when not already present.
    private static IEnumerable<string> EnsureAliasCoverage(Advisory advisory)
    {
        var aliases = new List<string>(advisory.Aliases.Length + 1);
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var alias in advisory.Aliases)
        {
            if (seen.Add(alias))
            {
                aliases.Add(alias);
            }
        }
        if (seen.Add(advisory.AdvisoryKey))
        {
            aliases.Add(advisory.AdvisoryKey);
        }
        return aliases;
    }

    // Appends the mirror provenance entry unless an identical (source, kind, value)
    // entry already exists — keeps re-mapping idempotent.
    private static IEnumerable<AdvisoryProvenance> EnsureProvenance(
        ImmutableArray<AdvisoryProvenance> existing,
        AdvisoryProvenance mirrorProvenance)
    {
        if (!existing.IsDefaultOrEmpty
            && existing.Any(provenance =>
                string.Equals(provenance.Source, mirrorProvenance.Source, StringComparison.Ordinal)
                && string.Equals(provenance.Kind, mirrorProvenance.Kind, StringComparison.Ordinal)
                && string.Equals(provenance.Value, mirrorProvenance.Value, StringComparison.Ordinal)))
        {
            return existing;
        }
        return existing.Add(mirrorProvenance);
    }

    // Stamps each package with a mirror provenance entry whose value is suffixed with
    // the package identifier; packages that already carry the exact entry pass through.
    private static IEnumerable<AffectedPackage> EnsurePackageProvenance(
        ImmutableArray<AffectedPackage> packages,
        string mirrorValue,
        DateTimeOffset recordedAt)
    {
        if (packages.IsDefaultOrEmpty || packages.Length == 0)
        {
            return packages;
        }

        var results = new List<AffectedPackage>(packages.Length);
        foreach (var package in packages)
        {
            var value = $"{mirrorValue};package={package.Identifier}";
            if (!package.Provenance.IsDefaultOrEmpty
                && package.Provenance.Any(provenance =>
                    string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal)
                    && string.Equals(provenance.Kind, MirrorProvenanceKind, StringComparison.Ordinal)
                    && string.Equals(provenance.Value, value, StringComparison.Ordinal)))
            {
                results.Add(package);
                continue;
            }

            var masks = BuildPackageFieldMask(package);
            var packageProvenance = new AdvisoryProvenance(
                StellaOpsMirrorConnector.Source,
                MirrorProvenanceKind,
                value,
                recordedAt,
                masks);
            var provenance = package.Provenance.Add(packageProvenance);

            // Rebuild the package with the appended provenance; all other fields unchanged.
            var updated = new AffectedPackage(
                package.Type,
                package.Identifier,
                package.Platform,
                package.VersionRanges,
                package.Statuses,
                provenance,
                package.NormalizedVersions);
            results.Add(updated);
        }
        return results;
    }

    // Field mask advertises only the sections this package actually populates.
    private static string[] BuildPackageFieldMask(AffectedPackage package)
    {
        var masks = new HashSet<string>(StringComparer.Ordinal)
        {
            ProvenanceFieldMasks.AffectedPackages,
        };
        if (!package.VersionRanges.IsDefaultOrEmpty && package.VersionRanges.Length > 0)
        {
            masks.Add(ProvenanceFieldMasks.VersionRanges);
        }
        if (!package.Statuses.IsDefaultOrEmpty && package.Statuses.Length > 0)
        {
            masks.Add(ProvenanceFieldMasks.PackageStatuses);
        }
        if (!package.NormalizedVersions.IsDefaultOrEmpty && package.NormalizedVersions.Length > 0)
        {
            masks.Add(ProvenanceFieldMasks.NormalizedVersions);
        }
        return masks.ToArray();
    }

    // Builds "domain=...;repository=...;generated=..." — the repository segment is
    // omitted when blank; the timestamp uses round-trip ("O") invariant format.
    private static string BuildMirrorValue(MirrorBundleDocument bundle, DateTimeOffset recordedAt)
    {
        var segments = new List<string>
        {
            $"domain={bundle.DomainId}",
        };
        if (!string.IsNullOrWhiteSpace(bundle.TargetRepository))
        {
            segments.Add($"repository={bundle.TargetRepository}");
        }
        segments.Add($"generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}");
        return string.Join(';', segments);
    }
}

View File

@@ -0,0 +1,14 @@
using System.Text.Json.Serialization;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
/// <summary>
/// Wire format of a mirror export bundle: schema/version metadata, the mirror domain's
/// identity, the advisories it distributes, and per-source summaries.
/// Property names are pinned via <see cref="JsonPropertyNameAttribute"/> to keep the
/// JSON contract stable regardless of serializer naming policy.
/// </summary>
public sealed record MirrorBundleDocument(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
    [property: JsonPropertyName("targetRepository")] string? TargetRepository,
    [property: JsonPropertyName("domainId")] string DomainId,
    [property: JsonPropertyName("displayName")] string DisplayName,
    [property: JsonPropertyName("advisoryCount")] int AdvisoryCount,
    [property: JsonPropertyName("advisories")] IReadOnlyList<Advisory> Advisories,
    [property: JsonPropertyName("sources")] IReadOnlyList<MirrorSourceSummary> Sources);

View File

@@ -3,93 +3,107 @@ using MongoDB.Bson;
namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
internal sealed record StellaOpsMirrorCursor(
string? ExportId,
string? BundleDigest,
DateTimeOffset? GeneratedAt,
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuids = Array.Empty<Guid>();
public static StellaOpsMirrorCursor Empty { get; } = new(
ExportId: null,
BundleDigest: null,
GeneratedAt: null,
PendingDocuments: EmptyGuids,
PendingMappings: EmptyGuids);
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
};
if (!string.IsNullOrWhiteSpace(ExportId))
{
document["exportId"] = ExportId;
}
if (!string.IsNullOrWhiteSpace(BundleDigest))
{
document["bundleDigest"] = BundleDigest;
}
if (GeneratedAt.HasValue)
{
document["generatedAt"] = GeneratedAt.Value.UtcDateTime;
}
return document;
}
public static StellaOpsMirrorCursor FromBson(BsonDocument? document)
internal sealed record StellaOpsMirrorCursor(
string? ExportId,
string? BundleDigest,
DateTimeOffset? GeneratedAt,
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings,
string? CompletedFingerprint)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuids = Array.Empty<Guid>();
public static StellaOpsMirrorCursor Empty { get; } = new(
ExportId: null,
BundleDigest: null,
GeneratedAt: null,
PendingDocuments: EmptyGuids,
PendingMappings: EmptyGuids,
CompletedFingerprint: null);
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())),
};
if (!string.IsNullOrWhiteSpace(ExportId))
{
document["exportId"] = ExportId;
}
if (!string.IsNullOrWhiteSpace(BundleDigest))
{
document["bundleDigest"] = BundleDigest;
}
if (GeneratedAt.HasValue)
{
document["generatedAt"] = GeneratedAt.Value.UtcDateTime;
}
if (!string.IsNullOrWhiteSpace(CompletedFingerprint))
{
document["completedFingerprint"] = CompletedFingerprint;
}
return document;
}
public static StellaOpsMirrorCursor FromBson(BsonDocument? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var exportId = document.TryGetValue("exportId", out var exportValue) && exportValue.IsString ? exportValue.AsString : null;
var digest = document.TryGetValue("bundleDigest", out var digestValue) && digestValue.IsString ? digestValue.AsString : null;
DateTimeOffset? generatedAt = null;
if (document.TryGetValue("generatedAt", out var generatedValue))
{
generatedAt = generatedValue.BsonType switch
{
var exportId = document.TryGetValue("exportId", out var exportValue) && exportValue.IsString ? exportValue.AsString : null;
var digest = document.TryGetValue("bundleDigest", out var digestValue) && digestValue.IsString ? digestValue.AsString : null;
DateTimeOffset? generatedAt = null;
if (document.TryGetValue("generatedAt", out var generatedValue))
{
generatedAt = generatedValue.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(generatedValue.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(generatedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
var pendingMappings = ReadGuidArray(document, "pendingMappings");
return new StellaOpsMirrorCursor(exportId, digest, generatedAt, pendingDocuments, pendingMappings);
}
public StellaOpsMirrorCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = documents?.Distinct().ToArray() ?? EmptyGuids };
public StellaOpsMirrorCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = mappings?.Distinct().ToArray() ?? EmptyGuids };
public StellaOpsMirrorCursor WithBundleSnapshot(string? exportId, string? digest, DateTimeOffset generatedAt)
=> this with
{
ExportId = string.IsNullOrWhiteSpace(exportId) ? ExportId : exportId,
BundleDigest = digest,
GeneratedAt = generatedAt,
};
private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuids;
_ => null,
};
}
var pendingDocuments = ReadGuidArray(document, "pendingDocuments");
var pendingMappings = ReadGuidArray(document, "pendingMappings");
var fingerprint = document.TryGetValue("completedFingerprint", out var fingerprintValue) && fingerprintValue.IsString
? fingerprintValue.AsString
: null;
return new StellaOpsMirrorCursor(exportId, digest, generatedAt, pendingDocuments, pendingMappings, fingerprint);
}
public StellaOpsMirrorCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = documents?.Distinct().ToArray() ?? EmptyGuids };
public StellaOpsMirrorCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = mappings?.Distinct().ToArray() ?? EmptyGuids };
public StellaOpsMirrorCursor WithBundleSnapshot(string? exportId, string? digest, DateTimeOffset generatedAt)
=> this with
{
ExportId = string.IsNullOrWhiteSpace(exportId) ? ExportId : exportId,
BundleDigest = digest,
GeneratedAt = generatedAt,
};
public StellaOpsMirrorCursor WithCompletedFingerprint(string? fingerprint)
=> this with { CompletedFingerprint = string.IsNullOrWhiteSpace(fingerprint) ? null : fingerprint };
private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuids;
}
var results = new List<Guid>(array.Count);

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

// Expose internal types (mapper, cursor, bundle documents) to the connector's test assembly.
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.StellaOpsMirror.Tests")]

View File

@@ -1,7 +1,12 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;
@@ -13,6 +18,7 @@ namespace StellaOps.Concelier.Connector.StellaOpsMirror.Security;
/// </summary>
public sealed class MirrorSignatureVerifier
{
private const string CachePrefix = "stellaops:mirror:public-key:";
private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
@@ -20,21 +26,27 @@ public sealed class MirrorSignatureVerifier
private readonly ICryptoProviderRegistry _providerRegistry;
private readonly ILogger<MirrorSignatureVerifier> _logger;
private readonly IMemoryCache? _memoryCache;
public MirrorSignatureVerifier(ICryptoProviderRegistry providerRegistry, ILogger<MirrorSignatureVerifier> logger)
/// <summary>
/// Creates the verifier. <paramref name="memoryCache"/> is optional; when supplied it
/// backs caching of mirror public keys (see the "stellaops:mirror:public-key:" cache
/// prefix) — presumably for fallback keys loaded from disk; confirm against VerifyAsync.
/// </summary>
public MirrorSignatureVerifier(
    ICryptoProviderRegistry providerRegistry,
    ILogger<MirrorSignatureVerifier> logger,
    IMemoryCache? memoryCache = null)
{
    _providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _memoryCache = memoryCache;
}
public Task VerifyAsync(ReadOnlyMemory<byte> payload, string signatureValue, CancellationToken cancellationToken)
=> VerifyAsync(payload, signatureValue, expectedKeyId: null, expectedProvider: null, cancellationToken);
=> VerifyAsync(payload, signatureValue, expectedKeyId: null, expectedProvider: null, fallbackPublicKeyPath: null, cancellationToken);
public async Task VerifyAsync(
ReadOnlyMemory<byte> payload,
string signatureValue,
string? expectedKeyId,
string? expectedProvider,
string? fallbackPublicKeyPath,
CancellationToken cancellationToken)
{
if (payload.IsEmpty)
@@ -92,7 +104,8 @@ public sealed class MirrorSignatureVerifier
var signatureBytes = Base64UrlEncoder.DecodeBytes(encodedSignature);
var keyReference = new CryptoKeyReference(header.KeyId, header.Provider);
CryptoSignerResolution resolution;
CryptoSignerResolution? resolution = null;
bool providerVerified = false;
try
{
resolution = _providerRegistry.ResolveSigner(
@@ -100,19 +113,38 @@ public sealed class MirrorSignatureVerifier
header.Algorithm,
keyReference,
header.Provider);
providerVerified = await resolution.Signer.VerifyAsync(signingInput, signatureBytes, cancellationToken).ConfigureAwait(false);
if (providerVerified)
{
return;
}
_logger.LogWarning(
"Detached JWS verification failed for key {KeyId} via provider {Provider}.",
header.KeyId,
resolution.ProviderName);
}
catch (Exception ex) when (ex is InvalidOperationException or KeyNotFoundException)
{
_logger.LogWarning(ex, "Unable to resolve signer for mirror signature key {KeyId} via provider {Provider}.", header.KeyId, header.Provider ?? "<null>");
throw new InvalidOperationException("Detached JWS signature verification failed.", ex);
}
var verified = await resolution.Signer.VerifyAsync(signingInput, signatureBytes, cancellationToken).ConfigureAwait(false);
if (!verified)
if (providerVerified)
{
_logger.LogWarning("Detached JWS verification failed for key {KeyId} via provider {Provider}.", header.KeyId, resolution.ProviderName);
throw new InvalidOperationException("Detached JWS signature verification failed.");
return;
}
if (!string.IsNullOrWhiteSpace(fallbackPublicKeyPath) &&
await TryVerifyWithFallbackAsync(signingInput, signatureBytes, header.Algorithm, fallbackPublicKeyPath!, cancellationToken).ConfigureAwait(false))
{
_logger.LogDebug(
"Detached JWS verification succeeded for key {KeyId} using fallback public key at {Path}.",
header.KeyId,
fallbackPublicKeyPath);
return;
}
throw new InvalidOperationException("Detached JWS signature verification failed.");
}
private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature)
@@ -140,6 +172,97 @@ public sealed class MirrorSignatureVerifier
return buffer;
}
private async Task<bool> TryVerifyWithFallbackAsync(
ReadOnlyMemory<byte> signingInput,
ReadOnlyMemory<byte> signature,
string algorithm,
string fallbackPublicKeyPath,
CancellationToken cancellationToken)
{
try
{
cancellationToken.ThrowIfCancellationRequested();
var parameters = await GetFallbackPublicKeyAsync(fallbackPublicKeyPath, cancellationToken).ConfigureAwait(false);
if (parameters is null)
{
return false;
}
using var ecdsa = ECDsa.Create();
ecdsa.ImportParameters(parameters.Value);
var hashAlgorithm = ResolveHashAlgorithm(algorithm);
return ecdsa.VerifyData(signingInput.Span, signature.Span, hashAlgorithm);
}
catch (OperationCanceledException)
{
throw;
}
catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException)
{
_logger.LogWarning(ex, "Failed to verify mirror signature using fallback public key at {Path}.", fallbackPublicKeyPath);
return false;
}
}
private Task<ECParameters?> GetFallbackPublicKeyAsync(string path, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (_memoryCache is null)
{
return Task.FromResult(LoadPublicKey(path));
}
if (_memoryCache.TryGetValue<Lazy<ECParameters?>>(CachePrefix + path, out var cached))
{
return Task.FromResult(cached?.Value);
}
if (!File.Exists(path))
{
_logger.LogWarning("Mirror signature fallback public key path {Path} was not found.", path);
return Task.FromResult<ECParameters?>(null);
}
var lazy = new Lazy<ECParameters?>(
() => LoadPublicKey(path),
LazyThreadSafetyMode.ExecutionAndPublication);
var options = new MemoryCacheEntryOptions
{
AbsoluteExpirationRelativeToNow = TimeSpan.FromHours(6),
SlidingExpiration = TimeSpan.FromMinutes(30),
};
_memoryCache.Set(CachePrefix + path, lazy, options);
return Task.FromResult(lazy.Value);
}
private ECParameters? LoadPublicKey(string path)
{
try
{
var pem = File.ReadAllText(path);
using var ecdsa = ECDsa.Create();
ecdsa.ImportFromPem(pem.AsSpan());
return ecdsa.ExportParameters(includePrivateParameters: false);
}
catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException)
{
_logger.LogWarning(ex, "Failed to load mirror fallback public key from {Path}.", path);
return null;
}
}
private static HashAlgorithmName ResolveHashAlgorithm(string algorithmId)
=> algorithmId switch
{
{ } alg when string.Equals(alg, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA256,
{ } alg when string.Equals(alg, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA384,
{ } alg when string.Equals(alg, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA512,
_ => throw new InvalidOperationException($"Unsupported mirror signature algorithm '{algorithmId}'."),
};
private sealed record MirrorSignatureHeader(
[property: JsonPropertyName("alg")] string Algorithm,
[property: JsonPropertyName("kid")] string KeyId,

View File

@@ -12,8 +12,11 @@ using StellaOps.Concelier.Connector.StellaOpsMirror.Client;
using StellaOps.Concelier.Connector.StellaOpsMirror.Internal;
using StellaOps.Concelier.Connector.StellaOpsMirror.Security;
using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
@@ -21,11 +24,14 @@ namespace StellaOps.Concelier.Connector.StellaOpsMirror;
public sealed class StellaOpsMirrorConnector : IFeedConnector
{
public const string Source = "stellaops-mirror";
private const string BundleDtoSchemaVersion = "stellaops.mirror.bundle.v1";
private readonly MirrorManifestClient _client;
private readonly MirrorSignatureVerifier _signatureVerifier;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<StellaOpsMirrorConnector> _logger;
@@ -36,6 +42,8 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
MirrorSignatureVerifier signatureVerifier,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<StellaOpsMirrorConnectorOptions> options,
TimeProvider? timeProvider,
@@ -45,6 +53,8 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
_signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
@@ -84,6 +94,15 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
throw new InvalidOperationException(message);
}
var fingerprint = CreateFingerprint(index, domain);
var isNewDigest = !string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase);
if (isNewDigest)
{
pendingDocuments.Clear();
pendingMappings.Clear();
}
if (string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase))
{
_logger.LogInformation("Mirror bundle digest {Digest} unchanged; skipping fetch.", domain.Bundle.Digest);
@@ -100,19 +119,29 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
throw;
}
var completedFingerprint = isNewDigest ? null : cursor.CompletedFingerprint;
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithBundleSnapshot(domain.Bundle.Path, domain.Bundle.Digest, index.GeneratedAt);
.WithBundleSnapshot(domain.Bundle.Path, domain.Bundle.Digest, index.GeneratedAt)
.WithCompletedFingerprint(completedFingerprint);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
=> Task.CompletedTask;
{
ArgumentNullException.ThrowIfNull(services);
return ParseInternalAsync(cancellationToken);
}
public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
=> Task.CompletedTask;
{
ArgumentNullException.ThrowIfNull(services);
return MapInternalAsync(cancellationToken);
}
private async Task ProcessDomainAsync(
MirrorIndexDocument index,
@@ -152,6 +181,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
signatureValue,
expectedKeyId: _options.Signature.KeyId,
expectedProvider: _options.Signature.Provider,
fallbackPublicKeyPath: _options.Signature.PublicKeyPath,
cancellationToken).ConfigureAwait(false);
}
else if (domain.Bundle.Signature is not null)
@@ -288,6 +318,20 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
: digest.ToLowerInvariant();
}
/// <summary>
/// Builds the resume fingerprint for a mirror domain entry from its bundle digest
/// and the index generation timestamp.
/// </summary>
private static string? CreateFingerprint(MirrorIndexDocument index, MirrorIndexDomainEntry domain)
    => CreateFingerprint(domain.Bundle.Digest, index.GeneratedAt);
/// <summary>
/// Produces the export fingerprint "&lt;normalized-digest&gt;:&lt;generated-at UTC ISO-8601&gt;",
/// or null when either component is unavailable.
/// </summary>
private static string? CreateFingerprint(string? digest, DateTimeOffset? generatedAt)
{
    // Without a generation timestamp there is nothing to fingerprint.
    if (generatedAt is null)
    {
        return null;
    }

    var normalized = NormalizeDigest(digest ?? string.Empty);
    if (string.IsNullOrWhiteSpace(normalized))
    {
        return null;
    }

    var timestamp = generatedAt.Value.ToUniversalTime();
    return FormattableString.Invariant($"{normalized}:{timestamp:O}");
}
private static void ValidateOptions(StellaOpsMirrorConnectorOptions options)
{
if (options.BaseAddress is null || !options.BaseAddress.IsAbsoluteUri)
@@ -300,6 +344,226 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
throw new InvalidOperationException("Mirror connector requires domainId to be specified.");
}
}
/// <summary>
/// Drains the cursor's pending-document queue: downloads each raw mirror bundle from
/// GridFS, deserializes it into a <see cref="MirrorBundleDocument"/>, persists the DTO,
/// marks the document PendingMap, and moves its id into the pending-mapping set.
/// Documents with missing payloads or undeserializable content are marked Failed and
/// dropped from both queues; download errors are rethrown (treated as transient).
/// </summary>
private async Task ParseInternalAsync(CancellationToken cancellationToken)
{
    var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
    if (cursor.PendingDocuments.Count == 0)
    {
        // Nothing staged by the fetch phase.
        return;
    }

    // Work on mutable copies; the cursor record itself is immutable.
    var pendingDocuments = cursor.PendingDocuments.ToHashSet();
    var pendingMappings = cursor.PendingMappings.ToHashSet();
    var now = _timeProvider.GetUtcNow();
    var parsed = 0;
    var failures = 0;

    // Iterate a snapshot so removals from the hash sets are safe.
    foreach (var documentId in cursor.PendingDocuments.ToArray())
    {
        cancellationToken.ThrowIfCancellationRequested();

        var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (document is null)
        {
            // Document vanished from the store; drop it from both queues.
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        if (!document.GridFsId.HasValue)
        {
            // No raw payload to parse; mark the document failed permanently.
            _logger.LogWarning("Mirror bundle document {DocumentId} missing GridFS payload.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        byte[] payload;
        try
        {
            payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Storage errors are likely transient: abort the pass and retry later
            // instead of marking the document failed.
            _logger.LogError(ex, "Mirror bundle {DocumentId} failed to download from raw storage.", documentId);
            throw;
        }

        MirrorBundleDocument? bundle;
        string json;
        try
        {
            json = Encoding.UTF8.GetString(payload);
            bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json);
        }
        catch (Exception ex)
        {
            // Malformed payloads are permanent failures.
            _logger.LogWarning(ex, "Mirror bundle {DocumentId} failed to deserialize.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        if (bundle is null || bundle.Advisories is null)
        {
            _logger.LogWarning("Mirror bundle {DocumentId} produced null payload.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Remove(documentId);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        // Persist the bundle JSON as a DTO record and hand the document to the map phase.
        var dtoBson = BsonDocument.Parse(json);
        var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoBson, now);
        await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
        await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);

        pendingDocuments.Remove(documentId);
        pendingMappings.Add(document.Id);
        parsed++;

        _logger.LogDebug(
            "Parsed mirror bundle {DocumentId} domain={DomainId} advisories={AdvisoryCount}.",
            document.Id,
            bundle.DomainId,
            bundle.AdvisoryCount);
    }

    var updatedCursor = cursor
        .WithPendingDocuments(pendingDocuments)
        .WithPendingMappings(pendingMappings);

    await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);

    if (parsed > 0 || failures > 0)
    {
        _logger.LogInformation(
            "Mirror parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}.",
            parsed,
            failures,
            pendingDocuments.Count,
            pendingMappings.Count);
    }
}
/// <summary>
/// Drains the cursor's pending-mapping queue: loads each document's DTO, deserializes
/// the mirror bundle, maps it to canonical advisories via <c>MirrorAdvisoryMapper</c>,
/// upserts them into the advisory store, and marks the document Mapped. When the queue
/// empties with zero failures in this pass, the cursor's completed fingerprint is
/// advanced so the fetch phase can skip an unchanged bundle.
/// </summary>
private async Task MapInternalAsync(CancellationToken cancellationToken)
{
    var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
    if (cursor.PendingMappings.Count == 0)
    {
        // Nothing staged by the parse phase.
        return;
    }

    var pendingMappings = cursor.PendingMappings.ToHashSet();
    var mapped = 0;
    var failures = 0;
    // Start from the current fingerprint; only advanced on a fully clean pass.
    var completedFingerprint = cursor.CompletedFingerprint;

    // Iterate a snapshot so removals from the hash set are safe.
    foreach (var documentId in cursor.PendingMappings.ToArray())
    {
        cancellationToken.ThrowIfCancellationRequested();

        var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (document is null)
        {
            // Document vanished from the store; drop it from the queue.
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
        if (dtoRecord is null)
        {
            _logger.LogWarning("Mirror document {DocumentId} missing DTO payload.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        MirrorBundleDocument? bundle;
        try
        {
            // Round-trip the stored BSON back through the canonical serializer.
            var json = dtoRecord.Payload.ToJson();
            bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Mirror DTO for document {DocumentId} failed to deserialize.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        if (bundle is null || bundle.Advisories is null)
        {
            _logger.LogWarning("Mirror bundle DTO {DocumentId} evaluated to null.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            failures++;
            continue;
        }

        try
        {
            var advisories = MirrorAdvisoryMapper.Map(bundle);
            foreach (var advisory in advisories)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
            }

            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            mapped++;

            _logger.LogDebug(
                "Mirror map completed for document {DocumentId} domain={DomainId} advisories={AdvisoryCount}.",
                document.Id,
                bundle.DomainId,
                advisories.Length);
        }
        catch (Exception ex)
        {
            // Mapping errors are per-document: mark failed and keep processing the rest.
            _logger.LogError(ex, "Mirror mapping failed for document {DocumentId}.", documentId);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            failures++;
        }
    }

    // Only record the bundle as fully processed when this pass cleared the queue
    // without any failures.
    if (pendingMappings.Count == 0 && failures == 0)
    {
        var fingerprint = CreateFingerprint(cursor.BundleDigest, cursor.GeneratedAt);
        if (!string.IsNullOrWhiteSpace(fingerprint))
        {
            completedFingerprint = fingerprint;
        }
    }

    var updatedCursor = cursor
        .WithPendingMappings(pendingMappings)
        .WithCompletedFingerprint(completedFingerprint);

    await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);

    if (mapped > 0 || failures > 0)
    {
        _logger.LogInformation(
            "Mirror map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}.",
            mapped,
            failures,
            pendingMappings.Count);
    }
}
}
file static class UriExtensions

View File

@@ -23,23 +23,24 @@ public sealed class StellaOpsMirrorDependencyInjectionRoutine : IDependencyInjec
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddOptions<StellaOpsMirrorConnectorOptions>()
.Bind(configuration.GetSection(ConfigurationSection))
.PostConfigure(options =>
{
if (options.BaseAddress is null)
services.AddOptions<StellaOpsMirrorConnectorOptions>()
.Bind(configuration.GetSection(ConfigurationSection))
.PostConfigure(options =>
{
if (options.BaseAddress is null)
{
throw new InvalidOperationException("stellaopsMirror.baseAddress must be configured.");
}
})
.ValidateOnStart();
services.AddSourceCommon();
services.AddHttpClient(HttpClientName, (sp, client) =>
{
var options = sp.GetRequiredService<IOptions<StellaOpsMirrorConnectorOptions>>().Value;
client.BaseAddress = options.BaseAddress;
.ValidateOnStart();
services.AddSourceCommon();
services.AddMemoryCache();
services.AddHttpClient(HttpClientName, (sp, client) =>
{
var options = sp.GetRequiredService<IOptions<StellaOpsMirrorConnectorOptions>>().Value;
client.BaseAddress = options.BaseAddress;
client.Timeout = options.HttpTimeout;
client.DefaultRequestHeaders.Accept.Clear();
client.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));

View File

@@ -2,6 +2,6 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| FEEDCONN-STELLA-08-001 | DOING (2025-10-19) | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. *(In progress: HTTP client + detached JWS verifier scaffolding landed.)* |
| FEEDCONN-STELLA-08-002 | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | Mapper produces advisories/aliases/affected with mirror provenance; fixtures assert canonical parity with upstream JSON exporters. |
| FEEDCONN-STELLA-08-003 | TODO | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | Connector resumes from last export, handles deletion/delta cases, docs updated with config sample; integration test covers resume + new export scenario. |
| FEEDCONN-STELLA-08-001 | DONE (2025-10-20) | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. *(Completed 2025-10-20: detached JWS + digest enforcement, metadata persisted, and regression coverage via `dotnet test src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj`.)* |
| FEEDCONN-STELLA-08-002 | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | Mapper produces advisories/aliases/affected with mirror provenance; fixtures assert canonical parity with upstream JSON exporters. |
| FEEDCONN-STELLA-08-003 | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | Connector resumes from last export, handles deletion/delta cases, docs updated with config sample; integration test covers resume + new export scenario. |

View File

@@ -0,0 +1,320 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Core.Noise;
using StellaOps.Concelier.Models;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Noise;
/// <summary>
/// Unit tests for <c>NoisePriorService</c>: recompute persistence and rule hits,
/// probability for all-negative histories, and input normalization on package lookups.
/// </summary>
public sealed class NoisePriorServiceTests
{
    [Fact]
    public async Task RecomputeAsync_PersistsSummariesWithRules()
    {
        // Arrange: three statements for the same package — two "not affected" signals
        // from distinct vendor sources plus one "affected" signal carrying a version range.
        var statements = ImmutableArray.Create(
            CreateStatement(
                asOf: DateTimeOffset.Parse("2025-10-10T00:00:00Z"),
                CreatePackage(
                    statuses: new[]
                    {
                        CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"),
                    },
                    platform: "linux")));

        statements = statements.Add(CreateStatement(
            asOf: DateTimeOffset.Parse("2025-10-11T00:00:00Z"),
            CreatePackage(
                statuses: new[]
                {
                    CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.canonical"),
                },
                platform: "linux")));

        statements = statements.Add(CreateStatement(
            asOf: DateTimeOffset.Parse("2025-10-12T00:00:00Z"),
            CreatePackage(
                statuses: new[]
                {
                    CreateStatus(AffectedPackageStatusCatalog.Affected, "vendor.osv"),
                },
                platform: "linux",
                versionRanges: new[]
                {
                    new AffectedVersionRange(
                        rangeKind: "semver",
                        introducedVersion: "1.0.0",
                        fixedVersion: null,
                        lastAffectedVersion: null,
                        rangeExpression: null,
                        provenance: CreateProvenance("vendor.osv")),
                })));

        var replay = new AdvisoryReplay(
            "cve-9999-0001",
            null,
            statements,
            ImmutableArray<AdvisoryConflictSnapshot>.Empty);

        var eventLog = new FakeEventLog(replay);
        var repository = new FakeNoisePriorRepository();
        var now = DateTimeOffset.Parse("2025-10-21T12:00:00Z");
        var timeProvider = new FixedTimeProvider(now);
        var service = new NoisePriorService(eventLog, repository, timeProvider);

        // Act: the request uses an upper-case key to exercise key normalization.
        var result = await service.RecomputeAsync(
            new NoisePriorComputationRequest("CVE-9999-0001"),
            CancellationToken.None);

        // Assert: a single summary with the expected signal counts, probability,
        // observation window, rule hits, and a persisted copy in the repository.
        Assert.Equal("cve-9999-0001", result.VulnerabilityKey);
        Assert.Single(result.Summaries);

        var summary = result.Summaries[0];
        Assert.Equal("cve-9999-0001", summary.VulnerabilityKey);
        Assert.Equal("semver", summary.PackageType);
        Assert.Equal("pkg:npm/example", summary.PackageIdentifier);
        Assert.Equal("linux", summary.Platform);
        Assert.Equal(3, summary.ObservationCount);
        Assert.Equal(2, summary.NegativeSignals);
        Assert.Equal(1, summary.PositiveSignals);
        Assert.Equal(0, summary.NeutralSignals);
        Assert.Equal(1, summary.VersionRangeSignals);
        Assert.Equal(2, summary.UniqueNegativeSources);
        Assert.Equal(0.6, summary.Probability);
        Assert.Equal(now, summary.GeneratedAt);
        Assert.Equal(DateTimeOffset.Parse("2025-10-10T00:00:00Z"), summary.FirstObserved);
        Assert.Equal(DateTimeOffset.Parse("2025-10-12T00:00:00Z"), summary.LastObserved);
        Assert.Equal(
            new[] { "conflicting_signals", "multi_source_negative", "positive_evidence" },
            summary.RuleHits.ToArray());

        Assert.Equal("cve-9999-0001", repository.LastUpsertKey);
        Assert.NotNull(repository.LastUpsertSummaries);
        Assert.Single(repository.LastUpsertSummaries!);
    }

    [Fact]
    public async Task RecomputeAsync_AllNegativeSignalsProducesHighPrior()
    {
        // Arrange: only negative signals, both from the same vendor source.
        var statements = ImmutableArray.Create(
            CreateStatement(
                asOf: DateTimeOffset.Parse("2025-10-01T00:00:00Z"),
                CreatePackage(
                    statuses: new[]
                    {
                        CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"),
                    }),
                vulnerabilityKey: "cve-2025-1111"));

        statements = statements.Add(CreateStatement(
            asOf: DateTimeOffset.Parse("2025-10-02T00:00:00Z"),
            CreatePackage(
                statuses: new[]
                {
                    CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.redhat"),
                }),
            vulnerabilityKey: "cve-2025-1111"));

        var replay = new AdvisoryReplay(
            "cve-2025-1111",
            null,
            statements,
            ImmutableArray<AdvisoryConflictSnapshot>.Empty);

        var eventLog = new FakeEventLog(replay);
        var repository = new FakeNoisePriorRepository();
        var now = DateTimeOffset.Parse("2025-10-21T13:00:00Z");
        var timeProvider = new FixedTimeProvider(now);
        var service = new NoisePriorService(eventLog, repository, timeProvider);

        var result = await service.RecomputeAsync(
            new NoisePriorComputationRequest("cve-2025-1111"),
            CancellationToken.None);

        // Assert: uniformly negative evidence yields the maximum prior.
        var summary = Assert.Single(result.Summaries);
        Assert.Equal(1.0, summary.Probability);
        Assert.Equal(
            new[] { "all_negative", "sparse_observations" },
            summary.RuleHits.ToArray());
    }

    [Fact]
    public async Task GetByPackageAsync_NormalizesInputs()
    {
        // Arrange: one neutral observation so a summary exists to query.
        var statements = ImmutableArray.Create(
            CreateStatement(
                asOf: DateTimeOffset.Parse("2025-10-03T00:00:00Z"),
                CreatePackage(
                    statuses: new[]
                    {
                        CreateStatus(AffectedPackageStatusCatalog.Unknown, "vendor.generic"),
                    },
                    platform: "linux"),
                vulnerabilityKey: "cve-2025-2000"));

        var replay = new AdvisoryReplay(
            "cve-2025-2000",
            null,
            statements,
            ImmutableArray<AdvisoryConflictSnapshot>.Empty);

        var eventLog = new FakeEventLog(replay);
        var repository = new FakeNoisePriorRepository();
        var service = new NoisePriorService(eventLog, repository, new FixedTimeProvider(DateTimeOffset.UtcNow));

        await service.RecomputeAsync(
            new NoisePriorComputationRequest("CVE-2025-2000"),
            CancellationToken.None);

        // Act: query with mixed casing and surrounding whitespace.
        var summaries = await service.GetByPackageAsync(
            " SemVer ",
            "pkg:npm/example",
            " linux ",
            CancellationToken.None);

        // Assert: the lookup matched despite the unnormalized inputs.
        Assert.Single(summaries);
        Assert.Equal("semver", summaries[0].PackageType);
        Assert.Equal("linux", summaries[0].Platform);
    }

    // Builds a statement snapshot wrapping a minimal advisory for the given package.
    private static AdvisoryStatementSnapshot CreateStatement(
        DateTimeOffset asOf,
        AffectedPackage package,
        string vulnerabilityKey = "cve-9999-0001")
    {
        var advisory = new Advisory(
            advisoryKey: $"adv-{asOf:yyyyMMddHHmmss}",
            title: "Example Advisory",
            summary: null,
            language: "en",
            published: null,
            modified: asOf,
            severity: "high",
            exploitKnown: false,
            aliases: new[] { "CVE-TEST-0001" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { package },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: Array.Empty<AdvisoryProvenance>());

        return new AdvisoryStatementSnapshot(
            Guid.NewGuid(),
            vulnerabilityKey,
            advisory.AdvisoryKey,
            advisory,
            StatementHash: ImmutableArray<byte>.Empty,
            AsOf: asOf,
            RecordedAt: asOf,
            InputDocumentIds: ImmutableArray<Guid>.Empty);
    }

    // Builds a semver package fixture for pkg:npm/example with the given statuses.
    private static AffectedPackage CreatePackage(
        IEnumerable<AffectedPackageStatus> statuses,
        string? platform = null,
        IEnumerable<AffectedVersionRange>? versionRanges = null)
        => new(
            type: "semver",
            identifier: "pkg:npm/example",
            platform: platform,
            versionRanges: versionRanges,
            statuses: statuses,
            provenance: new[] { CreateProvenance("vendor.core") },
            normalizedVersions: null);

    // Wraps a status value with provenance from the named source.
    private static AffectedPackageStatus CreateStatus(string status, string source)
        => new(
            status,
            CreateProvenance(source));

    // Fixed-timestamp provenance fixture for the named source.
    private static AdvisoryProvenance CreateProvenance(string source, string kind = "vendor")
        => new(
            source,
            kind,
            value: string.Empty,
            recordedAt: DateTimeOffset.Parse("2025-10-01T00:00:00Z"),
            fieldMask: null,
            decisionReason: null);

    // Event log stub that always replays the canned AdvisoryReplay; appends are unsupported.
    private sealed class FakeEventLog : IAdvisoryEventLog
    {
        private readonly AdvisoryReplay _replay;

        public FakeEventLog(AdvisoryReplay replay)
        {
            _replay = replay;
        }

        public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
            => throw new NotSupportedException("Append operations are not required for tests.");

        public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
            => ValueTask.FromResult(_replay);
    }

    // In-memory repository that records the last upsert and serves reads from a list.
    private sealed class FakeNoisePriorRepository : INoisePriorRepository
    {
        private readonly List<NoisePriorSummary> _store = new();

        // Captured arguments of the most recent UpsertAsync call, for assertions.
        public string? LastUpsertKey { get; private set; }

        public IReadOnlyCollection<NoisePriorSummary>? LastUpsertSummaries { get; private set; }

        public ValueTask UpsertAsync(
            string vulnerabilityKey,
            IReadOnlyCollection<NoisePriorSummary> summaries,
            CancellationToken cancellationToken)
        {
            LastUpsertKey = vulnerabilityKey;
            LastUpsertSummaries = summaries;

            // Replace-by-key semantics: drop prior entries for the key, then add the new set.
            _store.RemoveAll(summary =>
                string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal));
            _store.AddRange(summaries);
            return ValueTask.CompletedTask;
        }

        public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync(
            string vulnerabilityKey,
            CancellationToken cancellationToken)
        {
            var matches = _store
                .Where(summary => string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal))
                .ToList();
            return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches);
        }

        public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync(
            string packageType,
            string packageIdentifier,
            string? platform,
            CancellationToken cancellationToken)
        {
            // Null platform is treated as empty string for comparison.
            var matches = _store
                .Where(summary =>
                    string.Equals(summary.PackageType, packageType, StringComparison.Ordinal) &&
                    string.Equals(summary.PackageIdentifier, packageIdentifier, StringComparison.Ordinal) &&
                    string.Equals(summary.Platform ?? string.Empty, platform ?? string.Empty, StringComparison.Ordinal))
                .ToList();
            return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches);
        }
    }

    // TimeProvider that always reports the same UTC instant.
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _now;

        public FixedTimeProvider(DateTimeOffset now)
        {
            _now = now.ToUniversalTime();
        }

        public override DateTimeOffset GetUtcNow() => _now;
    }
}

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Persistence abstraction for storing and retrieving noise prior summaries.
/// </summary>
public interface INoisePriorRepository
{
    /// <summary>Stores the computed summaries for the given vulnerability key.</summary>
    ValueTask UpsertAsync(
        string vulnerabilityKey,
        IReadOnlyCollection<NoisePriorSummary> summaries,
        CancellationToken cancellationToken);

    /// <summary>Returns all stored summaries for the given vulnerability key.</summary>
    ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync(
        string vulnerabilityKey,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns stored summaries for a package coordinate. <paramref name="platform"/> may
    /// be null for platform-agnostic entries.
    /// </summary>
    ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync(
        string packageType,
        string packageIdentifier,
        string? platform,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,25 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Computes and serves false-positive priors for canonical advisories.
/// </summary>
public interface INoisePriorService
{
    /// <summary>
    /// Recomputes noise prior summaries for the vulnerability named in
    /// <paramref name="request"/>, persists them, and returns the fresh snapshot.
    /// </summary>
    ValueTask<NoisePriorComputationResult> RecomputeAsync(
        NoisePriorComputationRequest request,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns previously computed summaries for a vulnerability key.
    /// </summary>
    ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync(
        string vulnerabilityKey,
        CancellationToken cancellationToken);

    /// <summary>
    /// Returns previously computed summaries for a package tuple; a null
    /// <paramref name="platform"/> targets platform-less entries.
    /// </summary>
    ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync(
        string packageType,
        string packageIdentifier,
        string? platform,
        CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,10 @@
using System;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Options for recomputing noise priors for a single vulnerability key.
/// </summary>
/// <param name="VulnerabilityKey">Vulnerability identifier; the service trims and lower-cases it before use.</param>
/// <param name="AsOf">Optional instant passed to the event-log replay; null presumably replays the full log — confirm against IAdvisoryEventLog.</param>
public sealed record NoisePriorComputationRequest(
    string VulnerabilityKey,
    DateTimeOffset? AsOf = null);

View File

@@ -0,0 +1,10 @@
using System.Collections.Immutable;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Results of a recompute operation containing per-package noise prior summaries.
/// </summary>
/// <param name="VulnerabilityKey">Normalized (trimmed, lower-cased) key the summaries were computed for.</param>
/// <param name="Summaries">One summary per (type, identifier, platform) tuple, deterministically ordered; empty when no statements matched.</param>
public sealed record NoisePriorComputationResult(
    string VulnerabilityKey,
    ImmutableArray<NoisePriorSummary> Summaries);

View File

@@ -0,0 +1,400 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Default implementation that derives false-positive priors from advisory statements.
/// Replays the advisory event log for a vulnerability, tallies negative/positive/neutral
/// status signals per affected package, and converts the tallies into a bounded
/// probability plus the set of heuristic rules that fired.
/// </summary>
public sealed class NoisePriorService : INoisePriorService
{
    // Statuses asserting the package is NOT affected — negative (noise) evidence.
    private static readonly HashSet<string> NegativeStatuses = new(
        new[]
        {
            AffectedPackageStatusCatalog.KnownNotAffected,
            AffectedPackageStatusCatalog.NotAffected,
            AffectedPackageStatusCatalog.NotApplicable,
        },
        StringComparer.Ordinal);

    // Statuses asserting the package is (or may be) affected — positive evidence.
    private static readonly HashSet<string> PositiveStatuses = new(
        new[]
        {
            AffectedPackageStatusCatalog.KnownAffected,
            AffectedPackageStatusCatalog.Affected,
            AffectedPackageStatusCatalog.UnderInvestigation,
            AffectedPackageStatusCatalog.Pending,
        },
        StringComparer.Ordinal);

    // Remediation statuses; counted together with positive evidence because a fix
    // implies the vulnerability was real for this package.
    private static readonly HashSet<string> ResolvedStatuses = new(
        new[]
        {
            AffectedPackageStatusCatalog.Fixed,
            AffectedPackageStatusCatalog.FirstFixed,
            AffectedPackageStatusCatalog.Mitigated,
        },
        StringComparer.Ordinal);

    private readonly IAdvisoryEventLog _eventLog;
    private readonly INoisePriorRepository _repository;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the service.
    /// </summary>
    /// <param name="eventLog">Source of advisory statements to replay.</param>
    /// <param name="repository">Persistence for the computed summaries.</param>
    /// <param name="timeProvider">Clock override for deterministic tests; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="eventLog"/> or <paramref name="repository"/> is null.</exception>
    public NoisePriorService(
        IAdvisoryEventLog eventLog,
        INoisePriorRepository repository,
        TimeProvider? timeProvider = null)
    {
        _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog));
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async ValueTask<NoisePriorComputationResult> RecomputeAsync(
        NoisePriorComputationRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var normalizedKey = NormalizeKey(request.VulnerabilityKey, nameof(request.VulnerabilityKey));
        var replay = await _eventLog.ReplayAsync(normalizedKey, request.AsOf, cancellationToken).ConfigureAwait(false);
        var generatedAt = _timeProvider.GetUtcNow();
        var summaries = ComputeSummaries(replay, generatedAt);

        // Persist before returning so readers observe the same snapshot we hand back.
        await _repository.UpsertAsync(normalizedKey, summaries, cancellationToken).ConfigureAwait(false);
        return new NoisePriorComputationResult(
            normalizedKey,
            summaries);
    }

    /// <inheritdoc />
    public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync(
        string vulnerabilityKey,
        CancellationToken cancellationToken)
    {
        var normalizedKey = NormalizeKey(vulnerabilityKey, nameof(vulnerabilityKey));
        return _repository.GetByVulnerabilityAsync(normalizedKey, cancellationToken);
    }

    /// <inheritdoc />
    public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync(
        string packageType,
        string packageIdentifier,
        string? platform,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(packageType);
        ArgumentException.ThrowIfNullOrWhiteSpace(packageIdentifier);

        // Package type is case-insensitive by convention; identifiers keep their casing.
        var normalizedType = packageType.Trim().ToLowerInvariant();
        var normalizedIdentifier = packageIdentifier.Trim();
        var normalizedPlatform = NormalizePlatform(platform);
        return _repository.GetByPackageAsync(
            normalizedType,
            normalizedIdentifier,
            normalizedPlatform,
            cancellationToken);
    }

    /// <summary>
    /// Aggregates replayed statements into one summary per (type, identifier, platform)
    /// package tuple, ordered deterministically so repeated recomputes yield identical arrays.
    /// Static: uses no instance state.
    /// </summary>
    private static ImmutableArray<NoisePriorSummary> ComputeSummaries(
        AdvisoryReplay replay,
        DateTimeOffset generatedAt)
    {
        if (replay is null || replay.Statements.IsDefaultOrEmpty)
        {
            return ImmutableArray<NoisePriorSummary>.Empty;
        }

        var accumulators = new Dictionary<PackageKey, NoiseAccumulator>(capacity: replay.Statements.Length);
        foreach (var statement in replay.Statements)
        {
            if (statement is null)
            {
                continue;
            }

            foreach (var package in statement.Advisory.AffectedPackages)
            {
                // Packages without an identifier cannot be keyed; skip them.
                if (package is null || string.IsNullOrWhiteSpace(package.Identifier))
                {
                    continue;
                }

                var platform = NormalizePlatform(package.Platform);
                var key = new PackageKey(package.Type, package.Identifier, platform);
                if (!accumulators.TryGetValue(key, out var accumulator))
                {
                    accumulator = new NoiseAccumulator(
                        replay.VulnerabilityKey,
                        package.Type,
                        package.Identifier,
                        platform);
                    accumulators.Add(key, accumulator);
                }

                accumulator.Register(statement.AsOf, package);
            }
        }

        if (accumulators.Count == 0)
        {
            return ImmutableArray<NoisePriorSummary>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<NoisePriorSummary>(accumulators.Count);
        foreach (var accumulator in accumulators.Values
            .OrderBy(static a => a.PackageType, StringComparer.Ordinal)
            .ThenBy(static a => a.PackageIdentifier, StringComparer.Ordinal)
            .ThenBy(static a => a.Platform, StringComparer.Ordinal))
        {
            builder.Add(accumulator.ToSummary(generatedAt));
        }

        return builder.ToImmutable();
    }

    /// <summary>
    /// Validates and canonicalizes a vulnerability key (trim + lower-case).
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="value"/> is null or whitespace.</exception>
    private static string NormalizeKey(string value, string parameterName)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException("Value must be provided.", parameterName);
        }

        return value.Trim().ToLowerInvariant();
    }

    // Blank/whitespace platforms collapse to null so they dedupe with "no platform".
    private static string? NormalizePlatform(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();

    // Dictionary key grouping observations per package tuple.
    private sealed record PackageKey(
        string PackageType,
        string PackageIdentifier,
        string? Platform);

    /// <summary>
    /// Mutable per-package tally of status signals collected during replay; converted
    /// into an immutable <see cref="NoisePriorSummary"/> once all statements are processed.
    /// </summary>
    private sealed class NoiseAccumulator
    {
        private readonly string _vulnerabilityKey;

        // Distinct provenance sources that asserted a negative status; multi-source
        // agreement boosts the noise probability (see ComputeProbability).
        private readonly HashSet<string> _negativeSources = new(StringComparer.Ordinal);

        public NoiseAccumulator(
            string vulnerabilityKey,
            string packageType,
            string packageIdentifier,
            string? platform)
        {
            _vulnerabilityKey = vulnerabilityKey;
            PackageType = packageType;
            PackageIdentifier = packageIdentifier;
            Platform = platform;

            // Sentinels: collapsed to generatedAt in ToSummary if never updated.
            FirstObserved = DateTimeOffset.MaxValue;
            LastObserved = DateTimeOffset.MinValue;
        }

        public string PackageType { get; }

        public string PackageIdentifier { get; }

        public string? Platform { get; }

        public int ObservationCount { get; private set; }

        public int NegativeSignals { get; private set; }

        public int PositiveSignals { get; private set; }

        public int NeutralSignals { get; private set; }

        public int VersionRangeSignals { get; private set; }

        public bool HasMissingStatus { get; private set; }

        public DateTimeOffset FirstObserved { get; private set; }

        public DateTimeOffset LastObserved { get; private set; }

        public int UniqueNegativeSources => _negativeSources.Count;

        /// <summary>
        /// Folds one statement's package entry into the tally.
        /// </summary>
        public void Register(DateTimeOffset asOf, AffectedPackage package)
        {
            ObservationCount++;

            var asOfUtc = asOf.ToUniversalTime();
            if (asOfUtc < FirstObserved)
            {
                FirstObserved = asOfUtc;
            }

            if (asOfUtc > LastObserved)
            {
                LastObserved = asOfUtc;
            }

            var statuses = package.Statuses;
            if (statuses.IsDefaultOrEmpty)
            {
                // An absent status set is itself a weak noise signal; also guards the
                // loop below from enumerating a default ImmutableArray (which throws).
                HasMissingStatus = true;
            }
            else
            {
                foreach (var status in statuses)
                {
                    if (NegativeStatuses.Contains(status.Status))
                    {
                        NegativeSignals++;
                        if (!string.IsNullOrWhiteSpace(status.Provenance.Source))
                        {
                            _negativeSources.Add(status.Provenance.Source);
                        }
                    }
                    else if (PositiveStatuses.Contains(status.Status) || ResolvedStatuses.Contains(status.Status))
                    {
                        PositiveSignals++;
                    }
                    else
                    {
                        // "Unknown" and any unrecognized statuses count as neutral evidence.
                        NeutralSignals++;
                    }
                }
            }

            if (!package.VersionRanges.IsDefaultOrEmpty)
            {
                // A concrete version range is treated as implicit positive evidence.
                VersionRangeSignals++;
            }
        }

        /// <summary>
        /// Freezes the tally into an immutable summary. Observation timestamps that were
        /// never updated fall back to <paramref name="generatedAt"/>.
        /// </summary>
        public NoisePriorSummary ToSummary(DateTimeOffset generatedAt)
        {
            var boundedFirst = FirstObserved == DateTimeOffset.MaxValue ? generatedAt : FirstObserved;
            var boundedLast = LastObserved == DateTimeOffset.MinValue ? generatedAt : LastObserved;
            var probability = ComputeProbability();
            var rules = BuildRules();
            return new NoisePriorSummary(
                _vulnerabilityKey,
                PackageType,
                PackageIdentifier,
                Platform,
                probability,
                ObservationCount,
                NegativeSignals,
                PositiveSignals,
                NeutralSignals,
                VersionRangeSignals,
                UniqueNegativeSources,
                rules,
                boundedFirst,
                boundedLast,
                generatedAt);
        }

        /// <summary>
        /// Heuristic noise score in [0, 1]: base ratio of negative to decisive signals,
        /// adjusted for one-sided evidence, multi-source agreement, and neutral noise;
        /// rounded toward zero to 4 decimals for reproducible statistics.
        /// </summary>
        private double ComputeProbability()
        {
            // Version ranges count as positive evidence alongside explicit statuses.
            var positiveSignals = PositiveSignals + VersionRangeSignals;
            var denominator = NegativeSignals + positiveSignals;
            double score;
            if (denominator == 0)
            {
                // No decisive evidence: fall back to fixed priors.
                if (HasMissingStatus)
                {
                    score = 0.35;
                }
                else if (NeutralSignals > 0)
                {
                    score = 0.40;
                }
                else
                {
                    score = 0.0;
                }
            }
            else
            {
                score = NegativeSignals / (double)denominator;

                // Boost purely-negative evidence; dampen purely-positive evidence.
                if (NegativeSignals > 0 && positiveSignals == 0)
                {
                    score = Math.Min(1.0, score + 0.20);
                }

                if (positiveSignals > 0 && NegativeSignals == 0)
                {
                    score = Math.Max(0.0, score - 0.25);
                }

                if (PositiveSignals > NegativeSignals)
                {
                    score = Math.Max(0.0, score - 0.10);
                }

                // Independent sources agreeing on "not affected" strengthens the prior.
                if (UniqueNegativeSources >= 2)
                {
                    score = Math.Min(1.0, score + 0.10);
                }

                // Each neutral signal nudges the score up slightly, capped at +0.10.
                if (NeutralSignals > 0)
                {
                    var neutralBoost = Math.Min(0.10, NeutralSignals * 0.02);
                    score = Math.Min(1.0, score + neutralBoost);
                }
            }

            return Math.Round(Math.Clamp(score, 0.0, 1.0), 4, MidpointRounding.ToZero);
        }

        /// <summary>
        /// Names the heuristics that fired, sorted ordinally for deterministic output.
        /// </summary>
        private ImmutableArray<string> BuildRules()
        {
            var rules = new HashSet<string>(StringComparer.Ordinal);
            if (NegativeSignals > 0 && PositiveSignals == 0 && VersionRangeSignals == 0)
            {
                rules.Add("all_negative");
            }

            if (UniqueNegativeSources >= 2)
            {
                rules.Add("multi_source_negative");
            }

            if (PositiveSignals > 0 || VersionRangeSignals > 0)
            {
                rules.Add("positive_evidence");
            }

            if (NegativeSignals > 0 && (PositiveSignals > 0 || VersionRangeSignals > 0))
            {
                rules.Add("conflicting_signals");
            }

            if (ObservationCount < 3)
            {
                rules.Add("sparse_observations");
            }

            if (HasMissingStatus)
            {
                rules.Add("missing_status");
            }

            if (NeutralSignals > 0 && NegativeSignals == 0 && PositiveSignals == 0 && VersionRangeSignals == 0)
            {
                rules.Add("neutral_only");
            }

            return rules.OrderBy(static rule => rule, StringComparer.Ordinal).ToImmutableArray();
        }
    }
}

View File

@@ -0,0 +1,24 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Dependency injection helpers for the noise prior service.
/// </summary>
public static class NoisePriorServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="INoisePriorService"/> (backed by <see cref="NoisePriorService"/>)
    /// and, if none is present yet, the system <see cref="TimeProvider"/>.
    /// </summary>
    /// <param name="services">The service collection to register into.</param>
    /// <returns>The same <paramref name="services"/> instance, for chaining.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="services"/> is null.</exception>
    public static IServiceCollection AddNoisePriorService(this IServiceCollection services)
    {
        // Throw-helper keeps the guard consistent with NoisePriorService's own checks.
        ArgumentNullException.ThrowIfNull(services);

        // TryAdd so a host-supplied TimeProvider (e.g. a fake clock in tests) wins.
        services.TryAddSingleton(TimeProvider.System);
        services.AddSingleton<INoisePriorService, NoisePriorService>();
        return services;
    }
}

View File

@@ -0,0 +1,24 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Concelier.Core.Noise;
/// <summary>
/// Immutable noise prior summary describing false-positive likelihood signals for a package/environment tuple.
/// </summary>
/// <param name="VulnerabilityKey">Normalized vulnerability key the summary belongs to.</param>
/// <param name="PackageType">Package type component of the tuple.</param>
/// <param name="PackageIdentifier">Package identifier component of the tuple.</param>
/// <param name="Platform">Platform component; null when the observation carried no platform.</param>
/// <param name="Probability">Heuristic false-positive likelihood in [0, 1], rounded to 4 decimals.</param>
/// <param name="ObservationCount">Number of statement observations folded into this summary.</param>
/// <param name="NegativeSignals">Count of "not affected"-style status signals.</param>
/// <param name="PositiveSignals">Count of "affected"/"fixed"-style status signals.</param>
/// <param name="NeutralSignals">Count of unknown/unrecognized status signals.</param>
/// <param name="VersionRangeSignals">Count of observations that carried concrete version ranges.</param>
/// <param name="UniqueNegativeSources">Distinct provenance sources asserting a negative status.</param>
/// <param name="RuleHits">Ordinally sorted names of the heuristic rules that fired.</param>
/// <param name="FirstObserved">Earliest observation timestamp (UTC); falls back to <paramref name="GeneratedAt"/> when none.</param>
/// <param name="LastObserved">Latest observation timestamp (UTC); falls back to <paramref name="GeneratedAt"/> when none.</param>
/// <param name="GeneratedAt">UTC instant at which the summary was computed.</param>
public sealed record NoisePriorSummary(
    string VulnerabilityKey,
    string PackageType,
    string PackageIdentifier,
    string? Platform,
    double Probability,
    int ObservationCount,
    int NegativeSignals,
    int PositiveSignals,
    int NeutralSignals,
    int VersionRangeSignals,
    int UniqueNegativeSources,
    ImmutableArray<string> RuleHits,
    DateTimeOffset FirstObserved,
    DateTimeOffset LastObserved,
    DateTimeOffset GeneratedAt);

View File

@@ -17,5 +17,5 @@
|Canonical merger parity for description/CWE/canonical metric|BE-Core|Models|DONE (2025-10-15) merger now populates description/CWEs/canonical metric id with provenance and regression tests cover the new decisions.|
|Reference normalization & freshness instrumentation cleanup|BE-Core, QA|Models|DONE (2025-10-15) reference keys normalized, freshness overrides applied to union fields, and new tests assert decision logging.|
|FEEDCORE-ENGINE-07-001 Advisory event log & asOf queries|Team Core Engine & Storage Analytics|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-19)** Implemented `AdvisoryEventLog` service plus repository contracts, canonical hashing, and lower-cased key normalization with replay support; documented determinism guarantees. Tests: `dotnet test src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj`.|
|FEEDCORE-ENGINE-07-002 Noise prior computation service|Team Core Engine & Data Science|FEEDCORE-ENGINE-07-001|TODO Build rule-based learner capturing false-positive priors per package/env, persist summaries, and expose APIs for Excititor/scan suppressors with reproducible statistics.|
|FEEDCORE-ENGINE-07-002 Noise prior computation service|Team Core Engine & Data Science|FEEDCORE-ENGINE-07-001|**DONE (2025-10-21)** Built rule-based learner capturing false-positive priors per package/env, persisted summaries, and exposed APIs for Excititor/scan suppressors with reproducible statistics.|
|FEEDCORE-ENGINE-07-003 Unknown state ledger & confidence seeding|Team Core Engine & Storage Analytics|FEEDCORE-ENGINE-07-001|TODO Persist `unknown_vuln_range/unknown_origin/ambiguous_fix` markers with initial confidence bands, expose query surface for Policy, and add fixtures validating canonical serialization.|

View File

@@ -1,6 +1,6 @@
# Range Primitive Coordination (Sprint 2)
_Status date: 2025-10-11_
_Status date: 2025-10-20_
## Why this exists
- SemVer range outputs must follow the embedded rule guidance in `../FASTER_MODELING_AND_NORMALIZATION.md` (array of `{scheme,type,min/max/value,notes}`).
@@ -16,24 +16,24 @@ _Status date: 2025-10-11_
Until these blocks land, connectors should stage changes behind a feature flag or fixture branch so we can flip on normalized writes in sync.
## Connector adoption matrix
| Connector | Owner team | Current state (2025-10-11) | Required actions for SemVer guidance | Coordination notes |
|-----------|------------|-----------------------------|-------------------------------------|--------------------|
| Acsc | BE-Conn-ACSC | All tasks still TODO | Blocked on initial ingest work; align DTO design with normalized rule array before mapper lands. | Schedule pairing once `SemVerRangeRuleBuilder` API is published; ensure fixtures capture vendor/device taxonomy for provenance notes. |
| Cccs | BE-Conn-CCCS | All tasks still TODO | Same as Acsc; design DTOs with normalized rule hooks from day one. | Provide sample rule snippets in kickoff; share Mongo dual-write plan once storage flag is ready. |
| CertBund | BE-Conn-CERTBUND | All tasks still TODO | Ensure canonical mapper emits vendor range primitives plus normalized rules for product firmware. | Needs language/localisation guidance; coordinate with Localization WG for deterministic casing. |
| CertCc | BE-Conn-CERTCC | Fetch in progress, mapping TODO | Map VINCE vendor/product data into `RangePrimitives` with `certcc.vendor` extensions; build normalized SemVer ranges when version strings surface. | Follow up on 2025-10-14 to review VINCE payload examples and confirm builder requirements. |
| Cve | BE-Conn-CVE | Mapping/tests DONE (legacy SemVer) | Refactor `CveMapper` to call the shared builder and populate `NormalizedVersions` + provenance notes once models land. | Prepare MR behind `ENABLE_NORMALIZED_VERSIONS` flag; regression fixtures already cover version ranges—extend snapshots to cover rule arrays. |
| Ghsa | BE-Conn-GHSA | Normalized rules emitted (2025-10-11) | Maintain SemVer builder integration; share regression diffs if schema shifts occur. | Fixtures refreshed with `ghsa:{identifier}` notes; OSV rollout next in queue—await connector handoff update. |
| Osv | BE-Conn-OSV | Normalized rules emitted (2025-10-11) | Keep SemVer builder wiring current; extend notes if new ecosystems appear. | npm/PyPI parity snapshots updated with `osv:{ecosystem}:{advisoryId}:{identifier}` notes; merge analytics notified. |
| Nvd | BE-Conn-NVD | Normalized rules emitted (2025-10-11) | Maintain SemVer coverage for ecosystem ranges; keep notes aligned with CVE IDs. | CPE ranges now emit semver primitives when versions parse; fixtures refreshed, report sent to FEEDMERGE-COORD-02-900. |
| Cve | BE-Conn-CVE | Normalized rules emitted (2025-10-11) | Maintain SemVer notes for vendor ecosystems; backfill additional fixture coverage as CVE payloads expand. | Connector outputs `cve:{cveId}:{identifier}` notes; npm parity test fixtures updated and merge ping acknowledged. |
| Ics.Cisa | BE-Conn-ICS-CISA | All tasks TODO | When defining product schema, plan for SemVer or vendor version rules (many advisories use firmware revisions). | Gather sample advisories and confirm whether ranges are SemVer or vendor-specific so we can introduce scheme identifiers early. |
| Kisa | BE-Conn-KISA | All tasks TODO | Ensure DTO parsing captures version strings despite localisation; feed into normalized rule builder once ready. | Requires translation samples; request help from Localization WG before mapper implementation. |
| Ru.Bdu | BE-Conn-BDU | All tasks TODO | Map product releases into normalized rules; add provenance notes referencing BDU advisory identifiers. | Verify we have UTF-8 safe handling in builder; share sample sanitized inputs. |
| Ru.Nkcki | BE-Conn-Nkcki | All tasks TODO | Similar to BDU; capture vendor firmware/build numbers and map into normalized rules. | Coordinate with Localization WG for Cyrillic transliteration strategy. |
| Vndr.Apple | BE-Conn-Apple | Mapper/tests/telemetry marked DOING | Continue extending vendor range primitives (`apple.version`, `apple.build`) and adopt normalized rule arrays for OS build spans. | Request builder integration review on 2025-10-16; ensure fixtures cover multi-range tables and include provenance notes. |
| Vndr.Cisco | BE-Conn-Cisco | ✅ Emits SemVer primitives with vendor notes | Parser maps versions into SemVer primitives with `cisco.productId` vendor extensions; sample fixtures landing in `StellaOps.Concelier.Connector.Vndr.Cisco.Tests`. | No custom comparer required; SemVer + vendor metadata suffices. |
| Vndr.Msrc | BE-Conn-MSRC | All tasks TODO | Canonical mapper must output product/build coverage as normalized rules (likely `msrc.patch` scheme) with provenance referencing KB IDs. | Sync with Models on adding scheme identifiers for MSRC packages; plan fixture coverage for monthly rollups. |
| Connector | Owner team | Current state (2025-10-20) | Required actions for normalized rules | Coordination notes |
|-----------|------------|----------------------------|--------------------------------------|--------------------|
| Acsc | BE-Conn-ACSC | ❌ Not started mapper emits legacy range strings only | Stage `SemVerRangeRuleBuilder` integration once relay HTTP/2 fixes stabilise; target kickoff 2025-10-24. | Pair with Merge on sample payloads; ensure fixtures capture vendor/device taxonomy for provenance notes. |
| Cccs | BE-Conn-CCCS | ⚠️ DOING helper branch under review (due 2025-10-21) | Wire trailing-version split helper, emit `NormalizedVersions` with `cccs:{serial}:{index}` notes, refresh fixtures/tests. | Share MR link before 2025-10-21 stand-up; Merge to validate counters once fixtures land. |
| CertBund | BE-Conn-CERTBUND | ⚠️ In progress localisation work pending (due 2025-10-22) | Translate `product.Versions` phrases (`bis`, `alle`) into builder inputs; emit provenance `certbund:{advisoryId}:{vendor}`; update README/tests. | Localization WG drafting deterministic casing guidance; expect sample payloads 2025-10-21. |
| CertCc | BE-Conn-CERTCC | ✅ Complete emitting `certcc.vendor` rules since 2025-10-12 | Keep builder contract stable; bubble any VINCE payload changes. | Merge verified counters drop on 2025-10-19 run; no follow-up. |
| Cve | BE-Conn-CVE | ✅ Complete SemVer rules emitted 2025-10-12 | Maintain provenance notes (`cve:{cveId}:{identifier}`) and extend fixtures as schema grows. | Latest nightly confirms normalized counters at expected baseline. |
| Ghsa | BE-Conn-GHSA | ✅ Complete normalized rollout live 2025-10-11 | Monitor schema diffs; keep fixtures synced with GHSA provenance notes. | Coordinate with OSV on shared ecosystems; no open issues. |
| Osv | BE-Conn-OSV | ✅ Complete normalized rules shipping 2025-10-11 | Track new ecosystems; ensure notes stay aligned with `osv:{ecosystem}:{advisoryId}:{identifier}`. | Merge analytics watching npm/PyPI parity; no action needed. |
| Nvd | BE-Conn-NVD | ✅ Complete normalized SemVer output live 2025-10-11 | Maintain CVE-aligned provenance; monitor MR toggles if schema shifts. | Next check: confirm export parity once storage migration flips on 2025-10-23. |
| Kev | BE-Conn-KEV | ✅ Complete catalog/due-date rules emitted 2025-10-12 | Keep schedule metadata synced with CISA feed. | Acts as flag-only enrich; no additional merge work required. |
| Ics.Cisa | BE-Conn-ICS-CISA | ⚠️ Pending decision (due 2025-10-23) | Promote existing SemVer primitives into normalized rules; open Models ticket if firmware requires new scheme. | Provide sample advisories to Merge by 2025-10-22 for schema review. |
| Kisa | BE-Conn-KISA | ⚠️ Proposal drafting (due 2025-10-24) | Finalise `kisa.build` (or alternate) scheme with Models, then emit normalized rules and update localisation notes/tests. | Localization WG prepping translation samples; Merge to review scheme request immediately. |
| Ru.Bdu | BE-Conn-BDU | ✅ Complete emitting `ru-bdu.raw` rules since 2025-10-14 | Monitor UTF-8 sanitisation; keep provenance notes aligned with advisory ids. | Storage snapshot verified 2025-10-19; counters green. |
| Ru.Nkcki | BE-Conn-Nkcki | ✅ Complete SemVer + normalized rules live 2025-10-13 | Maintain Cyrillic provenance fields and SemVer coverage. | Localization WG confirmed transliteration guidance; no open items. |
| Vndr.Apple | BE-Conn-Apple | ✅ Complete `apple.build` SemVer rules live 2025-10-11 | Keep fixtures covering multi-range tables; notify Merge of schema evolutions. | Prepare follow-up for macOS/iOS beta channels by 2025-10-26. |
| Vndr.Cisco | BE-Conn-Cisco | ⚠️ DOING normalized promotion branch open (due 2025-10-21) | Use helper to convert SemVer primitives into rule arrays with `cisco:{productId}` notes; refresh tests. | OAuth throttling validated; Merge to rerun counters post-merge. |
| Vndr.Msrc | BE-Conn-MSRC | ✅ Complete `msrc.build` rules live 2025-10-15 | Monitor monthly rollup coverage and provenance notes. | Merge verified rule ingestion 2025-10-19; no outstanding actions. |
## Storage alignment quick reference (2025-10-11)
- `NormalizedVersionDocumentFactory` copies each `NormalizedVersionRule` into Mongo with the shape `{ packageId, packageType, scheme, type, style, min, minInclusive, max, maxInclusive, value, notes, decisionReason, constraint, source, recordedAt }`. `style` is currently a direct echo of `type` but reserved for future vendor comparers—no connector action required.
@@ -83,12 +83,14 @@ Until these blocks land, connectors should stage changes behind a feature flag o
```
## Immediate next steps
- Normalization team to share draft `SemVerRangeRuleBuilder` API by **2025-10-13** for review; Merge will circulate feedback within 24 hours.
- Connector owners to prepare fixture pull requests demonstrating sample normalized rule arrays (even if feature-flagged) by **2025-10-17**.
- Merge team will run a cross-connector review on **2025-10-18** to confirm consistent field usage and provenance tagging before enabling merge union logic.
- Schedule held for **2025-10-14 14:00 UTC** to review the CERT/CC staging VINCE advisory sample once `enableDetailMapping` is flipped; capture findings in `#concelier-merge` with snapshot diffs.
- **2025-10-21** Cccs and Cisco teams to merge normalized-rule branches, regenerate fixtures, and post counter screenshots.
- **2025-10-22** CertBund translator review with Localization WG; confirm localisation glossary + deterministic casing before merge.
- **2025-10-23** ICS-CISA to confirm SemVer vs firmware scheme; escalate Models ticket if new scheme required.
- **2025-10-24** KISA firmware scheme proposal due; Merge to review immediately and unblock builder integration.
- **2025-10-25** Merge cross-connector review to validate counters, provenance notes, and storage projections before flipping default union logic.
## Tracking & follow-up
- Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900.
- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`).
- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge.
- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots.

View File

@@ -16,7 +16,11 @@
|Override audit logging|BE-Merge|Observability|DONE override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.|
|Configurable precedence table|BE-Merge|Architecture|DONE precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.|
|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.|
|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.|
|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.|
> Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests.
|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.|
|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.|
|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.|

View File

@@ -16,7 +16,7 @@
|Batch job definition last-run lookup|BE-Base|Core|DONE definitions endpoint now precomputes kinds array and reuses batched last-run dictionary; manual smoke verified via local GET `/jobs/definitions`.|
|Add no-cache headers to health/readiness/jobs APIs|BE-Base|WebService|DONE helper applies Cache-Control/Pragma/Expires on all health/ready/jobs endpoints; awaiting automated probe tests once connector fixtures stabilize.|
|Authority configuration parity (FSR1)|DevEx/Concelier|Authority options schema|**DONE (2025-10-10)** Options post-config loads clientSecretFile fallback, validators normalize scopes/audiences, and sample config documents issuer/credential/bypass settings.|
|Document authority toggle & scope requirements|Docs/Concelier|Authority integration|**DOING (2025-10-10)** Quickstart updated with staging flag, client credentials, env overrides; operator guide refresh pending Docs guild review.|
|Document authority toggle & scope requirements|Docs/Concelier|Authority integration|**DONE (2025-10-21)** Quickstart now documents staging flag, client credentials, env overrides; operator guide refresh merged. Remaining copy polishing is tracked under `DOCS-CONCELIER-07-201` in `docs/TASKS.md`.|
|Plumb Authority client resilience options|BE-Base|Auth libraries LIB5|**DONE (2025-10-12)** `Program.cs` wires `authority.resilience.*` + client scopes into `AddStellaOpsAuthClient`; new integration test asserts binding and retries.|
|Author ops guidance for resilience tuning|Docs/Concelier|Plumb Authority client resilience options|**DONE (2025-10-12)** `docs/21_INSTALL_GUIDE.md` + `docs/ops/concelier-authority-audit-runbook.md` document resilience profiles for connected vs air-gapped installs and reference monitoring cues.|
|Document authority bypass logging patterns|Docs/Concelier|FSR3 logging|**DONE (2025-10-12)** Updated operator guides clarify `Concelier.Authorization.Audit` fields (route/status/subject/clientId/scopes/bypass/remote) and SIEM triggers.|

View File

@@ -376,17 +376,25 @@ public sealed class AuthorityDpopNonceOptions
throw new InvalidOperationException("Dpop.Nonce.RedisConnectionString must be provided when using the 'redis' store.");
}
NormalizedAudiences = requiredAudiences
.Select(static aud => aud.Trim())
.Where(static aud => aud.Length > 0)
.ToHashSet(StringComparer.OrdinalIgnoreCase);
if (NormalizedAudiences.Count == 0)
{
throw new InvalidOperationException("Dpop.Nonce.RequiredAudiences must include at least one audience.");
}
}
}
var normalizedAudiences = requiredAudiences
.Select(static aud => aud.Trim())
.Where(static aud => aud.Length > 0)
.ToHashSet(StringComparer.OrdinalIgnoreCase);
if (normalizedAudiences.Count == 0)
{
throw new InvalidOperationException("Dpop.Nonce.RequiredAudiences must include at least one audience.");
}
requiredAudiences.Clear();
foreach (var audience in normalizedAudiences)
{
requiredAudiences.Add(audience);
}
NormalizedAudiences = normalizedAudiences;
}
}
public sealed class AuthorityMtlsOptions
{

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
</Project>

View File

@@ -1,16 +1,16 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Cisco.CSAF;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Metadata;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Cisco.CSAF;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using System.Collections.Immutable;
@@ -18,148 +18,148 @@ using System.IO.Abstractions.TestingHelpers;
using Xunit;
using System.Threading;
using MongoDB.Driver;
namespace StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.Connectors;
public sealed class CiscoCsafConnectorTests
{
[Fact]
public async Task FetchAsync_NewAdvisory_StoresDocumentAndUpdatesState()
{
var responses = new Dictionary<Uri, Queue<HttpResponseMessage>>
{
[new Uri("https://api.cisco.test/.well-known/csaf/provider-metadata.json")] = QueueResponses("""
{
"metadata": {
"publisher": {
"name": "Cisco",
"category": "vendor",
"contact_details": { "id": "excititor:cisco" }
}
},
"distributions": {
"directories": [ "https://api.cisco.test/csaf/" ]
}
}
"""),
[new Uri("https://api.cisco.test/csaf/index.json")] = QueueResponses("""
{
"advisories": [
{
"id": "cisco-sa-2025",
"url": "https://api.cisco.test/csaf/cisco-sa-2025.json",
"published": "2025-10-01T00:00:00Z",
"lastModified": "2025-10-02T00:00:00Z",
"sha256": "cafebabe"
}
]
}
"""),
[new Uri("https://api.cisco.test/csaf/cisco-sa-2025.json")] = QueueResponses("{ \"document\": \"payload\" }")
};
var handler = new RoutingHttpMessageHandler(responses);
var httpClient = new HttpClient(handler);
var factory = new SingleHttpClientFactory(httpClient);
var metadataLoader = new CiscoProviderMetadataLoader(
factory,
new MemoryCache(new MemoryCacheOptions()),
Options.Create(new CiscoConnectorOptions
{
MetadataUri = "https://api.cisco.test/.well-known/csaf/provider-metadata.json",
PersistOfflineSnapshot = false,
}),
NullLogger<CiscoProviderMetadataLoader>.Instance,
new MockFileSystem());
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new CiscoCsafConnector(
metadataLoader,
factory,
stateRepository,
new[] { new CiscoConnectorOptionsValidator() },
NullLogger<CiscoCsafConnector>.Instance,
TimeProvider.System);
var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(doc);
}
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
stateRepository.CurrentState.Should().NotBeNull();
stateRepository.CurrentState!.DocumentDigests.Should().HaveCount(1);
// second run should not refetch documents
sink.Documents.Clear();
documents.Clear();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(doc);
}
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
}
private static Queue<HttpResponseMessage> QueueResponses(string payload)
=> new(new[]
{
new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(payload, Encoding.UTF8, "application/json"),
}
});
private sealed class RoutingHttpMessageHandler : HttpMessageHandler
{
private readonly Dictionary<Uri, Queue<HttpResponseMessage>> _responses;
public RoutingHttpMessageHandler(Dictionary<Uri, Queue<HttpResponseMessage>> responses)
{
_responses = responses;
}
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
if (request.RequestUri is not null && _responses.TryGetValue(request.RequestUri, out var queue) && queue.Count > 0)
{
var response = queue.Peek();
return Task.FromResult(response.Clone());
}
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)
{
Content = new StringContent($"No response configured for {request.RequestUri}"),
});
}
}
private sealed class SingleHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleHttpClientFactory(HttpClient client)
{
_client = client;
}
public HttpClient CreateClient(string name) => _client;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? CurrentState { get; private set; }
namespace StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.Connectors;
public sealed class CiscoCsafConnectorTests
{
[Fact]
public async Task FetchAsync_NewAdvisory_StoresDocumentAndUpdatesState()
{
var responses = new Dictionary<Uri, Queue<HttpResponseMessage>>
{
[new Uri("https://api.cisco.test/.well-known/csaf/provider-metadata.json")] = QueueResponses("""
{
"metadata": {
"publisher": {
"name": "Cisco",
"category": "vendor",
"contact_details": { "id": "excititor:cisco" }
}
},
"distributions": {
"directories": [ "https://api.cisco.test/csaf/" ]
}
}
"""),
[new Uri("https://api.cisco.test/csaf/index.json")] = QueueResponses("""
{
"advisories": [
{
"id": "cisco-sa-2025",
"url": "https://api.cisco.test/csaf/cisco-sa-2025.json",
"published": "2025-10-01T00:00:00Z",
"lastModified": "2025-10-02T00:00:00Z",
"sha256": "cafebabe"
}
]
}
"""),
[new Uri("https://api.cisco.test/csaf/cisco-sa-2025.json")] = QueueResponses("{ \"document\": \"payload\" }")
};
var handler = new RoutingHttpMessageHandler(responses);
var httpClient = new HttpClient(handler);
var factory = new SingleHttpClientFactory(httpClient);
var metadataLoader = new CiscoProviderMetadataLoader(
factory,
new MemoryCache(new MemoryCacheOptions()),
Options.Create(new CiscoConnectorOptions
{
MetadataUri = "https://api.cisco.test/.well-known/csaf/provider-metadata.json",
PersistOfflineSnapshot = false,
}),
NullLogger<CiscoProviderMetadataLoader>.Instance,
new MockFileSystem());
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new CiscoCsafConnector(
metadataLoader,
factory,
stateRepository,
new[] { new CiscoConnectorOptionsValidator() },
NullLogger<CiscoCsafConnector>.Instance,
TimeProvider.System);
var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty);
var documents = new List<VexRawDocument>();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(doc);
}
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
stateRepository.CurrentState.Should().NotBeNull();
stateRepository.CurrentState!.DocumentDigests.Should().HaveCount(1);
// second run should not refetch documents
sink.Documents.Clear();
documents.Clear();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(doc);
}
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
}
private static Queue<HttpResponseMessage> QueueResponses(string payload)
=> new(new[]
{
new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(payload, Encoding.UTF8, "application/json"),
}
});
private sealed class RoutingHttpMessageHandler : HttpMessageHandler
{
private readonly Dictionary<Uri, Queue<HttpResponseMessage>> _responses;
public RoutingHttpMessageHandler(Dictionary<Uri, Queue<HttpResponseMessage>> responses)
{
_responses = responses;
}
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
if (request.RequestUri is not null && _responses.TryGetValue(request.RequestUri, out var queue) && queue.Count > 0)
{
var response = queue.Peek();
return Task.FromResult(response.Clone());
}
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)
{
Content = new StringContent($"No response configured for {request.RequestUri}"),
});
}
}
private sealed class SingleHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleHttpClientFactory(HttpClient client)
{
_client = client;
}
public HttpClient CreateClient(string name) => _client;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? CurrentState { get; private set; }
public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> ValueTask.FromResult(CurrentState);
@@ -168,48 +168,48 @@ public sealed class CiscoCsafConnectorTests
CurrentState = state;
return ValueTask.CompletedTask;
}
}
private sealed class InMemoryRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
}
internal static class HttpResponseMessageExtensions
{
public static HttpResponseMessage Clone(this HttpResponseMessage response)
{
var clone = new HttpResponseMessage(response.StatusCode);
foreach (var header in response.Headers)
{
clone.Headers.TryAddWithoutValidation(header.Key, header.Value);
}
if (response.Content is not null)
{
var payload = response.Content.ReadAsStringAsync().GetAwaiter().GetResult();
clone.Content = new StringContent(payload, Encoding.UTF8, response.Content.Headers.ContentType?.MediaType);
}
return clone;
}
}
}
private sealed class InMemoryRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
}
internal static class HttpResponseMessageExtensions
{
public static HttpResponseMessage Clone(this HttpResponseMessage response)
{
var clone = new HttpResponseMessage(response.StatusCode);
foreach (var header in response.Headers)
{
clone.Headers.TryAddWithoutValidation(header.Key, header.Value);
}
if (response.Content is not null)
{
var payload = response.Content.ReadAsStringAsync().GetAwaiter().GetResult();
clone.Content = new StringContent(payload, Encoding.UTF8, response.Content.Headers.ContentType?.MediaType);
}
return clone;
}
}

View File

@@ -1,20 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,322 +1,325 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO.Compression;
using System.Net;
using System.Net.Http;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.MSRC.CSAF;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO.Compression;
using System.Net;
using System.Net.Http;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.MSRC.CSAF;
using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication;
using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using Xunit;
using MongoDB.Driver;
namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.Connectors;
public sealed class MsrcCsafConnectorTests
{
private static readonly VexConnectorDescriptor Descriptor = new("excititor:msrc", VexProviderKind.Vendor, "MSRC CSAF");
[Fact]
public async Task FetchAsync_EmitsDocumentAndPersistsState()
{
var summary = """
{
"value": [
{
"id": "ADV-0001",
"vulnerabilityId": "ADV-0001",
"severity": "Critical",
"releaseDate": "2025-10-17T00:00:00Z",
"lastModifiedDate": "2025-10-18T00:00:00Z",
"cvrfUrl": "https://example.com/csaf/ADV-0001.json"
}
]
}
""";
var csaf = """{"document":{"title":"Example"}}""";
var handler = TestHttpMessageHandler.Create(
_ => Response(HttpStatusCode.OK, summary, "application/json"),
_ => Response(HttpStatusCode.OK, csaf, "application/json"));
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://example.com/"),
};
var factory = new SingleClientHttpClientFactory(httpClient);
var stateRepository = new InMemoryConnectorStateRepository();
var options = Options.Create(CreateOptions());
var connector = new MsrcCsafConnector(
factory,
new StubTokenProvider(),
stateRepository,
options,
NullLogger<MsrcCsafConnector>.Instance,
TimeProvider.System);
await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
var sink = new CapturingRawSink();
var context = new VexConnectorContext(
Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero),
Settings: VexConnectorSettings.Empty,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
var emitted = documents[0];
emitted.SourceUri.Should().Be(new Uri("https://example.com/csaf/ADV-0001.json"));
emitted.Metadata["msrc.vulnerabilityId"].Should().Be("ADV-0001");
emitted.Metadata["msrc.csaf.format"].Should().Be("json");
emitted.Metadata.Should().NotContainKey("excititor.quarantine.reason");
stateRepository.State.Should().NotBeNull();
stateRepository.State!.LastUpdated.Should().Be(new DateTimeOffset(2025, 10, 18, 0, 0, 0, TimeSpan.Zero));
stateRepository.State.DocumentDigests.Should().HaveCount(1);
}
[Fact]
public async Task FetchAsync_SkipsDocumentsWithExistingDigest()
{
var summary = """
{
"value": [
{
"id": "ADV-0001",
"vulnerabilityId": "ADV-0001",
"lastModifiedDate": "2025-10-18T00:00:00Z",
"cvrfUrl": "https://example.com/csaf/ADV-0001.json"
}
]
}
""";
var csaf = """{"document":{"title":"Example"}}""";
var handler = TestHttpMessageHandler.Create(
_ => Response(HttpStatusCode.OK, summary, "application/json"),
_ => Response(HttpStatusCode.OK, csaf, "application/json"));
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://example.com/"),
};
var factory = new SingleClientHttpClientFactory(httpClient);
var stateRepository = new InMemoryConnectorStateRepository();
var options = Options.Create(CreateOptions());
var connector = new MsrcCsafConnector(
factory,
new StubTokenProvider(),
stateRepository,
options,
NullLogger<MsrcCsafConnector>.Instance,
TimeProvider.System);
await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
var sink = new CapturingRawSink();
var context = new VexConnectorContext(
Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero),
Settings: VexConnectorSettings.Empty,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var firstPass = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
firstPass.Add(document);
}
firstPass.Should().HaveCount(1);
stateRepository.State.Should().NotBeNull();
var persistedState = stateRepository.State!;
handler.Reset(
_ => Response(HttpStatusCode.OK, summary, "application/json"),
_ => Response(HttpStatusCode.OK, csaf, "application/json"));
sink.Documents.Clear();
var secondPass = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
secondPass.Add(document);
}
secondPass.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
stateRepository.State.Should().NotBeNull();
stateRepository.State!.DocumentDigests.Should().Equal(persistedState.DocumentDigests);
}
[Fact]
public async Task FetchAsync_QuarantinesInvalidCsafPayload()
{
var summary = """
{
"value": [
{
"id": "ADV-0002",
"vulnerabilityId": "ADV-0002",
"lastModifiedDate": "2025-10-19T00:00:00Z",
"cvrfUrl": "https://example.com/csaf/ADV-0002.zip"
}
]
}
""";
var csafZip = CreateZip("document.json", "{ invalid json ");
var handler = TestHttpMessageHandler.Create(
_ => Response(HttpStatusCode.OK, summary, "application/json"),
_ => Response(HttpStatusCode.OK, csafZip, "application/zip"));
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://example.com/"),
};
var factory = new SingleClientHttpClientFactory(httpClient);
var stateRepository = new InMemoryConnectorStateRepository();
var options = Options.Create(CreateOptions());
var connector = new MsrcCsafConnector(
factory,
new StubTokenProvider(),
stateRepository,
options,
NullLogger<MsrcCsafConnector>.Instance,
TimeProvider.System);
await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
var sink = new CapturingRawSink();
var context = new VexConnectorContext(
Since: new DateTimeOffset(2025, 10, 17, 0, 0, 0, TimeSpan.Zero),
Settings: VexConnectorSettings.Empty,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().BeEmpty();
sink.Documents.Should().HaveCount(1);
sink.Documents[0].Metadata["excititor.quarantine.reason"].Should().Contain("JSON parse failed");
sink.Documents[0].Metadata["msrc.csaf.format"].Should().Be("zip");
stateRepository.State.Should().NotBeNull();
stateRepository.State!.DocumentDigests.Should().HaveCount(1);
}
private static HttpResponseMessage Response(HttpStatusCode statusCode, string content, string contentType)
=> new(statusCode)
{
Content = new StringContent(content, Encoding.UTF8, contentType),
};
private static HttpResponseMessage Response(HttpStatusCode statusCode, byte[] content, string contentType)
{
var response = new HttpResponseMessage(statusCode);
response.Content = new ByteArrayContent(content);
response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(contentType);
return response;
}
private static MsrcConnectorOptions CreateOptions()
=> new()
{
BaseUri = new Uri("https://example.com/", UriKind.Absolute),
TenantId = Guid.NewGuid().ToString(),
ClientId = "client-id",
ClientSecret = "secret",
Scope = MsrcConnectorOptions.DefaultScope,
PageSize = 5,
MaxAdvisoriesPerFetch = 5,
RequestDelay = TimeSpan.Zero,
RetryBaseDelay = TimeSpan.FromMilliseconds(10),
MaxRetryAttempts = 2,
};
private static byte[] CreateZip(string entryName, string content)
{
using var buffer = new MemoryStream();
using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true))
{
var entry = archive.CreateEntry(entryName);
using var writer = new StreamWriter(entry.Open(), Encoding.UTF8);
writer.Write(content);
}
return buffer.ToArray();
}
private sealed class StubTokenProvider : IMsrcTokenProvider
{
public ValueTask<MsrcAccessToken> GetAccessTokenAsync(CancellationToken cancellationToken)
=> ValueTask.FromResult(new MsrcAccessToken("token", "Bearer", DateTimeOffset.MaxValue));
}
private sealed class CapturingRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
private sealed class SingleClientHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleClientHttpClientFactory(HttpClient client)
{
_client = client;
}
public HttpClient CreateClient(string name) => _client;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? State { get; private set; }
namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.Connectors;
public sealed class MsrcCsafConnectorTests
{
private static readonly VexConnectorDescriptor Descriptor = new("excititor:msrc", VexProviderKind.Vendor, "MSRC CSAF");
// Happy path: a summary feed advertising one advisory must produce exactly one
// emitted CSAF document, store it in the raw sink, and persist connector state
// (the lastModifiedDate watermark plus the document digest).
[Fact]
public async Task FetchAsync_EmitsDocumentAndPersistsState()
{
    // MSRC summary response listing a single advisory and its CSAF location.
    var summary = """
    {
      "value": [
        {
          "id": "ADV-0001",
          "vulnerabilityId": "ADV-0001",
          "severity": "Critical",
          "releaseDate": "2025-10-17T00:00:00Z",
          "lastModifiedDate": "2025-10-18T00:00:00Z",
          "cvrfUrl": "https://example.com/csaf/ADV-0001.json"
        }
      ]
    }
    """;
    var csaf = """{"document":{"title":"Example"}}""";
    // Responder #1 answers the summary request, responder #2 the CSAF download.
    var handler = TestHttpMessageHandler.Create(
        _ => Response(HttpStatusCode.OK, summary, "application/json"),
        _ => Response(HttpStatusCode.OK, csaf, "application/json"));
    var httpClient = new HttpClient(handler)
    {
        BaseAddress = new Uri("https://example.com/"),
    };
    var factory = new SingleClientHttpClientFactory(httpClient);
    var stateRepository = new InMemoryConnectorStateRepository();
    var options = Options.Create(CreateOptions());
    var connector = new MsrcCsafConnector(
        factory,
        new StubTokenProvider(),
        stateRepository,
        options,
        NullLogger<MsrcCsafConnector>.Instance,
        TimeProvider.System);
    await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
    var sink = new CapturingRawSink();
    // Since is earlier than the advisory's lastModifiedDate, so it is in scope.
    var context = new VexConnectorContext(
        Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero),
        Settings: VexConnectorSettings.Empty,
        RawSink: sink,
        SignatureVerifier: new NoopSignatureVerifier(),
        Normalizers: new NoopNormalizerRouter(),
        Services: new ServiceCollection().BuildServiceProvider(),
        ResumeTokens: ImmutableDictionary<string, string>.Empty);
    var documents = new List<VexRawDocument>();
    await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
    {
        documents.Add(document);
    }
    documents.Should().HaveCount(1);
    sink.Documents.Should().HaveCount(1);
    var emitted = documents[0];
    emitted.SourceUri.Should().Be(new Uri("https://example.com/csaf/ADV-0001.json"));
    emitted.Metadata["msrc.vulnerabilityId"].Should().Be("ADV-0001");
    emitted.Metadata["msrc.csaf.format"].Should().Be("json");
    // A well-formed JSON payload must not carry a quarantine marker.
    emitted.Metadata.Should().NotContainKey("excititor.quarantine.reason");
    stateRepository.State.Should().NotBeNull();
    // Watermark advances to the advisory's lastModifiedDate.
    stateRepository.State!.LastUpdated.Should().Be(new DateTimeOffset(2025, 10, 18, 0, 0, 0, TimeSpan.Zero));
    stateRepository.State.DocumentDigests.Should().HaveCount(1);
}
// Digest-based deduplication: fetching the same advisory payload a second time
// must emit nothing, store nothing, and leave the persisted digest list unchanged.
[Fact]
public async Task FetchAsync_SkipsDocumentsWithExistingDigest()
{
    var summary = """
    {
      "value": [
        {
          "id": "ADV-0001",
          "vulnerabilityId": "ADV-0001",
          "lastModifiedDate": "2025-10-18T00:00:00Z",
          "cvrfUrl": "https://example.com/csaf/ADV-0001.json"
        }
      ]
    }
    """;
    var csaf = """{"document":{"title":"Example"}}""";
    var handler = TestHttpMessageHandler.Create(
        _ => Response(HttpStatusCode.OK, summary, "application/json"),
        _ => Response(HttpStatusCode.OK, csaf, "application/json"));
    var httpClient = new HttpClient(handler)
    {
        BaseAddress = new Uri("https://example.com/"),
    };
    var factory = new SingleClientHttpClientFactory(httpClient);
    var stateRepository = new InMemoryConnectorStateRepository();
    var options = Options.Create(CreateOptions());
    var connector = new MsrcCsafConnector(
        factory,
        new StubTokenProvider(),
        stateRepository,
        options,
        NullLogger<MsrcCsafConnector>.Instance,
        TimeProvider.System);
    await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
    var sink = new CapturingRawSink();
    var context = new VexConnectorContext(
        Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero),
        Settings: VexConnectorSettings.Empty,
        RawSink: sink,
        SignatureVerifier: new NoopSignatureVerifier(),
        Normalizers: new NoopNormalizerRouter(),
        Services: new ServiceCollection().BuildServiceProvider(),
        ResumeTokens: ImmutableDictionary<string, string>.Empty);
    // First pass ingests the advisory and records its digest in state.
    var firstPass = new List<VexRawDocument>();
    await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
    {
        firstPass.Add(document);
    }
    firstPass.Should().HaveCount(1);
    stateRepository.State.Should().NotBeNull();
    var persistedState = stateRepository.State!;
    // Re-arm the HTTP script with identical responses for the second pass.
    handler.Reset(
        _ => Response(HttpStatusCode.OK, summary, "application/json"),
        _ => Response(HttpStatusCode.OK, csaf, "application/json"));
    sink.Documents.Clear();
    // Second pass sees the same payload digest and must skip it entirely.
    var secondPass = new List<VexRawDocument>();
    await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
    {
        secondPass.Add(document);
    }
    secondPass.Should().BeEmpty();
    sink.Documents.Should().BeEmpty();
    stateRepository.State.Should().NotBeNull();
    stateRepository.State!.DocumentDigests.Should().Equal(persistedState.DocumentDigests);
}
// Malformed payload handling: a ZIP whose inner document is not valid JSON must
// not be emitted to consumers, but is still stored in the raw sink with a
// quarantine reason, and its digest is recorded so it is not refetched.
[Fact]
public async Task FetchAsync_QuarantinesInvalidCsafPayload()
{
    var summary = """
    {
      "value": [
        {
          "id": "ADV-0002",
          "vulnerabilityId": "ADV-0002",
          "lastModifiedDate": "2025-10-19T00:00:00Z",
          "cvrfUrl": "https://example.com/csaf/ADV-0002.zip"
        }
      ]
    }
    """;
    // ZIP archive containing deliberately broken JSON.
    var csafZip = CreateZip("document.json", "{ invalid json ");
    var handler = TestHttpMessageHandler.Create(
        _ => Response(HttpStatusCode.OK, summary, "application/json"),
        _ => Response(HttpStatusCode.OK, csafZip, "application/zip"));
    var httpClient = new HttpClient(handler)
    {
        BaseAddress = new Uri("https://example.com/"),
    };
    var factory = new SingleClientHttpClientFactory(httpClient);
    var stateRepository = new InMemoryConnectorStateRepository();
    var options = Options.Create(CreateOptions());
    var connector = new MsrcCsafConnector(
        factory,
        new StubTokenProvider(),
        stateRepository,
        options,
        NullLogger<MsrcCsafConnector>.Instance,
        TimeProvider.System);
    await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None);
    var sink = new CapturingRawSink();
    var context = new VexConnectorContext(
        Since: new DateTimeOffset(2025, 10, 17, 0, 0, 0, TimeSpan.Zero),
        Settings: VexConnectorSettings.Empty,
        RawSink: sink,
        SignatureVerifier: new NoopSignatureVerifier(),
        Normalizers: new NoopNormalizerRouter(),
        Services: new ServiceCollection().BuildServiceProvider(),
        ResumeTokens: ImmutableDictionary<string, string>.Empty);
    var documents = new List<VexRawDocument>();
    await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
    {
        documents.Add(document);
    }
    // Quarantined documents are persisted but never yielded to the caller.
    documents.Should().BeEmpty();
    sink.Documents.Should().HaveCount(1);
    sink.Documents[0].Metadata["excititor.quarantine.reason"].Should().Contain("JSON parse failed");
    sink.Documents[0].Metadata["msrc.csaf.format"].Should().Be("zip");
    stateRepository.State.Should().NotBeNull();
    stateRepository.State!.DocumentDigests.Should().HaveCount(1);
}
// Builds an HTTP response whose body is the given text encoded as UTF-8 with
// the supplied media type.
private static HttpResponseMessage Response(HttpStatusCode statusCode, string content, string contentType)
{
    var response = new HttpResponseMessage(statusCode);
    response.Content = new StringContent(content, Encoding.UTF8, contentType);
    return response;
}
// Builds an HTTP response carrying a raw byte payload with an explicit media type.
private static HttpResponseMessage Response(HttpStatusCode statusCode, byte[] content, string contentType)
{
    var body = new ByteArrayContent(content);
    body.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(contentType);
    return new HttpResponseMessage(statusCode) { Content = body };
}
// Connector options tuned for tests: local base URI, dummy AAD credentials,
// small paging/fetch limits, and near-zero delays to keep the suite fast.
private static MsrcConnectorOptions CreateOptions()
    => new()
    {
        BaseUri = new Uri("https://example.com/", UriKind.Absolute),
        TenantId = Guid.NewGuid().ToString(),
        ClientId = "client-id",
        ClientSecret = "secret",
        Scope = MsrcConnectorOptions.DefaultScope,
        PageSize = 5,
        MaxAdvisoriesPerFetch = 5,
        RequestDelay = TimeSpan.Zero,
        RetryBaseDelay = TimeSpan.FromMilliseconds(10),
        MaxRetryAttempts = 2,
    };
// Builds an in-memory ZIP archive containing a single UTF-8 text entry and
// returns the archive bytes.
private static byte[] CreateZip(string entryName, string content)
{
    using var stream = new MemoryStream();
    // leaveOpen keeps the backing stream alive so ToArray() can run after the
    // archive flushes its central directory on dispose.
    using (var archive = new ZipArchive(stream, ZipArchiveMode.Create, leaveOpen: true))
    using (var writer = new StreamWriter(archive.CreateEntry(entryName).Open(), Encoding.UTF8))
    {
        writer.Write(content);
    }

    return stream.ToArray();
}
// IMsrcTokenProvider stub returning a fixed bearer token that never expires,
// so connector authentication never performs a network round trip in tests.
private sealed class StubTokenProvider : IMsrcTokenProvider
{
    public ValueTask<MsrcAccessToken> GetAccessTokenAsync(CancellationToken cancellationToken)
        => ValueTask.FromResult(new MsrcAccessToken("token", "Bearer", DateTimeOffset.MaxValue));
}
// Raw-document sink that records every stored document so tests can assert on
// what the connector persisted.
private sealed class CapturingRawSink : IVexRawDocumentSink
{
    // Documents in the order they were stored.
    public List<VexRawDocument> Documents { get; } = new();

    public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
    {
        Documents.Add(document);
        return ValueTask.CompletedTask;
    }
}
// Signature verifier stub that reports "no signature metadata" for every document.
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
    public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
        => ValueTask.FromResult<VexSignatureMetadata?>(null);
}
// Normalizer stub that yields an empty claim batch; these fetch-focused tests
// do not exercise normalization.
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
    public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
        => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
// IHttpClientFactory stub that hands back the same preconfigured client for
// every logical client name.
private sealed class SingleClientHttpClientFactory : IHttpClientFactory
{
    private readonly HttpClient _instance;

    public SingleClientHttpClientFactory(HttpClient client) => _instance = client;

    public HttpClient CreateClient(string name) => _instance;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? State { get; private set; }
public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> ValueTask.FromResult(State);
@@ -325,40 +328,40 @@ public sealed class MsrcCsafConnectorTests
State = state;
return ValueTask.CompletedTask;
}
}
// Scripted HttpMessageHandler: responders are consumed in request order, and
// the final responder is retained so it answers any further requests.
private sealed class TestHttpMessageHandler : HttpMessageHandler
{
    private readonly Queue<Func<HttpRequestMessage, HttpResponseMessage>> _script;

    private TestHttpMessageHandler(IEnumerable<Func<HttpRequestMessage, HttpResponseMessage>> responders)
        => _script = new Queue<Func<HttpRequestMessage, HttpResponseMessage>>(responders);

    public static TestHttpMessageHandler Create(params Func<HttpRequestMessage, HttpResponseMessage>[] responders)
        => new(responders);

    // Discards any remaining script and installs a fresh responder sequence.
    public void Reset(params Func<HttpRequestMessage, HttpResponseMessage>[] responders)
    {
        _script.Clear();
        foreach (var responder in responders)
        {
            _script.Enqueue(responder);
        }
    }

    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        if (_script.Count == 0)
        {
            throw new InvalidOperationException("No responder configured for MSRC connector test request.");
        }

        // Leave the last responder queued so it services all subsequent requests.
        var next = _script.Count == 1 ? _script.Peek() : _script.Dequeue();
        var response = next(request);
        response.RequestMessage = request;
        return Task.FromResult(response);
    }
}
}
}
// Scripted HttpMessageHandler for connector tests: responders are consumed in
// request order, and the final responder is kept in the queue so it answers
// any further requests.
private sealed class TestHttpMessageHandler : HttpMessageHandler
{
    private readonly Queue<Func<HttpRequestMessage, HttpResponseMessage>> _responders;

    private TestHttpMessageHandler(IEnumerable<Func<HttpRequestMessage, HttpResponseMessage>> responders)
    {
        _responders = new Queue<Func<HttpRequestMessage, HttpResponseMessage>>(responders);
    }

    public static TestHttpMessageHandler Create(params Func<HttpRequestMessage, HttpResponseMessage>[] responders)
        => new(responders);

    // Discards any remaining script and installs a fresh responder sequence.
    public void Reset(params Func<HttpRequestMessage, HttpResponseMessage>[] responders)
    {
        _responders.Clear();
        foreach (var responder in responders)
        {
            _responders.Enqueue(responder);
        }
    }

    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        if (_responders.Count == 0)
        {
            throw new InvalidOperationException("No responder configured for MSRC connector test request.");
        }
        // Dequeue unless only one responder remains; the last one is reused.
        var responder = _responders.Count > 1 ? _responders.Dequeue() : _responders.Peek();
        var response = responder(request);
        response.RequestMessage = request;
        return Task.FromResult(response);
    }
}
}

View File

@@ -1,18 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,19 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,213 +1,215 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Configuration;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.DependencyInjection;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Discovery;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Fetch;
using StellaOps.Excititor.Core;
using System.IO.Abstractions.TestingHelpers;
using Xunit;
namespace StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.Connector;
public sealed class OciOpenVexAttestationConnectorTests
{
[Fact]
public async Task FetchAsync_WithOfflineBundle_EmitsRawDocument()
{
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"),
});
using var cache = new MemoryCache(new MemoryCacheOptions());
var httpClient = new HttpClient(new StubHttpMessageHandler())
{
BaseAddress = new System.Uri("https://registry.example.com/")
};
var httpFactory = new SingleClientHttpClientFactory(httpClient);
var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance);
var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance);
var connector = new OciOpenVexAttestationConnector(
discovery,
fetcher,
NullLogger<OciOpenVexAttestationConnector>.Instance,
TimeProvider.System);
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add("Images:0:Reference", "registry.example.com/repo/image:latest")
.Add("Images:0:OfflineBundlePath", "/bundles/attestation.json")
.Add("Offline:PreferOffline", "true")
.Add("Offline:AllowNetworkFallback", "false")
.Add("Cosign:Mode", "None");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new CapturingRawSink();
var verifier = new CapturingSignatureVerifier();
var context = new VexConnectorContext(
Since: null,
Settings: VexConnectorSettings.Empty,
RawSink: sink,
SignatureVerifier: verifier,
Normalizers: new NoopNormalizerRouter(),
Services: new Microsoft.Extensions.DependencyInjection.ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
documents[0].Format.Should().Be(VexDocumentFormat.OciAttestation);
documents[0].Metadata.Should().ContainKey("oci.attestation.sourceKind").WhoseValue.Should().Be("offline");
documents[0].Metadata.Should().ContainKey("vex.provenance.sourceKind").WhoseValue.Should().Be("offline");
documents[0].Metadata.Should().ContainKey("vex.provenance.registryAuthMode").WhoseValue.Should().Be("Anonymous");
verifier.VerifyCalls.Should().Be(1);
}
[Fact]
public async Task FetchAsync_WithSignatureMetadata_EnrichesProvenance()
{
var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
{
["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"),
});
using var cache = new MemoryCache(new MemoryCacheOptions());
var httpClient = new HttpClient(new StubHttpMessageHandler())
{
BaseAddress = new System.Uri("https://registry.example.com/")
};
var httpFactory = new SingleClientHttpClientFactory(httpClient);
var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance);
var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance);
var connector = new OciOpenVexAttestationConnector(
discovery,
fetcher,
NullLogger<OciOpenVexAttestationConnector>.Instance,
TimeProvider.System);
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add("Images:0:Reference", "registry.example.com/repo/image:latest")
.Add("Images:0:OfflineBundlePath", "/bundles/attestation.json")
.Add("Offline:PreferOffline", "true")
.Add("Offline:AllowNetworkFallback", "false")
.Add("Cosign:Mode", "Keyless")
.Add("Cosign:Keyless:Issuer", "https://issuer.example.com")
.Add("Cosign:Keyless:Subject", "subject@example.com");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new CapturingRawSink();
var verifier = new CapturingSignatureVerifier
{
Result = new VexSignatureMetadata(
type: "cosign",
subject: "sig-subject",
issuer: "sig-issuer",
keyId: "key-id",
verifiedAt: DateTimeOffset.UtcNow,
transparencyLogReference: "rekor://entry/123")
};
var context = new VexConnectorContext(
Since: null,
Settings: VexConnectorSettings.Empty,
RawSink: sink,
SignatureVerifier: verifier,
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().HaveCount(1);
var metadata = documents[0].Metadata;
metadata.Should().Contain("vex.signature.type", "cosign");
metadata.Should().Contain("vex.signature.subject", "sig-subject");
metadata.Should().Contain("vex.signature.issuer", "sig-issuer");
metadata.Should().Contain("vex.signature.keyId", "key-id");
metadata.Should().ContainKey("vex.signature.verifiedAt");
metadata.Should().Contain("vex.signature.transparencyLogReference", "rekor://entry/123");
metadata.Should().Contain("vex.provenance.cosign.mode", "Keyless");
metadata.Should().Contain("vex.provenance.cosign.issuer", "https://issuer.example.com");
metadata.Should().Contain("vex.provenance.cosign.subject", "subject@example.com");
verifier.VerifyCalls.Should().Be(1);
}
private sealed class CapturingRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
private sealed class CapturingSignatureVerifier : IVexSignatureVerifier
{
public int VerifyCalls { get; private set; }
public VexSignatureMetadata? Result { get; set; }
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
{
VerifyCalls++;
return ValueTask.FromResult(Result);
}
}
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
private sealed class SingleClientHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleClientHttpClientFactory(HttpClient client)
{
_client = client;
}
public HttpClient CreateClient(string name) => _client;
}
private sealed class StubHttpMessageHandler : HttpMessageHandler
{
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)
{
RequestMessage = request
});
}
}
}
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Configuration;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.DependencyInjection;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Discovery;
using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Fetch;
using StellaOps.Excititor.Core;
using System.IO.Abstractions.TestingHelpers;
using Xunit;
namespace StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.Connector;
// Exercises the OCI OpenVEX attestation connector end-to-end against an
// offline attestation bundle on a mock file system: discovery, fetch,
// signature verification, and provenance metadata enrichment.
public sealed class OciOpenVexAttestationConnectorTests
{
    // An offline bundle must be emitted as a raw document without touching the
    // (stubbed, always-404) registry, and the signature verifier must run once.
    [Fact]
    public async Task FetchAsync_WithOfflineBundle_EmitsRawDocument()
    {
        var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
        {
            ["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"),
        });
        using var cache = new MemoryCache(new MemoryCacheOptions());
        // The stub handler 404s every request, proving the offline path is used.
        var httpClient = new HttpClient(new StubHttpMessageHandler())
        {
            BaseAddress = new System.Uri("https://registry.example.com/")
        };
        var httpFactory = new SingleClientHttpClientFactory(httpClient);
        var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance);
        var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance);
        var connector = new OciOpenVexAttestationConnector(
            discovery,
            fetcher,
            NullLogger<OciOpenVexAttestationConnector>.Instance,
            TimeProvider.System);
        // One image whose attestation comes from the offline bundle; network
        // fallback disabled and cosign verification off.
        var settingsValues = ImmutableDictionary<string, string>.Empty
            .Add("Images:0:Reference", "registry.example.com/repo/image:latest")
            .Add("Images:0:OfflineBundlePath", "/bundles/attestation.json")
            .Add("Offline:PreferOffline", "true")
            .Add("Offline:AllowNetworkFallback", "false")
            .Add("Cosign:Mode", "None");
        var settings = new VexConnectorSettings(settingsValues);
        await connector.ValidateAsync(settings, CancellationToken.None);
        var sink = new CapturingRawSink();
        var verifier = new CapturingSignatureVerifier();
        var context = new VexConnectorContext(
            Since: null,
            Settings: VexConnectorSettings.Empty,
            RawSink: sink,
            SignatureVerifier: verifier,
            Normalizers: new NoopNormalizerRouter(),
            Services: new Microsoft.Extensions.DependencyInjection.ServiceCollection().BuildServiceProvider(),
            ResumeTokens: ImmutableDictionary<string, string>.Empty);
        var documents = new List<VexRawDocument>();
        await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
        {
            documents.Add(document);
        }
        documents.Should().HaveCount(1);
        sink.Documents.Should().HaveCount(1);
        documents[0].Format.Should().Be(VexDocumentFormat.OciAttestation);
        // Provenance metadata records that the document came from the offline source.
        documents[0].Metadata.Should().ContainKey("oci.attestation.sourceKind").WhoseValue.Should().Be("offline");
        documents[0].Metadata.Should().ContainKey("vex.provenance.sourceKind").WhoseValue.Should().Be("offline");
        documents[0].Metadata.Should().ContainKey("vex.provenance.registryAuthMode").WhoseValue.Should().Be("Anonymous");
        verifier.VerifyCalls.Should().Be(1);
    }

    // When the signature verifier returns cosign metadata, that metadata must be
    // folded into the emitted document's vex.signature.* entries alongside the
    // configured vex.provenance.cosign.* settings.
    [Fact]
    public async Task FetchAsync_WithSignatureMetadata_EnrichesProvenance()
    {
        var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData>
        {
            ["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"),
        });
        using var cache = new MemoryCache(new MemoryCacheOptions());
        var httpClient = new HttpClient(new StubHttpMessageHandler())
        {
            BaseAddress = new System.Uri("https://registry.example.com/")
        };
        var httpFactory = new SingleClientHttpClientFactory(httpClient);
        var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance);
        var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance);
        var connector = new OciOpenVexAttestationConnector(
            discovery,
            fetcher,
            NullLogger<OciOpenVexAttestationConnector>.Instance,
            TimeProvider.System);
        // Keyless cosign configuration whose issuer/subject should surface in
        // the provenance metadata.
        var settingsValues = ImmutableDictionary<string, string>.Empty
            .Add("Images:0:Reference", "registry.example.com/repo/image:latest")
            .Add("Images:0:OfflineBundlePath", "/bundles/attestation.json")
            .Add("Offline:PreferOffline", "true")
            .Add("Offline:AllowNetworkFallback", "false")
            .Add("Cosign:Mode", "Keyless")
            .Add("Cosign:Keyless:Issuer", "https://issuer.example.com")
            .Add("Cosign:Keyless:Subject", "subject@example.com");
        var settings = new VexConnectorSettings(settingsValues);
        await connector.ValidateAsync(settings, CancellationToken.None);
        var sink = new CapturingRawSink();
        // Verifier preloaded with signature metadata to be merged into the document.
        var verifier = new CapturingSignatureVerifier
        {
            Result = new VexSignatureMetadata(
                type: "cosign",
                subject: "sig-subject",
                issuer: "sig-issuer",
                keyId: "key-id",
                verifiedAt: DateTimeOffset.UtcNow,
                transparencyLogReference: "rekor://entry/123")
        };
        var context = new VexConnectorContext(
            Since: null,
            Settings: VexConnectorSettings.Empty,
            RawSink: sink,
            SignatureVerifier: verifier,
            Normalizers: new NoopNormalizerRouter(),
            Services: new ServiceCollection().BuildServiceProvider(),
            ResumeTokens: ImmutableDictionary<string, string>.Empty);
        var documents = new List<VexRawDocument>();
        await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
        {
            documents.Add(document);
        }
        documents.Should().HaveCount(1);
        var metadata = documents[0].Metadata;
        metadata.Should().Contain("vex.signature.type", "cosign");
        metadata.Should().Contain("vex.signature.subject", "sig-subject");
        metadata.Should().Contain("vex.signature.issuer", "sig-issuer");
        metadata.Should().Contain("vex.signature.keyId", "key-id");
        metadata.Should().ContainKey("vex.signature.verifiedAt");
        metadata.Should().Contain("vex.signature.transparencyLogReference", "rekor://entry/123");
        metadata.Should().Contain("vex.provenance.cosign.mode", "Keyless");
        metadata.Should().Contain("vex.provenance.cosign.issuer", "https://issuer.example.com");
        metadata.Should().Contain("vex.provenance.cosign.subject", "subject@example.com");
        verifier.VerifyCalls.Should().Be(1);
    }

    // Raw-document sink that records every stored document for assertions.
    private sealed class CapturingRawSink : IVexRawDocumentSink
    {
        // Documents in the order they were stored.
        public List<VexRawDocument> Documents { get; } = new();

        public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
        {
            Documents.Add(document);
            return ValueTask.CompletedTask;
        }
    }

    // Signature verifier stub that counts invocations and returns a canned result.
    private sealed class CapturingSignatureVerifier : IVexSignatureVerifier
    {
        public int VerifyCalls { get; private set; }

        // Metadata handed back for every verification; null means "unsigned".
        public VexSignatureMetadata? Result { get; set; }

        public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
        {
            VerifyCalls++;
            return ValueTask.FromResult(Result);
        }
    }

    // Normalizer stub producing an empty claim batch; fetch tests do not
    // exercise normalization.
    private sealed class NoopNormalizerRouter : IVexNormalizerRouter
    {
        public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
    }

    // IHttpClientFactory stub returning the same client for every name.
    private sealed class SingleClientHttpClientFactory : IHttpClientFactory
    {
        private readonly HttpClient _client;

        public SingleClientHttpClientFactory(HttpClient client)
        {
            _client = client;
        }

        public HttpClient CreateClient(string name) => _client;
    }

    // Handler that 404s every request, ensuring the registry is never a source.
    private sealed class StubHttpMessageHandler : HttpMessageHandler
    {
        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)
            {
                RequestMessage = request
            });
        }
    }
}

View File

@@ -1,18 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsNotAsErrors>NU1903</WarningsNotAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,20 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsNotAsErrors>NU1903</WarningsNotAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<!-- Connector library: builds against the shared connector abstractions and the
     Excititor core. Targets net10.0 preview; warnings are errors. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
<ItemGroup>
<!-- Runtime packages aligned with the net10.0 preview target. -->
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,260 +1,262 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Oracle.CSAF;
using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Oracle.CSAF;
using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using System.IO.Abstractions.TestingHelpers;
using Xunit;
using MongoDB.Driver;
namespace StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.Connectors;
public sealed class OracleCsafConnectorTests
{
[Fact]
public async Task FetchAsync_NewEntry_PersistsDocumentAndUpdatesState()
{
var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json");
var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}");
var payloadDigest = ComputeDigest(payload);
var snapshotPath = "/snapshots/oracle-catalog.json";
var fileSystem = new MockFileSystem();
fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, payloadDigest, "2025-10-15T00:00:00Z")));
var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage>
{
[documentUri] = CreateResponse(payload),
});
var httpClient = new HttpClient(handler);
var httpFactory = new SingleHttpClientFactory(httpClient);
var loader = new OracleCatalogLoader(
httpFactory,
new MemoryCache(new MemoryCacheOptions()),
fileSystem,
NullLogger<OracleCatalogLoader>.Instance,
TimeProvider.System);
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new OracleCsafConnector(
loader,
httpFactory,
stateRepository,
new[] { new OracleConnectorOptionsValidator(fileSystem) },
NullLogger<OracleCsafConnector>.Instance,
TimeProvider.System);
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true")
.Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath)
.Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(
Since: null,
Settings: settings,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
documents[0].Digest.Should().Be(payloadDigest);
documents[0].Metadata["oracle.csaf.entryId"].Should().Be("CPU2025Oct");
documents[0].Metadata["oracle.csaf.sha256"].Should().Be(payloadDigest);
stateRepository.State.Should().NotBeNull();
stateRepository.State!.DocumentDigests.Should().ContainSingle().Which.Should().Be(payloadDigest);
handler.GetCallCount(documentUri).Should().Be(1);
// second run should short-circuit without downloading again
sink.Documents.Clear();
documents.Clear();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
handler.GetCallCount(documentUri).Should().Be(1);
}
[Fact]
public async Task FetchAsync_ChecksumMismatch_SkipsDocument()
{
var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json");
var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}");
var snapshotPath = "/snapshots/oracle-catalog.json";
var fileSystem = new MockFileSystem();
fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, "deadbeef", "2025-10-15T00:00:00Z")));
var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage>
{
[documentUri] = CreateResponse(payload),
});
var httpClient = new HttpClient(handler);
var httpFactory = new SingleHttpClientFactory(httpClient);
var loader = new OracleCatalogLoader(
httpFactory,
new MemoryCache(new MemoryCacheOptions()),
fileSystem,
NullLogger<OracleCatalogLoader>.Instance,
TimeProvider.System);
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new OracleCsafConnector(
loader,
httpFactory,
stateRepository,
new[] { new OracleConnectorOptionsValidator(fileSystem) },
NullLogger<OracleCsafConnector>.Instance,
TimeProvider.System);
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true")
.Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath)
.Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(
Since: null,
Settings: settings,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider());
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
stateRepository.State.Should().BeNull();
handler.GetCallCount(documentUri).Should().Be(1);
}
private static HttpResponseMessage CreateResponse(byte[] payload)
=> new(HttpStatusCode.OK)
{
Content = new ByteArrayContent(payload)
{
Headers =
{
ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"),
}
}
};
private static string ComputeDigest(byte[] payload)
{
Span<byte> buffer = stackalloc byte[32];
SHA256.HashData(payload, buffer);
return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant();
}
private static string BuildOfflineSnapshot(Uri documentUri, string sha256, string publishedAt)
{
var snapshot = new
{
metadata = new
{
generatedAt = "2025-10-14T12:00:00Z",
entries = new[]
{
new
{
id = "CPU2025Oct",
title = "Oracle Critical Patch Update Advisory - October 2025",
documentUri = documentUri.ToString(),
publishedAt,
revision = publishedAt,
sha256,
size = 1024,
products = new[] { "Oracle Database" }
}
},
cpuSchedule = Array.Empty<object>()
},
fetchedAt = "2025-10-14T12:00:00Z"
};
return JsonSerializer.Serialize(snapshot, new JsonSerializerOptions(JsonSerializerDefaults.Web));
}
private sealed class StubHttpMessageHandler : HttpMessageHandler
{
private readonly Dictionary<Uri, HttpResponseMessage> _responses;
private readonly Dictionary<Uri, int> _callCounts = new();
public StubHttpMessageHandler(Dictionary<Uri, HttpResponseMessage> responses)
{
_responses = responses;
}
public int GetCallCount(Uri uri) => _callCounts.TryGetValue(uri, out var count) ? count : 0;
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
if (request.RequestUri is null || !_responses.TryGetValue(request.RequestUri, out var response))
{
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
}
_callCounts.TryGetValue(request.RequestUri, out var count);
_callCounts[request.RequestUri] = count + 1;
return Task.FromResult(response.Clone());
}
}
private sealed class SingleHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleHttpClientFactory(HttpClient client)
{
_client = client;
}
public HttpClient CreateClient(string name) => _client;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? State { get; private set; }
namespace StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.Connectors;
public sealed class OracleCsafConnectorTests
{
// Happy path: a catalog entry from the offline snapshot is downloaded, its digest
// matches the advertised sha256, the raw document is persisted, connector state
// records the digest, and a second fetch is a no-op (no re-download).
[Fact]
public async Task FetchAsync_NewEntry_PersistsDocumentAndUpdatesState()
{
// Arrange: offline snapshot advertising one entry whose sha256 matches the payload.
var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json");
var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}");
var payloadDigest = ComputeDigest(payload);
var snapshotPath = "/snapshots/oracle-catalog.json";
var fileSystem = new MockFileSystem();
fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, payloadDigest, "2025-10-15T00:00:00Z")));
var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage>
{
[documentUri] = CreateResponse(payload),
});
var httpClient = new HttpClient(handler);
var httpFactory = new SingleHttpClientFactory(httpClient);
var loader = new OracleCatalogLoader(
httpFactory,
new MemoryCache(new MemoryCacheOptions()),
fileSystem,
NullLogger<OracleCatalogLoader>.Instance,
TimeProvider.System);
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new OracleCsafConnector(
loader,
httpFactory,
stateRepository,
new[] { new OracleConnectorOptionsValidator(fileSystem) },
NullLogger<OracleCsafConnector>.Instance,
TimeProvider.System);
// Force the connector onto the offline snapshot; nothing is written back to disk.
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true")
.Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath)
.Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(
Since: null,
Settings: settings,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider(),
ResumeTokens: ImmutableDictionary<string, string>.Empty);
// Act: first fetch should download and emit exactly one raw document.
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
// Assert: document persisted with expected digest/metadata; state updated; one HTTP call.
documents.Should().HaveCount(1);
sink.Documents.Should().HaveCount(1);
documents[0].Digest.Should().Be(payloadDigest);
documents[0].Metadata["oracle.csaf.entryId"].Should().Be("CPU2025Oct");
documents[0].Metadata["oracle.csaf.sha256"].Should().Be(payloadDigest);
stateRepository.State.Should().NotBeNull();
stateRepository.State!.DocumentDigests.Should().ContainSingle().Which.Should().Be(payloadDigest);
handler.GetCallCount(documentUri).Should().Be(1);
// second run should short-circuit without downloading again
sink.Documents.Clear();
documents.Clear();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
handler.GetCallCount(documentUri).Should().Be(1);
}
// Integrity enforcement: when the snapshot's advertised sha256 ("deadbeef") does not
// match the downloaded payload, the document is skipped, nothing reaches the sink,
// and no connector state is saved — but the download itself is still attempted once.
[Fact]
public async Task FetchAsync_ChecksumMismatch_SkipsDocument()
{
// Arrange: snapshot entry whose checksum deliberately mismatches the served payload.
var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json");
var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}");
var snapshotPath = "/snapshots/oracle-catalog.json";
var fileSystem = new MockFileSystem();
fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, "deadbeef", "2025-10-15T00:00:00Z")));
var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage>
{
[documentUri] = CreateResponse(payload),
});
var httpClient = new HttpClient(handler);
var httpFactory = new SingleHttpClientFactory(httpClient);
var loader = new OracleCatalogLoader(
httpFactory,
new MemoryCache(new MemoryCacheOptions()),
fileSystem,
NullLogger<OracleCatalogLoader>.Instance,
TimeProvider.System);
var stateRepository = new InMemoryConnectorStateRepository();
var connector = new OracleCsafConnector(
loader,
httpFactory,
stateRepository,
new[] { new OracleConnectorOptionsValidator(fileSystem) },
NullLogger<OracleCsafConnector>.Instance,
TimeProvider.System);
var settingsValues = ImmutableDictionary<string, string>.Empty
.Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true")
.Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath)
.Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false");
var settings = new VexConnectorSettings(settingsValues);
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(
Since: null,
Settings: settings,
RawSink: sink,
SignatureVerifier: new NoopSignatureVerifier(),
Normalizers: new NoopNormalizerRouter(),
Services: new ServiceCollection().BuildServiceProvider(),
ResumeTokens: ImmutableDictionary<string, string>.Empty);
// Act: fetch; the mismatching document must be rejected after download.
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
{
documents.Add(document);
}
// Assert: nothing emitted or persisted; exactly one HTTP attempt was made.
documents.Should().BeEmpty();
sink.Documents.Should().BeEmpty();
stateRepository.State.Should().BeNull();
handler.GetCallCount(documentUri).Should().Be(1);
}
// Builds a 200 OK response whose body is the supplied payload, tagged as application/json.
private static HttpResponseMessage CreateResponse(byte[] payload)
{
    var content = new ByteArrayContent(payload);
    content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
    return new HttpResponseMessage(HttpStatusCode.OK) { Content = content };
}
// Computes the canonical "sha256:<lowercase hex>" digest string used in connector metadata.
private static string ComputeDigest(byte[] payload)
{
    var hash = SHA256.HashData(payload);
    return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
// Serializes the JSON offline-snapshot document consumed by OracleCatalogLoader:
// a single catalog entry ("CPU2025Oct") pointing at documentUri with the supplied
// sha256 checksum and publication/revision timestamp, using web (camelCase) naming.
private static string BuildOfflineSnapshot(Uri documentUri, string sha256, string publishedAt)
{
var snapshot = new
{
metadata = new
{
generatedAt = "2025-10-14T12:00:00Z",
entries = new[]
{
new
{
id = "CPU2025Oct",
title = "Oracle Critical Patch Update Advisory - October 2025",
documentUri = documentUri.ToString(),
publishedAt,
revision = publishedAt,
sha256,
size = 1024,
products = new[] { "Oracle Database" }
}
},
cpuSchedule = Array.Empty<object>()
},
fetchedAt = "2025-10-14T12:00:00Z"
};
return JsonSerializer.Serialize(snapshot, new JsonSerializerOptions(JsonSerializerDefaults.Web));
}
// Serves canned responses keyed by absolute request URI and records how often each
// known URI was requested; unknown (or null) URIs get 404 and are not counted.
// Responses are cloned per request because HttpResponseMessage content is single-use.
private sealed class StubHttpMessageHandler : HttpMessageHandler
{
    private readonly Dictionary<Uri, HttpResponseMessage> _responses;
    private readonly Dictionary<Uri, int> _callCounts = new();

    public StubHttpMessageHandler(Dictionary<Uri, HttpResponseMessage> responses)
        => _responses = responses;

    public int GetCallCount(Uri uri)
        => _callCounts.TryGetValue(uri, out var count) ? count : 0;

    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        var uri = request.RequestUri;
        if (uri is null || !_responses.TryGetValue(uri, out var canned))
        {
            return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
        }

        _callCounts[uri] = GetCallCount(uri) + 1;
        return Task.FromResult(canned.Clone());
    }
}
// IHttpClientFactory stub that hands out the single test-provided HttpClient,
// ignoring the requested client name, so the stub handler intercepts all traffic.
private sealed class SingleHttpClientFactory : IHttpClientFactory
{
private readonly HttpClient _client;
public SingleHttpClientFactory(HttpClient client)
{
_client = client;
}
// Name is ignored; every caller shares the same client instance.
public HttpClient CreateClient(string name) => _client;
}
private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository
{
public VexConnectorState? State { get; private set; }
public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> ValueTask.FromResult(State);
@@ -263,50 +265,50 @@ public sealed class OracleCsafConnectorTests
State = state;
return ValueTask.CompletedTask;
}
}
private sealed class InMemoryRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
}
internal static class HttpResponseMessageExtensions
{
    /// <summary>
    /// Creates a deep copy of <paramref name="response"/> so a canned response can be
    /// served repeatedly (HttpResponseMessage content can only be consumed once).
    /// Copies the status code, response headers, buffered payload and content headers.
    /// </summary>
    public static HttpResponseMessage Clone(this HttpResponseMessage response)
    {
        var clone = new HttpResponseMessage(response.StatusCode);
        foreach (var header in response.Headers)
        {
            clone.Headers.TryAddWithoutValidation(header.Key, header.Value);
        }
        if (response.Content is not null)
        {
            // Synchronous buffering is acceptable in this test helper: content is in memory.
            var payload = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult();
            clone.Content = new ByteArrayContent(payload);
            // Copy every content header; the previous implementation kept only Content-Type,
            // silently dropping any other content headers on the cloned response.
            foreach (var header in response.Content.Headers)
            {
                clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value);
            }
            // Preserve the original fallback: default to application/json when unset.
            clone.Content.Headers.ContentType ??= new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
        }
        return clone;
    }
}
}
// Captures raw documents in memory so tests can assert exactly what the connector persisted.
private sealed class InMemoryRawSink : IVexRawDocumentSink
{
public List<VexRawDocument> Documents { get; } = new();
public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
Documents.Add(document);
return ValueTask.CompletedTask;
}
}
// Signature-verification stub: reports "no signature metadata" (null) for every document.
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
{
public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult<VexSignatureMetadata?>(null);
}
// Normalizer stub: yields an empty claim batch for any raw document (no claims, no diagnostics).
private sealed class NoopNormalizerRouter : IVexNormalizerRouter
{
public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty));
}
}
internal static class HttpResponseMessageExtensions
{
    /// <summary>
    /// Creates a deep copy of <paramref name="response"/> so a canned response can be
    /// served repeatedly (HttpResponseMessage content can only be consumed once).
    /// Copies the status code, response headers, buffered payload and content headers.
    /// </summary>
    public static HttpResponseMessage Clone(this HttpResponseMessage response)
    {
        var clone = new HttpResponseMessage(response.StatusCode);
        foreach (var header in response.Headers)
        {
            clone.Headers.TryAddWithoutValidation(header.Key, header.Value);
        }
        if (response.Content is not null)
        {
            // Synchronous buffering is acceptable in this test helper: content is in memory.
            var payload = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult();
            clone.Content = new ByteArrayContent(payload);
            // Copy every content header; the previous implementation kept only Content-Type,
            // silently dropping any other content headers on the cloned response.
            foreach (var header in response.Content.Headers)
            {
                clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value);
            }
            // Preserve the original fallback: default to application/json when unset.
            clone.Content.Headers.ContentType ??= new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
        }
        return clone;
    }
}

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<!-- Test-support project (FluentAssertions + filesystem testing helpers) for the
     Oracle CSAF connector. Targets net10.0 preview; warnings are errors. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<!-- Logging abstractions pinned to the 10.0 preview to match the target framework. -->
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,20 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<!-- Connector library: depends on the shared connector abstractions, the Excititor
     core, and the Mongo storage layer. Targets net10.0 preview; warnings are errors. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<!-- Runtime packages aligned with the net10.0 preview target. -->
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -73,13 +73,14 @@ public sealed class RedHatCsafConnectorTests
var connector = new RedHatCsafConnector(Descriptor, metadataLoader, factory, stateRepository, NullLogger<RedHatCsafConnector>.Instance, TimeProvider.System);
var rawSink = new CapturingRawSink();
var context = new VexConnectorContext(
new DateTimeOffset(2025, 10, 16, 12, 0, 0, TimeSpan.Zero),
VexConnectorSettings.Empty,
rawSink,
new NoopSignatureVerifier(),
new NoopNormalizerRouter(),
new ServiceCollection().BuildServiceProvider());
var context = new VexConnectorContext(
new DateTimeOffset(2025, 10, 16, 12, 0, 0, TimeSpan.Zero),
VexConnectorSettings.Empty,
rawSink,
new NoopSignatureVerifier(),
new NoopNormalizerRouter(),
new ServiceCollection().BuildServiceProvider(),
ImmutableDictionary<string, string>.Empty);
var results = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))
@@ -238,13 +239,14 @@ public sealed class RedHatCsafConnectorTests
var connector = new RedHatCsafConnector(Descriptor, metadataLoader, factory, stateRepository, NullLogger<RedHatCsafConnector>.Instance, TimeProvider.System);
var rawSink = new CapturingRawSink();
var context = new VexConnectorContext(
null,
VexConnectorSettings.Empty,
rawSink,
new NoopSignatureVerifier(),
new NoopNormalizerRouter(),
new ServiceCollection().BuildServiceProvider());
var context = new VexConnectorContext(
null,
VexConnectorSettings.Empty,
rawSink,
new NoopSignatureVerifier(),
new NoopNormalizerRouter(),
new ServiceCollection().BuildServiceProvider(),
ImmutableDictionary<string, string>.Empty);
var documents = new List<VexRawDocument>();
await foreach (var document in connector.FetchAsync(context, CancellationToken.None))

View File

@@ -1,19 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<!-- Connector library: depends on the shared connector abstractions and the Mongo
     storage layer. Targets net10.0 preview; warnings are errors. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<!-- Runtime packages aligned with the net10.0 preview target. -->
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,19 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<!-- Connector library: depends on the shared connector abstractions and the Mongo
     storage layer. Targets net10.0 preview; warnings are errors. -->
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<!-- Runtime packages aligned with the net10.0 preview target. -->
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -61,7 +61,7 @@ public sealed class UbuntuCsafConnectorTests
await connector.ValidateAsync(settings, CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider());
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty);
var documents = new List<VexRawDocument>();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))
@@ -130,7 +130,7 @@ public sealed class UbuntuCsafConnectorTests
await connector.ValidateAsync(new VexConnectorSettings(ImmutableDictionary<string, string>.Empty), CancellationToken.None);
var sink = new InMemoryRawSink();
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider());
var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty);
var documents = new List<VexRawDocument>();
await foreach (var doc in connector.FetchAsync(context, CancellationToken.None))

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,20 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
</Project>

View File

@@ -1,7 +1,7 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core;
@@ -24,13 +24,14 @@ public interface IVexConnector
/// <summary>
/// Connector context populated by the orchestrator/worker.
/// </summary>
public sealed record VexConnectorContext(
DateTimeOffset? Since,
VexConnectorSettings Settings,
IVexRawDocumentSink RawSink,
IVexSignatureVerifier SignatureVerifier,
IVexNormalizerRouter Normalizers,
IServiceProvider Services);
/// <summary>
/// Execution context handed to a connector by the orchestrator/worker for a single run.
/// </summary>
/// <param name="Since">Optional lower bound for incremental fetches; <c>null</c> means a full fetch.</param>
/// <param name="Settings">Normalized connector configuration values.</param>
/// <param name="RawSink">Destination for raw documents produced by the connector.</param>
/// <param name="SignatureVerifier">Verifier applied to fetched document signatures.</param>
/// <param name="Normalizers">Router that dispatches raw documents to normalizers.</param>
/// <param name="Services">Service provider for resolving additional dependencies.</param>
/// <param name="ResumeTokens">Per-connector string tokens; presumably checkpoint/resume state persisted between runs — confirm with the orchestrator that populates them.</param>
public sealed record VexConnectorContext(
DateTimeOffset? Since,
VexConnectorSettings Settings,
IVexRawDocumentSink RawSink,
IVexSignatureVerifier SignatureVerifier,
IVexNormalizerRouter Normalizers,
IServiceProvider Services,
ImmutableDictionary<string, string> ResumeTokens);
/// <summary>
/// Normalized connector configuration values.

View File

@@ -0,0 +1,47 @@
namespace StellaOps.Excititor.Core;
/// <summary>
/// A consensus candidate that is deliberately withheld: it was requested at
/// <see cref="RequestedAt"/> and may only be published once <see cref="EligibleAt"/> passes.
/// </summary>
public sealed record VexConsensusHold
{
    /// <summary>
    /// Creates a hold for the given vulnerability/product consensus candidate.
    /// </summary>
    /// <param name="vulnerabilityId">Vulnerability identifier; must be non-blank (trimmed on store).</param>
    /// <param name="productKey">Product key; must be non-blank (trimmed on store).</param>
    /// <param name="candidate">The consensus being held; must not be null.</param>
    /// <param name="requestedAt">When the hold was requested.</param>
    /// <param name="eligibleAt">When the hold becomes eligible; must not precede <paramref name="requestedAt"/>.</param>
    /// <param name="reason">Free-form reason; blank values collapse to "unspecified".</param>
    /// <exception cref="ArgumentException">A required string argument is blank.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="eligibleAt"/> precedes <paramref name="requestedAt"/>.</exception>
    /// <exception cref="ArgumentNullException"><paramref name="candidate"/> is null.</exception>
    public VexConsensusHold(
        string vulnerabilityId,
        string productKey,
        VexConsensus candidate,
        DateTimeOffset requestedAt,
        DateTimeOffset eligibleAt,
        string reason)
    {
        // Guard order preserved: blank-string checks first, then the time window, then the candidate.
        if (string.IsNullOrWhiteSpace(vulnerabilityId))
        {
            throw new ArgumentException("Vulnerability id must be provided.", nameof(vulnerabilityId));
        }

        if (string.IsNullOrWhiteSpace(productKey))
        {
            throw new ArgumentException("Product key must be provided.", nameof(productKey));
        }

        if (eligibleAt < requestedAt)
        {
            throw new ArgumentOutOfRangeException(nameof(eligibleAt), "EligibleAt cannot be earlier than RequestedAt.");
        }

        ArgumentNullException.ThrowIfNull(candidate);

        VulnerabilityId = vulnerabilityId.Trim();
        ProductKey = productKey.Trim();
        Candidate = candidate;
        RequestedAt = requestedAt;
        EligibleAt = eligibleAt;
        Reason = string.IsNullOrWhiteSpace(reason) ? "unspecified" : reason.Trim();
    }

    /// <summary>Trimmed vulnerability identifier.</summary>
    public string VulnerabilityId { get; }

    /// <summary>Trimmed product key.</summary>
    public string ProductKey { get; }

    /// <summary>The consensus candidate being withheld.</summary>
    public VexConsensus Candidate { get; }

    /// <summary>Timestamp at which the hold was requested.</summary>
    public DateTimeOffset RequestedAt { get; }

    /// <summary>Timestamp from which the hold becomes eligible for release.</summary>
    public DateTimeOffset EligibleAt { get; }

    /// <summary>Reason for the hold; never blank ("unspecified" when not supplied).</summary>
    public string Reason { get; }
}

View File

@@ -1,19 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,16 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,16 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,16 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,17 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="YamlDotNet" Version="13.7.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="YamlDotNet" Version="13.7.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -17,14 +17,17 @@ public interface IVexProviderStore
ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public interface IVexConsensusStore
{
ValueTask<VexConsensus?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask<IReadOnlyCollection<VexConsensus>> FindByVulnerabilityAsync(string vulnerabilityId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
/// <summary>
/// Persistence contract for computed VEX consensus entries, keyed by vulnerability id + product key.
/// </summary>
public interface IVexConsensusStore
{
/// <summary>Looks up the consensus for a vulnerability/product pair, or null when absent.</summary>
ValueTask<VexConsensus?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>Returns every stored consensus for the given vulnerability across products.</summary>
ValueTask<IReadOnlyCollection<VexConsensus>> FindByVulnerabilityAsync(string vulnerabilityId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>Inserts or replaces the consensus entry.</summary>
ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>
/// Streams consensus entries calculated before <paramref name="cutoff"/>.
/// Default implementation throws so existing implementations remain source-compatible;
/// stores that support time-based scans must override it.
/// </summary>
IAsyncEnumerable<VexConsensus> FindCalculatedBeforeAsync(DateTimeOffset cutoff, int batchSize, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> throw new NotSupportedException();
}
public interface IVexClaimStore
{
@@ -60,12 +63,23 @@ public sealed record VexConnectorState(
}
}
public interface IVexConnectorStateRepository
{
ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public interface IVexConnectorStateRepository
{
ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
/// <summary>
/// Persistence contract for withheld consensus candidates (<see cref="VexConsensusHold"/>),
/// keyed by vulnerability id + product key.
/// </summary>
public interface IVexConsensusHoldStore
{
/// <summary>Looks up the hold for a vulnerability/product pair, or null when none exists.</summary>
ValueTask<VexConsensusHold?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>Inserts or replaces the hold.</summary>
ValueTask SaveAsync(VexConsensusHold hold, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>Deletes the hold for the given vulnerability/product pair, if present.</summary>
ValueTask RemoveAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>Streams holds whose eligibility timestamp is at or before <paramref name="asOf"/>, up to <paramref name="batchSize"/> items.</summary>
IAsyncEnumerable<VexConsensusHold> FindEligibleAsync(DateTimeOffset asOf, int batchSize, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public interface IVexCacheIndex
{

View File

@@ -0,0 +1,29 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
namespace StellaOps.Excititor.Storage.Mongo.Migrations;
/// <summary>
/// Mongo migration that provisions the indexes backing the consensus-hold collection.
/// </summary>
internal sealed class VexConsensusHoldMigration : IVexMongoMigration
{
    /// <summary>Migration identifier (date-prefixed so runners apply migrations in order).</summary>
    public string Id => "20251021-consensus-holds";

    /// <summary>
    /// Creates two indexes on the consensus-hold collection: an ascending index on
    /// EligibleAt (for due-hold scans) and a compound ascending index on
    /// VulnerabilityId + ProductKey (for direct lookups). Both are created concurrently.
    /// </summary>
    public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);

        var holds = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds);
        var keys = Builders<VexConsensusHoldRecord>.IndexKeys;

        var byEligibleAt = new CreateIndexModel<VexConsensusHoldRecord>(
            keys.Ascending(x => x.EligibleAt));
        var byVulnerabilityAndProduct = new CreateIndexModel<VexConsensusHoldRecord>(
            keys.Ascending(x => x.VulnerabilityId).Ascending(x => x.ProductKey));

        await Task.WhenAll(
                holds.Indexes.CreateOneAsync(byEligibleAt, cancellationToken: cancellationToken),
                holds.Indexes.CreateOneAsync(byVulnerabilityAndProduct, cancellationToken: cancellationToken))
            .ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,88 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// MongoDB-backed implementation of <see cref="IVexConsensusHoldStore"/>.
/// Holds are stored in the <c>vex.consensus_holds</c> collection and share the
/// consensus id scheme (vulnerability id + product key), so a hold maps 1:1 to a
/// potential consensus entry. Every operation has an optional session overload for
/// transactional callers.
/// </summary>
public sealed class MongoVexConsensusHoldStore : IVexConsensusHoldStore
{
private readonly IMongoCollection<VexConsensusHoldRecord> _collection;
/// <summary>
/// Resolves the hold collection; also ensures BSON class maps are registered before first use.
/// </summary>
public MongoVexConsensusHoldStore(IMongoDatabase database)
{
ArgumentNullException.ThrowIfNull(database);
VexMongoMappingRegistry.Register();
_collection = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds);
}
/// <summary>
/// Finds the hold for a vulnerability/product pair, or null when none is stored.
/// </summary>
public async ValueTask<VexConsensusHold?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
ArgumentException.ThrowIfNullOrWhiteSpace(productKey);
// Reuse the consensus record id so hold and consensus documents share a key.
var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey);
var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id);
var record = session is null
? await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false)
: await _collection.Find(session, filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return record?.ToDomain();
}
/// <summary>
/// Inserts or replaces the hold (upsert keyed by the derived record id).
/// </summary>
public async ValueTask SaveAsync(VexConsensusHold hold, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
ArgumentNullException.ThrowIfNull(hold);
var record = VexConsensusHoldRecord.FromDomain(hold);
var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, record.Id);
if (session is null)
{
await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.ReplaceOneAsync(session, filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Deletes the hold for a vulnerability/product pair; a missing hold is not an error.
/// </summary>
public async ValueTask RemoveAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
ArgumentException.ThrowIfNullOrWhiteSpace(productKey);
var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey);
var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id);
if (session is null)
{
await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
}
else
{
// Session overload requires the options argument to disambiguate; none are needed.
await _collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Streams holds whose EligibleAt is at or before <paramref name="asOf"/>, ordered by
/// eligibility time. A non-positive <paramref name="batchSize"/> means no limit.
/// </summary>
public async IAsyncEnumerable<VexConsensusHold> FindEligibleAsync(DateTimeOffset asOf, int batchSize, [EnumeratorCancellation] CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
// Records store UTC DateTime values, so compare against the UTC instant.
var cutoff = asOf.UtcDateTime;
var filter = Builders<VexConsensusHoldRecord>.Filter.Lte(x => x.EligibleAt, cutoff);
var find = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
find = find.SortBy(x => x.EligibleAt);
if (batchSize > 0)
{
find = find.Limit(batchSize);
}
// Stream via cursor so large eligible sets are not materialized at once.
using var cursor = await find.ToCursorAsync(cancellationToken).ConfigureAwait(false);
while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false))
{
foreach (var record in cursor.Current)
{
yield return record.ToDomain();
}
}
}
}

View File

@@ -40,19 +40,43 @@ public sealed class MongoVexConsensusStore : IVexConsensusStore
return records.ConvertAll(static record => record.ToDomain());
}
public async ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
ArgumentNullException.ThrowIfNull(consensus);
var record = VexConsensusRecord.FromDomain(consensus);
var filter = Builders<VexConsensusRecord>.Filter.Eq(x => x.Id, record.Id);
if (session is null)
{
await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.ReplaceOneAsync(session, filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
}
}
public async ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
ArgumentNullException.ThrowIfNull(consensus);
var record = VexConsensusRecord.FromDomain(consensus);
var filter = Builders<VexConsensusRecord>.Filter.Eq(x => x.Id, record.Id);
if (session is null)
{
await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.ReplaceOneAsync(session, filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>
/// Streams consensus entries whose CalculatedAt is strictly before <paramref name="cutoff"/>,
/// ordered by calculation time. A non-positive <paramref name="batchSize"/> means no limit.
/// </summary>
public async IAsyncEnumerable<VexConsensus> FindCalculatedBeforeAsync(DateTimeOffset cutoff, int batchSize, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
// Records store UTC DateTime values, so compare against the UTC instant.
var filter = Builders<VexConsensusRecord>.Filter.Lt(x => x.CalculatedAt, cutoff.UtcDateTime);
var find = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
find = find.SortBy(x => x.CalculatedAt);
if (batchSize > 0)
{
find = find.Limit(batchSize);
}
// Stream via cursor so large result sets are not materialized at once.
using var cursor = await find.ToCursorAsync(cancellationToken).ConfigureAwait(false);
while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false))
{
foreach (var record in cursor.Current)
{
yield return record.ToDomain();
}
}
}
}

View File

@@ -48,18 +48,20 @@ public static class VexMongoServiceCollectionExtensions
services.AddScoped<IVexRawStore, MongoVexRawStore>();
services.AddScoped<IVexExportStore, MongoVexExportStore>();
services.AddScoped<IVexProviderStore, MongoVexProviderStore>();
services.AddScoped<IVexNormalizerRouter, StorageBackedVexNormalizerRouter>();
services.AddScoped<IVexConsensusStore, MongoVexConsensusStore>();
services.AddScoped<IVexClaimStore, MongoVexClaimStore>();
services.AddScoped<IVexCacheIndex, MongoVexCacheIndex>();
services.AddScoped<IVexCacheMaintenance, MongoVexCacheMaintenance>();
services.AddScoped<IVexConnectorStateRepository, MongoVexConnectorStateRepository>();
services.AddScoped<VexStatementBackfillService>();
services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusSignalsMigration>();
services.AddSingleton<VexMongoMigrationRunner>();
services.AddHostedService<VexMongoMigrationHostedService>();
return services;
}
}
services.AddScoped<IVexProviderStore, MongoVexProviderStore>();
services.AddScoped<IVexNormalizerRouter, StorageBackedVexNormalizerRouter>();
services.AddScoped<IVexConsensusStore, MongoVexConsensusStore>();
services.AddScoped<IVexConsensusHoldStore, MongoVexConsensusHoldStore>();
services.AddScoped<IVexClaimStore, MongoVexClaimStore>();
services.AddScoped<IVexCacheIndex, MongoVexCacheIndex>();
services.AddScoped<IVexCacheMaintenance, MongoVexCacheMaintenance>();
services.AddScoped<IVexConnectorStateRepository, MongoVexConnectorStateRepository>();
services.AddScoped<VexStatementBackfillService>();
services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusSignalsMigration>();
services.AddSingleton<IVexMongoMigration, VexConsensusHoldMigration>();
services.AddSingleton<VexMongoMigrationRunner>();
services.AddHostedService<VexMongoMigrationHostedService>();
return services;
}
}

View File

@@ -1,18 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="9.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-preview.7.25380.108" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -43,8 +43,9 @@ public static class VexMongoMappingRegistry
RegisterClassMap<VexClaimDocumentRecord>();
RegisterClassMap<VexSignatureMetadataDocument>();
RegisterClassMap<VexStatementRecord>();
RegisterClassMap<VexCacheEntryRecord>();
RegisterClassMap<VexConnectorStateDocument>();
RegisterClassMap<VexCacheEntryRecord>();
RegisterClassMap<VexConnectorStateDocument>();
RegisterClassMap<VexConsensusHoldRecord>();
}
private static void RegisterClassMap<TDocument>()
@@ -71,7 +72,8 @@ public static class VexMongoCollectionNames
public const string Statements = "vex.statements";
public const string Claims = Statements;
public const string Consensus = "vex.consensus";
public const string Exports = "vex.exports";
public const string Cache = "vex.cache";
public const string ConnectorState = "vex.connector_state";
}
public const string Exports = "vex.exports";
public const string Cache = "vex.cache";
public const string ConnectorState = "vex.connector_state";
public const string ConsensusHolds = "vex.consensus_holds";
}

View File

@@ -329,8 +329,8 @@ internal sealed class VexCosignTrustDocument
}
[BsonIgnoreExtraElements]
internal sealed class VexConsensusRecord
{
internal sealed class VexConsensusRecord
{
[BsonId]
public string Id { get; set; } = default!;
@@ -395,7 +395,115 @@ internal sealed class VexConsensusRecord
Summary,
PolicyRevisionId,
PolicyDigest);
}
}
/// <summary>
/// BSON persistence shape for <see cref="VexConsensusHold"/>. Timestamps are stored as
/// UTC <see cref="DateTime"/> values and re-kinded on read.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexConsensusHoldRecord
{
    /// <summary>Document id; same scheme as consensus records (vulnerability + product).</summary>
    [BsonId]
    public string Id { get; set; } = default!;

    public string VulnerabilityId { get; set; } = default!;

    public string ProductKey { get; set; } = default!;

    /// <summary>The held consensus payload.</summary>
    public HeldConsensusDocument Candidate { get; set; } = default!;

    public DateTime RequestedAt { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);

    public DateTime EligibleAt { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);

    public string Reason { get; set; } = "unspecified";

    /// <summary>Maps a domain hold to its persistence record.</summary>
    public static VexConsensusHoldRecord FromDomain(VexConsensusHold hold)
    {
        return new VexConsensusHoldRecord
        {
            Id = VexConsensusRecord.CreateId(hold.VulnerabilityId, hold.ProductKey),
            VulnerabilityId = hold.VulnerabilityId,
            ProductKey = hold.ProductKey,
            Candidate = HeldConsensusDocument.FromDomain(hold.Candidate),
            RequestedAt = hold.RequestedAt.UtcDateTime,
            EligibleAt = hold.EligibleAt.UtcDateTime,
            Reason = hold.Reason,
        };
    }

    /// <summary>Rehydrates the domain hold, forcing stored timestamps back to UTC.</summary>
    public VexConsensusHold ToDomain()
        => new VexConsensusHold(
            VulnerabilityId,
            ProductKey,
            Candidate.ToDomain(),
            new DateTimeOffset(DateTime.SpecifyKind(RequestedAt, DateTimeKind.Utc), TimeSpan.Zero),
            new DateTimeOffset(DateTime.SpecifyKind(EligibleAt, DateTimeKind.Utc), TimeSpan.Zero),
            Reason);
}
/// <summary>
/// BSON persistence shape for a held <see cref="VexConsensus"/> candidate.
/// Status is stored lower-cased and parsed back case-insensitively; CalculatedAt is
/// stored as a UTC <see cref="DateTime"/> and re-kinded on read.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class HeldConsensusDocument
{
    public string VulnerabilityId { get; set; } = default!;

    public VexProductDocument Product { get; set; } = default!;

    public string Status { get; set; } = default!;

    public DateTime CalculatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);

    public List<VexConsensusSourceDocument> Sources { get; set; } = new();

    public List<VexConsensusConflictDocument> Conflicts { get; set; } = new();

    public VexSignalDocument? Signals { get; set; }

    public string? PolicyVersion { get; set; }

    public string? PolicyRevisionId { get; set; }

    public string? PolicyDigest { get; set; }

    public string? Summary { get; set; }

    /// <summary>Maps a domain consensus to its persistence shape.</summary>
    public static HeldConsensusDocument FromDomain(VexConsensus consensus)
    {
        return new HeldConsensusDocument
        {
            VulnerabilityId = consensus.VulnerabilityId,
            Product = VexProductDocument.FromDomain(consensus.Product),
            Status = consensus.Status.ToString().ToLowerInvariant(),
            CalculatedAt = consensus.CalculatedAt.UtcDateTime,
            Sources = consensus.Sources.Select(VexConsensusSourceDocument.FromDomain).ToList(),
            Conflicts = consensus.Conflicts.Select(VexConsensusConflictDocument.FromDomain).ToList(),
            Signals = VexSignalDocument.FromDomain(consensus.Signals),
            PolicyVersion = consensus.PolicyVersion,
            PolicyRevisionId = consensus.PolicyRevisionId,
            PolicyDigest = consensus.PolicyDigest,
            Summary = consensus.Summary,
        };
    }

    /// <summary>Rehydrates the domain consensus from the stored document.</summary>
    public VexConsensus ToDomain()
    {
        // Round-trip of the lower-cased status relies on case-insensitive parsing.
        var status = Enum.Parse<VexConsensusStatus>(Status, ignoreCase: true);
        var calculatedAt = new DateTimeOffset(DateTime.SpecifyKind(CalculatedAt, DateTimeKind.Utc), TimeSpan.Zero);

        return new VexConsensus(
            VulnerabilityId,
            Product.ToDomain(),
            status,
            calculatedAt,
            Sources.Select(static source => source.ToDomain()),
            Conflicts.Select(static conflict => conflict.ToDomain()),
            Signals?.ToDomain(),
            PolicyVersion,
            Summary,
            PolicyRevisionId,
            PolicyDigest);
    }
}
[BsonIgnoreExtraElements]
internal sealed class VexProductDocument

View File

@@ -0,0 +1,274 @@
using System.Collections.Immutable;
using System.IO;
using System.Security.Claims;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Excititor.WebService.Endpoints;
using StellaOps.Excititor.WebService.Services;
namespace StellaOps.Excititor.WebService.Tests;
/// <summary>
/// Unit tests for the Excititor ingest endpoints (init/run/resume/reconcile) covering
/// scope-based authorization, request validation, and the options handed to the
/// orchestrator, using a <see cref="FakeIngestOrchestrator"/> to capture calls.
/// </summary>
public sealed class IngestEndpointsTests
{
    private readonly FakeIngestOrchestrator _orchestrator = new();
    private readonly TimeProvider _timeProvider = TimeProvider.System;

    [Fact]
    public async Task InitEndpoint_ReturnsUnauthorized_WhenMissingToken()
    {
        // Principal without an authenticated identity must be rejected with 401.
        var httpContext = CreateHttpContext();
        var request = new IngestEndpoints.ExcititorInitRequest(null, false);

        var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        Assert.IsType<UnauthorizedHttpResult>(result);
    }

    [Fact]
    public async Task InitEndpoint_ReturnsForbidden_WhenScopeMissing()
    {
        // Authenticated but holding only "vex.read" — admin-only endpoint must return 403.
        var httpContext = CreateHttpContext("vex.read");
        var request = new IngestEndpoints.ExcititorInitRequest(null, false);

        var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        Assert.IsType<ForbidHttpResult>(result);
    }

    [Fact]
    public async Task InitEndpoint_NormalizesProviders_AndReturnsSummary()
    {
        var httpContext = CreateHttpContext("vex.admin");
        // Whitespace-padded and duplicate (case-insensitive) provider ids should be
        // trimmed, lower-cased, and de-duplicated before reaching the orchestrator.
        var request = new IngestEndpoints.ExcititorInitRequest(new[] { " suse ", "redhat", "REDHAT" }, true);
        // Use InvariantCulture so the ISO-8601 literal parses identically on any host culture (CA1305).
        var started = DateTimeOffset.Parse("2025-10-20T12:00:00Z", CultureInfo.InvariantCulture);
        var completed = started.AddMinutes(2);
        _orchestrator.InitFactory = options => new InitSummary(
            Guid.Parse("9a5eb53c-3118-4f78-991e-7d2c1af92a14"),
            started,
            completed,
            ImmutableArray.Create(
                new InitProviderResult("redhat", "Red Hat", "succeeded", TimeSpan.FromSeconds(12), null),
                new InitProviderResult("suse", "SUSE", "failed", TimeSpan.FromSeconds(7), "unreachable")));

        var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var ok = Assert.IsType<Ok<object>>(result);
        Assert.Equal(new[] { "redhat", "suse" }, _orchestrator.LastInitOptions?.Providers);
        Assert.True(_orchestrator.LastInitOptions?.Resume);
        // Round-trip through JSON because the endpoint returns an anonymous payload.
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value));
        Assert.Equal("Initialized 2 provider(s); 1 succeeded, 1 failed.", document.RootElement.GetProperty("message").GetString());
    }

    [Fact]
    public async Task RunEndpoint_ReturnsBadRequest_WhenSinceInvalid()
    {
        var httpContext = CreateHttpContext("vex.admin");
        var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "not-a-date", null, false);

        var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var bad = Assert.IsType<BadRequest<object>>(result);
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value));
        Assert.Contains("Invalid 'since'", document.RootElement.GetProperty("message").GetString());
    }

    [Fact]
    public async Task RunEndpoint_ReturnsBadRequest_WhenWindowInvalid()
    {
        var httpContext = CreateHttpContext("vex.admin");
        // A negative window duration is rejected before any orchestrator call.
        var request = new IngestEndpoints.ExcititorIngestRunRequest(Array.Empty<string>(), null, "-01:00:00", false);

        var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var bad = Assert.IsType<BadRequest<object>>(result);
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value));
        Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString());
    }

    [Fact]
    public async Task RunEndpoint_PassesOptionsToOrchestrator()
    {
        var httpContext = CreateHttpContext("vex.admin");
        var started = DateTimeOffset.Parse("2025-10-20T14:00:00Z", CultureInfo.InvariantCulture);
        var completed = started.AddMinutes(5);
        _orchestrator.RunFactory = options => new IngestRunSummary(
            Guid.Parse("65bbfa25-82fd-41da-8b6b-9d8bb1e2bb5f"),
            started,
            completed,
            ImmutableArray.Create(
                new ProviderRunResult(
                    "redhat",
                    "succeeded",
                    12,
                    42,
                    started,
                    completed,
                    completed - started,
                    "sha256:abc",
                    completed.AddHours(-1),
                    "cp1",
                    null,
                    options.Since)));
        var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "2025-10-19T00:00:00Z", "1.00:00:00", true);

        var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var ok = Assert.IsType<Ok<object>>(result);
        Assert.NotNull(_orchestrator.LastRunOptions);
        Assert.Equal(new[] { "redhat" }, _orchestrator.LastRunOptions!.Providers);
        Assert.True(_orchestrator.LastRunOptions.Force);
        Assert.Equal(TimeSpan.FromDays(1), _orchestrator.LastRunOptions.Window);
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value));
        Assert.Equal("cp1", document.RootElement.GetProperty("providers")[0].GetProperty("checkpoint").GetString());
    }

    [Fact]
    public async Task ResumeEndpoint_PassesCheckpointToOrchestrator()
    {
        var httpContext = CreateHttpContext("vex.admin");
        var started = DateTimeOffset.Parse("2025-10-20T16:00:00Z", CultureInfo.InvariantCulture);
        var completed = started.AddMinutes(2);
        _orchestrator.ResumeFactory = options => new IngestRunSummary(
            Guid.Parse("88407f25-4b3f-434d-8f8e-1c7f4925c37b"),
            started,
            completed,
            ImmutableArray.Create(
                new ProviderRunResult(
                    "suse",
                    "succeeded",
                    5,
                    10,
                    started,
                    completed,
                    completed - started,
                    null,
                    null,
                    options.Checkpoint,
                    null,
                    DateTimeOffset.UtcNow.AddDays(-1))));
        var request = new IngestEndpoints.ExcititorIngestResumeRequest(new[] { "suse" }, "resume-token");

        var result = await IngestEndpoints.HandleResumeAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        Assert.IsType<Ok<object>>(result);
        // The raw checkpoint string must flow through unchanged.
        Assert.Equal("resume-token", _orchestrator.LastResumeOptions?.Checkpoint);
    }

    [Fact]
    public async Task ReconcileEndpoint_ReturnsBadRequest_WhenMaxAgeInvalid()
    {
        var httpContext = CreateHttpContext("vex.admin");
        var request = new IngestEndpoints.ExcititorReconcileRequest(Array.Empty<string>(), "invalid");

        var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var bad = Assert.IsType<BadRequest<object>>(result);
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value));
        Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString());
    }

    [Fact]
    public async Task ReconcileEndpoint_PassesOptionsAndReturnsSummary()
    {
        var httpContext = CreateHttpContext("vex.admin");
        var started = DateTimeOffset.Parse("2025-10-20T18:00:00Z", CultureInfo.InvariantCulture);
        var completed = started.AddMinutes(4);
        _orchestrator.ReconcileFactory = options => new ReconcileSummary(
            Guid.Parse("a2c2cfe6-c21a-4a62-9db7-2ed2792f4e2d"),
            started,
            completed,
            ImmutableArray.Create(
                new ReconcileProviderResult(
                    "ubuntu",
                    "succeeded",
                    "reconciled",
                    started.AddDays(-2),
                    started - TimeSpan.FromDays(3),
                    20,
                    18,
                    null)));
        var request = new IngestEndpoints.ExcititorReconcileRequest(new[] { "ubuntu" }, "2.00:00:00");

        var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None);

        var ok = Assert.IsType<Ok<object>>(result);
        Assert.Equal(TimeSpan.FromDays(2), _orchestrator.LastReconcileOptions?.MaxAge);
        using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value));
        Assert.Equal("reconciled", document.RootElement.GetProperty("providers")[0].GetProperty("action").GetString());
    }

    /// <summary>
    /// Builds a minimal <see cref="DefaultHttpContext"/>. When <paramref name="scopes"/>
    /// are supplied the principal is authenticated ("Test" scheme) and carries a single
    /// space-delimited "scope" claim; otherwise the principal is unauthenticated.
    /// </summary>
    private static DefaultHttpContext CreateHttpContext(params string[] scopes)
    {
        var context = new DefaultHttpContext
        {
            RequestServices = new ServiceCollection().BuildServiceProvider(),
            Response = { Body = new MemoryStream() }
        };

        if (scopes.Length > 0)
        {
            var claims = new List<Claim>
            {
                new Claim(ClaimTypes.NameIdentifier, "test-user"),
                new Claim("scope", string.Join(' ', scopes)),
            };
            context.User = new ClaimsPrincipal(new ClaimsIdentity(claims, "Test"));
        }
        else
        {
            // Identity without an authentication type => IsAuthenticated == false.
            context.User = new ClaimsPrincipal(new ClaimsIdentity());
        }

        return context;
    }

    /// <summary>
    /// Records the last options passed to each orchestrator operation and returns
    /// either a caller-supplied summary (via the *Factory hooks) or an empty default.
    /// </summary>
    private sealed class FakeIngestOrchestrator : IVexIngestOrchestrator
    {
        public IngestInitOptions? LastInitOptions { get; private set; }
        public IngestRunOptions? LastRunOptions { get; private set; }
        public IngestResumeOptions? LastResumeOptions { get; private set; }
        public ReconcileOptions? LastReconcileOptions { get; private set; }

        // Optional per-test hooks; when null the corresponding empty summary is returned.
        public Func<IngestInitOptions, InitSummary>? InitFactory { get; set; }
        public Func<IngestRunOptions, IngestRunSummary>? RunFactory { get; set; }
        public Func<IngestResumeOptions, IngestRunSummary>? ResumeFactory { get; set; }
        public Func<ReconcileOptions, ReconcileSummary>? ReconcileFactory { get; set; }

        public Task<InitSummary> InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken)
        {
            LastInitOptions = options;
            return Task.FromResult(InitFactory is null ? CreateDefaultInitSummary() : InitFactory(options));
        }

        public Task<IngestRunSummary> RunAsync(IngestRunOptions options, CancellationToken cancellationToken)
        {
            LastRunOptions = options;
            return Task.FromResult(RunFactory is null ? CreateDefaultRunSummary() : RunFactory(options));
        }

        public Task<IngestRunSummary> ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken)
        {
            LastResumeOptions = options;
            return Task.FromResult(ResumeFactory is null ? CreateDefaultRunSummary() : ResumeFactory(options));
        }

        public Task<ReconcileSummary> ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken)
        {
            LastReconcileOptions = options;
            return Task.FromResult(ReconcileFactory is null ? CreateDefaultReconcileSummary() : ReconcileFactory(options));
        }

        private static InitSummary CreateDefaultInitSummary()
        {
            var now = DateTimeOffset.UtcNow;
            return new InitSummary(Guid.Empty, now, now, ImmutableArray<InitProviderResult>.Empty);
        }

        private static IngestRunSummary CreateDefaultRunSummary()
        {
            var now = DateTimeOffset.UtcNow;
            return new IngestRunSummary(Guid.Empty, now, now, ImmutableArray<ProviderRunResult>.Empty);
        }

        private static ReconcileSummary CreateDefaultReconcileSummary()
        {
            var now = DateTimeOffset.UtcNow;
            return new ReconcileSummary(Guid.Empty, now, now, ImmutableArray<ReconcileProviderResult>.Empty);
        }
    }
}

View File

@@ -1,225 +1,213 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Export;
using StellaOps.Excititor.Policy;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Options;
namespace StellaOps.Excititor.WebService.Tests;
/// <summary>
/// Integration tests for the Excititor mirror distribution endpoints, backed by an
/// in-process Mongo instance (Mongo2Go) and in-memory fakes for the export/artifact
/// stores, signer, and policy evaluator.
/// </summary>
public sealed class MirrorEndpointsTests : IClassFixture<WebApplicationFactory<Program>>, IDisposable
{
    private readonly WebApplicationFactory<Program> _factory;
    private readonly Mongo2Go.MongoDbRunner _runner;

    public MirrorEndpointsTests(WebApplicationFactory<Program> factory)
    {
        _runner = Mongo2Go.MongoDbRunner.Start();
        // Derive a factory with mirror-domain configuration plus fakes so endpoints
        // run without external services. The derived factory is owned by this class.
        _factory = factory.WithWebHostBuilder(builder =>
        {
            builder.ConfigureAppConfiguration((_, configuration) =>
            {
                var data = new Dictionary<string, string?>
                {
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:Id"] = "primary",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:DisplayName"] = "Primary Mirror",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxIndexRequestsPerHour"] = "1000",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxDownloadRequestsPerHour"] = "1000",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Key"] = "consensus",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Format"] = "json",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:vulnId"] = "CVE-2025-0001",
                    [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:productKey"] = "pkg:test/demo",
                };
                configuration.AddInMemoryCollection(data!);
            });
            builder.ConfigureServices(services =>
            {
                // Point Mongo at the ephemeral runner and swap storage for fakes.
                services.RemoveAll<IMongoClient>();
                services.AddSingleton<IMongoClient>(_ => new MongoClient(_runner.ConnectionString));
                services.RemoveAll<IMongoDatabase>();
                services.AddSingleton(provider => provider.GetRequiredService<IMongoClient>().GetDatabase("mirror-tests"));
                services.RemoveAll<IVexExportStore>();
                services.AddSingleton<IVexExportStore>(provider =>
                {
                    var timeProvider = provider.GetRequiredService<TimeProvider>();
                    return new FakeExportStore(timeProvider);
                });
                services.RemoveAll<IVexArtifactStore>();
                services.AddSingleton<IVexArtifactStore>(_ => new FakeArtifactStore());
                services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF"));
                services.AddSingleton<StellaOps.Excititor.Attestation.Signing.IVexSigner, FakeSigner>();
                services.AddSingleton<StellaOps.Excititor.Policy.IVexPolicyEvaluator, FakePolicyEvaluator>();
                services.AddSingleton<IVexExportDataSource, FakeExportDataSource>();
            });
        });
    }

    [Fact]
    public async Task ListDomains_ReturnsConfiguredDomain()
    {
        var client = _factory.CreateClient();

        var response = await client.GetAsync("/excititor/mirror/domains");

        response.EnsureSuccessStatusCode();
        using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var domains = document.RootElement.GetProperty("domains");
        Assert.Equal(1, domains.GetArrayLength());
        Assert.Equal("primary", domains[0].GetProperty("id").GetString());
    }

    [Fact]
    public async Task DomainIndex_ReturnsManifestMetadata()
    {
        var client = _factory.CreateClient();

        var response = await client.GetAsync("/excititor/mirror/domains/primary/index");

        response.EnsureSuccessStatusCode();
        using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        var exports = document.RootElement.GetProperty("exports");
        Assert.Equal(1, exports.GetArrayLength());
        var entry = exports[0];
        Assert.Equal("consensus", entry.GetProperty("exportKey").GetString());
        Assert.Equal("exports/20251019T000000000Z/abcdef", entry.GetProperty("exportId").GetString());
        var artifact = entry.GetProperty("artifact");
        Assert.Equal("sha256", artifact.GetProperty("algorithm").GetString());
        Assert.Equal("deadbeef", artifact.GetProperty("digest").GetString());
    }

    [Fact]
    public async Task Download_ReturnsArtifactContent()
    {
        var client = _factory.CreateClient();

        var response = await client.GetAsync("/excititor/mirror/domains/primary/exports/consensus/download");

        response.EnsureSuccessStatusCode();
        Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType);
        var payload = await response.Content.ReadAsStringAsync();
        Assert.Equal("{\"status\":\"ok\"}", payload);
    }

    public void Dispose()
    {
        // Dispose the derived factory we own (the injected fixture instance is disposed
        // by xUnit); previously only the Mongo runner was released, leaking the host.
        _factory.Dispose();
        _runner.Dispose();
    }

    /// <summary>
    /// Export store seeded with a single JSON manifest matching the configured
    /// "consensus" export (CVE-2025-0001 / pkg:test/demo) plus its artifact bytes.
    /// </summary>
    private sealed class FakeExportStore : IVexExportStore
    {
        private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _manifests = new();

        // NOTE(review): timeProvider is currently unused — kept for registration symmetry; confirm before removing.
        public FakeExportStore(TimeProvider timeProvider)
        {
            var filters = new[]
            {
                new VexQueryFilter("vulnId", "CVE-2025-0001"),
                new VexQueryFilter("productKey", "pkg:test/demo"),
            };
            var query = VexQuery.Create(filters, Enumerable.Empty<VexQuerySort>());
            var signature = VexQuerySignature.FromQuery(query);
            var createdAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero);
            var manifest = new VexExportManifest(
                "exports/20251019T000000000Z/abcdef",
                signature,
                VexExportFormat.Json,
                createdAt,
                new VexContentAddress("sha256", "deadbeef"),
                1,
                new[] { "primary" },
                fromCache: false,
                consensusRevision: "rev-1",
                attestation: new VexAttestationMetadata("https://stella-ops.org/attestations/vex-export"),
                sizeBytes: 16);
            _manifests.TryAdd((signature.Value, VexExportFormat.Json), manifest);
            // Seed artifact content for download test.
            FakeArtifactStore.Seed(manifest.Artifact, "{\"status\":\"ok\"}");
        }

        public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null)
        {
            _manifests.TryGetValue((signature.Value, format), out var manifest);
            return ValueTask.FromResult(manifest);
        }

        // Persisting manifests is a no-op for these read-only tests.
        public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? session = null)
            => ValueTask.CompletedTask;
    }

    /// <summary>In-memory content-addressed artifact store shared via a static map.</summary>
    private sealed class FakeArtifactStore : IVexArtifactStore
    {
        private static readonly ConcurrentDictionary<VexContentAddress, byte[]> Content = new();

        // Pre-populates content so downloads succeed without a prior SaveAsync.
        public static void Seed(VexContentAddress contentAddress, string payload)
        {
            var bytes = System.Text.Encoding.UTF8.GetBytes(payload);
            Content[contentAddress] = bytes;
        }

        public ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
        {
            Content[artifact.ContentAddress] = artifact.Content.ToArray();
            return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory://artifact", artifact.Content.Length, artifact.Metadata));
        }

        public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
        {
            Content.TryRemove(contentAddress, out _);
            return ValueTask.CompletedTask;
        }

        public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
        {
            if (!Content.TryGetValue(contentAddress, out var bytes))
            {
                return ValueTask.FromResult<Stream?>(null);
            }
            return ValueTask.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
        }
    }

    /// <summary>Signer stub returning a fixed signature/key pair.</summary>
    private sealed class FakeSigner : StellaOps.Excititor.Attestation.Signing.IVexSigner
    {
        public ValueTask<StellaOps.Excititor.Attestation.Signing.VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
            => ValueTask.FromResult(new StellaOps.Excititor.Attestation.Signing.VexSignedPayload("signature", "key"));
    }

    /// <summary>Policy stub that accepts every claim with weight 1.0.</summary>
    private sealed class FakePolicyEvaluator : StellaOps.Excititor.Policy.IVexPolicyEvaluator
    {
        public string Version => "test";
        public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default;
        public double GetProviderWeight(VexProvider provider) => 1.0;
        public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason)
        {
            rejectionReason = null;
            return true;
        }
    }

    /// <summary>Export data source returning an empty dataset.</summary>
    private sealed class FakeExportDataSource : IVexExportDataSource
    {
        public ValueTask<VexExportDataSet> FetchAsync(VexQuery query, CancellationToken cancellationToken)
        {
            var dataset = new VexExportDataSet(ImmutableArray<VexConsensus>.Empty, ImmutableArray<VexClaim>.Empty, ImmutableArray<string>.Empty);
            return ValueTask.FromResult(dataset);
        }
    }
}
using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using EphemeralMongo;
using MongoRunner = EphemeralMongo.MongoRunner;
using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions;
using MongoDB.Driver;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Export;
using StellaOps.Excititor.Policy;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Options;
namespace StellaOps.Excititor.WebService.Tests;
// Integration tests for the Excititor mirror distribution endpoints, backed by an
// ephemeral Mongo instance (EphemeralMongo single-node replica set) plus in-memory
// fakes for the export store, artifact store, signer, and policy evaluator.
public sealed class MirrorEndpointsTests : IDisposable
{
// Test host wrapper; presumably a shared helper that applies the delegates below — confirm against its definition.
private readonly TestWebApplicationFactory _factory;
private readonly IMongoRunner _runner;
public MirrorEndpointsTests()
{
// Replica set is required here — NOTE(review): presumably because the service uses Mongo transactions/change streams; confirm.
_runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true });
_factory = new TestWebApplicationFactory(
configureConfiguration: configuration =>
{
// Points storage at the ephemeral Mongo and declares one mirror domain
// ("primary") exposing a single JSON "consensus" export.
var data = new Dictionary<string, string?>
{
["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString,
["Excititor:Storage:Mongo:DatabaseName"] = "mirror-tests",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:Id"] = "primary",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:DisplayName"] = "Primary Mirror",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxIndexRequestsPerHour"] = "1000",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxDownloadRequestsPerHour"] = "1000",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Key"] = "consensus",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Format"] = "json",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:vulnId"] = "CVE-2025-0001",
[$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:productKey"] = "pkg:test/demo",
};
configuration.AddInMemoryCollection(data!);
},
configureServices: services =>
{
// Shared test overrides first, then replace storage with in-memory fakes.
TestServiceOverrides.Apply(services);
services.RemoveAll<IVexExportStore>();
services.AddSingleton<IVexExportStore>(provider =>
{
var timeProvider = provider.GetRequiredService<TimeProvider>();
return new FakeExportStore(timeProvider);
});
services.RemoveAll<IVexArtifactStore>();
services.AddSingleton<IVexArtifactStore>(_ => new FakeArtifactStore());
services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF"));
services.AddSingleton<StellaOps.Excititor.Attestation.Signing.IVexSigner, FakeSigner>();
services.AddSingleton<StellaOps.Excititor.Policy.IVexPolicyEvaluator, FakePolicyEvaluator>();
});
}
// Verifies the configured "primary" domain is the only one listed.
[Fact]
public async Task ListDomains_ReturnsConfiguredDomain()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/excititor/mirror/domains");
response.EnsureSuccessStatusCode();
using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
var domains = document.RootElement.GetProperty("domains");
Assert.Equal(1, domains.GetArrayLength());
Assert.Equal("primary", domains[0].GetProperty("id").GetString());
}
// Verifies the domain index surfaces the seeded manifest's id and content address.
[Fact]
public async Task DomainIndex_ReturnsManifestMetadata()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/excititor/mirror/domains/primary/index");
response.EnsureSuccessStatusCode();
using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
var exports = document.RootElement.GetProperty("exports");
Assert.Equal(1, exports.GetArrayLength());
var entry = exports[0];
Assert.Equal("consensus", entry.GetProperty("exportKey").GetString());
Assert.Equal("exports/20251019T000000000Z/abcdef", entry.GetProperty("exportId").GetString());
var artifact = entry.GetProperty("artifact");
Assert.Equal("sha256", artifact.GetProperty("algorithm").GetString());
Assert.Equal("deadbeef", artifact.GetProperty("digest").GetString());
}
// Verifies download streams back the exact bytes seeded into the fake artifact store.
[Fact]
public async Task Download_ReturnsArtifactContent()
{
var client = _factory.CreateClient();
var response = await client.GetAsync("/excititor/mirror/domains/primary/exports/consensus/download");
response.EnsureSuccessStatusCode();
Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType);
var payload = await response.Content.ReadAsStringAsync();
Assert.Equal("{\"status\":\"ok\"}", payload);
}
public void Dispose()
{
// Dispose the host before the Mongo runner it connects to.
_factory.Dispose();
_runner.Dispose();
}
// Export store seeded with a single JSON manifest matching the configured
// "consensus" export (CVE-2025-0001 / pkg:test/demo) and its artifact bytes.
private sealed class FakeExportStore : IVexExportStore
{
private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _manifests = new();
// NOTE(review): timeProvider is unused in this body — kept for registration symmetry; confirm before removing.
public FakeExportStore(TimeProvider timeProvider)
{
var filters = new[]
{
new VexQueryFilter("vulnId", "CVE-2025-0001"),
new VexQueryFilter("productKey", "pkg:test/demo"),
};
var query = VexQuery.Create(filters, Enumerable.Empty<VexQuerySort>());
var signature = VexQuerySignature.FromQuery(query);
var createdAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero);
var manifest = new VexExportManifest(
"exports/20251019T000000000Z/abcdef",
signature,
VexExportFormat.Json,
createdAt,
new VexContentAddress("sha256", "deadbeef"),
1,
new[] { "primary" },
fromCache: false,
consensusRevision: "rev-1",
attestation: new VexAttestationMetadata("https://stella-ops.org/attestations/vex-export"),
sizeBytes: 16);
_manifests.TryAdd((signature.Value, VexExportFormat.Json), manifest);
// Seed artifact content for download test.
FakeArtifactStore.Seed(manifest.Artifact, "{\"status\":\"ok\"}");
}
public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
_manifests.TryGetValue((signature.Value, format), out var manifest);
return ValueTask.FromResult(manifest);
}
// Persisting manifests is a no-op for these read-only tests.
public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? session = null)
=> ValueTask.CompletedTask;
}
// In-memory content-addressed artifact store; static map so Seed can run before DI resolves the instance.
private sealed class FakeArtifactStore : IVexArtifactStore
{
private static readonly ConcurrentDictionary<VexContentAddress, byte[]> Content = new();
// Pre-populates content so downloads succeed without a prior SaveAsync.
public static void Seed(VexContentAddress contentAddress, string payload)
{
var bytes = System.Text.Encoding.UTF8.GetBytes(payload);
Content[contentAddress] = bytes;
}
public ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
{
Content[artifact.ContentAddress] = artifact.Content.ToArray();
return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory://artifact", artifact.Content.Length, artifact.Metadata));
}
public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
{
Content.TryRemove(contentAddress, out _);
return ValueTask.CompletedTask;
}
// Returns null (not an exception) when the address is unknown.
public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
{
if (!Content.TryGetValue(contentAddress, out var bytes))
{
return ValueTask.FromResult<Stream?>(null);
}
return ValueTask.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
}
}
// Signer stub returning a fixed signature/key pair.
private sealed class FakeSigner : StellaOps.Excititor.Attestation.Signing.IVexSigner
{
public ValueTask<StellaOps.Excititor.Attestation.Signing.VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
=> ValueTask.FromResult(new StellaOps.Excititor.Attestation.Signing.VexSignedPayload("signature", "key"));
}
// Policy stub that accepts every claim with weight 1.0.
private sealed class FakePolicyEvaluator : StellaOps.Excititor.Policy.IVexPolicyEvaluator
{
public string Version => "test";
public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default;
public double GetProviderWeight(VexProvider provider) => 1.0;
public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason)
{
rejectionReason = null;
return true;
}
}
}

Some files were not shown because too many files have changed in this diff Show More