This commit is contained in:
StellaOps Bot
2025-12-13 02:22:15 +02:00
parent 564df71bfb
commit 999e26a48e
395 changed files with 25045 additions and 2224 deletions

View File

@@ -3,7 +3,7 @@ using StellaOps.AirGap.Importer.Models;
namespace StellaOps.AirGap.Importer.Repositories;
/// <summary>
/// Deterministic in-memory implementations suitable for offline tests and as a template for Mongo-backed repos.
/// Deterministic in-memory implementations suitable for offline tests and as a template for persistent storage repos.
/// Enforces tenant isolation and stable ordering (by BundleId then Path).
/// </summary>
public sealed class InMemoryBundleCatalogRepository : IBundleCatalogRepository

View File

@@ -1037,7 +1037,7 @@ paths:
value:
status: degraded
service: policy
reason: mongo unavailable
reason: database unavailable
timestamp: 2025-11-18T00:00:00Z
x-service: policy
x-original-path: /health

View File

@@ -46,7 +46,7 @@ paths:
value:
status: degraded
service: policy
reason: mongo unavailable
reason: database unavailable
timestamp: '2025-11-18T00:00:00Z'
/healthz:
get:

View File

@@ -1037,7 +1037,7 @@ paths:
value:
status: degraded
service: policy
reason: mongo unavailable
reason: database unavailable
timestamp: 2025-11-18T00:00:00Z
x-service: policy
x-original-path: /health

View File

@@ -16,7 +16,7 @@ public sealed class AttestorOptions
public SigningOptions Signing { get; set; } = new();
public MongoOptions Mongo { get; set; } = new();
public StorageOptions Storage { get; set; } = new();
public RedisOptions Redis { get; set; } = new();
@@ -122,7 +122,7 @@ public sealed class AttestorOptions
public bool Enabled { get; set; }
}
public sealed class MongoOptions
public sealed class StorageOptions
{
public string? Uri { get; set; }

View File

@@ -4,7 +4,7 @@ using System.Collections.Generic;
namespace StellaOps.Attestor.Core.Storage;
/// <summary>
/// Canonical representation of a Rekor entry persisted in Mongo.
/// Canonical representation of a Rekor entry persisted in storage.
/// </summary>
public sealed class AttestorEntry
{

View File

@@ -22,7 +22,7 @@
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
<PackageReference Include="AWSSDK.S3" Version="4.0.2" />
</ItemGroup>
</Project>

View File

@@ -190,8 +190,8 @@ internal sealed class AttestorWebApplicationFactory : WebApplicationFactory<Prog
["attestor:s3:endpoint"] = "http://localhost",
["attestor:s3:useTls"] = "false",
["attestor:redis:url"] = string.Empty,
["attestor:mongo:uri"] = "mongodb://localhost:27017/attestor-tests",
["attestor:mongo:database"] = "attestor-tests"
["attestor:postgres:connectionString"] = "Host=localhost;Port=5432;Database=attestor-tests",
["attestor:postgres:database"] = "attestor-tests"
};
configuration.AddInMemoryCollection(settings!);

View File

@@ -15,7 +15,7 @@
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />

View File

@@ -22,7 +22,7 @@ public sealed class LdapClientProvisioningStoreTests
private readonly TestTimeProvider timeProvider = new(new DateTimeOffset(2025, 11, 9, 8, 0, 0, TimeSpan.Zero));
[Fact]
public async Task CreateOrUpdateAsync_WritesToMongoLdapAndAudit()
public async Task CreateOrUpdateAsync_WritesToStorageLdapAndAudit()
{
var clientStore = new TrackingClientStore();
var revocationStore = new TrackingRevocationStore();

View File

@@ -1,4 +1,4 @@
namespace StellaOps.Authority.Storage.Mongo.Documents;
namespace StellaOps.Authority.Storage.Documents;
/// <summary>
/// Represents a bootstrap invite document.

View File

@@ -1,4 +1,4 @@
namespace StellaOps.Authority.Storage.Mongo.Documents;
namespace StellaOps.Authority.Storage.Documents;
/// <summary>
/// Result status for token usage recording.

View File

@@ -4,7 +4,7 @@ using StellaOps.Authority.Storage.InMemory.Initialization;
using StellaOps.Authority.Storage.InMemory.Sessions;
using StellaOps.Authority.Storage.InMemory.Stores;
namespace StellaOps.Authority.Storage.Mongo.Extensions;
namespace StellaOps.Authority.Storage.Extensions;
/// <summary>
/// Compatibility shim for storage options. In PostgreSQL mode, these are largely unused.
@@ -17,16 +17,16 @@ public sealed class AuthorityStorageOptions
}
/// <summary>
/// Extension methods for configuring Authority MongoDB compatibility storage services.
/// In PostgreSQL mode, this registers in-memory implementations for the Mongo interfaces.
/// Extension methods for configuring Authority storage compatibility services.
/// In PostgreSQL mode, this registers in-memory implementations for the storage interfaces.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds Authority MongoDB compatibility storage services (in-memory implementations).
/// Adds Authority storage compatibility services (in-memory implementations).
/// For production PostgreSQL storage, use AddAuthorityPostgresStorage from StellaOps.Authority.Storage.Postgres.
/// </summary>
public static IServiceCollection AddAuthorityMongoStorage(
public static IServiceCollection AddAuthorityInMemoryStorage(
this IServiceCollection services,
Action<AuthorityStorageOptions> configureOptions)
{
@@ -34,11 +34,11 @@ public static class ServiceCollectionExtensions
configureOptions(options);
services.AddSingleton(options);
RegisterMongoCompatServices(services, options);
RegisterInMemoryServices(services, options);
return services;
}
private static void RegisterMongoCompatServices(IServiceCollection services, AuthorityStorageOptions options)
private static void RegisterInMemoryServices(IServiceCollection services, AuthorityStorageOptions options)
{
// Register the initializer (no-op for Postgres mode)
services.AddSingleton<AuthorityStorageInitializer>();

View File

@@ -1,59 +1,59 @@
using MongoDB.Bson;
using StellaOps.Storage.Documents;
namespace MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.Storage.Serialization.Attributes;
/// <summary>
/// Compatibility shim for MongoDB BsonId attribute.
/// Compatibility shim for storage Id attribute.
/// In PostgreSQL mode, this attribute is ignored but allows code to compile.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public class BsonIdAttribute : Attribute
public class StorageIdAttribute : Attribute
{
}
/// <summary>
/// Compatibility shim for MongoDB BsonElement attribute.
/// Compatibility shim for storage Element attribute.
/// In PostgreSQL mode, this attribute is ignored but allows code to compile.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public class BsonElementAttribute : Attribute
public class StorageElementAttribute : Attribute
{
public string ElementName { get; }
public BsonElementAttribute(string elementName)
public StorageElementAttribute(string elementName)
{
ElementName = elementName;
}
}
/// <summary>
/// Compatibility shim for MongoDB BsonIgnore attribute.
/// Compatibility shim for storage Ignore attribute.
/// In PostgreSQL mode, this attribute is ignored but allows code to compile.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public class BsonIgnoreAttribute : Attribute
public class StorageIgnoreAttribute : Attribute
{
}
/// <summary>
/// Compatibility shim for MongoDB BsonIgnoreIfNull attribute.
/// Compatibility shim for storage IgnoreIfNull attribute.
/// In PostgreSQL mode, this attribute is ignored but allows code to compile.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public class BsonIgnoreIfNullAttribute : Attribute
public class StorageIgnoreIfNullAttribute : Attribute
{
}
/// <summary>
/// Compatibility shim for MongoDB BsonRepresentation attribute.
/// Compatibility shim for storage Representation attribute.
/// In PostgreSQL mode, this attribute is ignored but allows code to compile.
/// </summary>
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)]
public class BsonRepresentationAttribute : Attribute
public class StorageRepresentationAttribute : Attribute
{
public BsonType Representation { get; }
public StorageType Representation { get; }
public BsonRepresentationAttribute(BsonType representation)
public StorageRepresentationAttribute(StorageType representation)
{
Representation = representation;
}

View File

@@ -1,7 +1,7 @@
namespace MongoDB.Bson;
namespace StellaOps.Storage.Documents;
/// <summary>
/// Compatibility shim for MongoDB ObjectId.
/// Compatibility shim for storage ObjectId.
/// In PostgreSQL mode, this wraps a GUID string.
/// </summary>
public readonly struct ObjectId : IEquatable<ObjectId>, IComparable<ObjectId>
@@ -51,9 +51,9 @@ public readonly struct ObjectId : IEquatable<ObjectId>, IComparable<ObjectId>
}
/// <summary>
/// Compatibility shim for MongoDB BsonType enum.
/// Compatibility shim for storage document type enum.
/// </summary>
public enum BsonType
public enum StorageType
{
EndOfDocument = 0,
Double = 1,

View File

@@ -1,7 +1,7 @@
namespace StellaOps.Authority.Storage.Mongo.Sessions;
namespace StellaOps.Authority.Storage.Sessions;
/// <summary>
/// Compatibility shim for MongoDB session handle. In PostgreSQL mode, this is unused.
/// Compatibility shim for database session handle. In PostgreSQL mode, this is unused.
/// </summary>
public interface IClientSessionHandle : IDisposable
{

View File

@@ -6,8 +6,8 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Authority.Storage.Mongo</RootNamespace>
<Description>MongoDB compatibility shim for Authority storage - provides in-memory implementations for Mongo interfaces while PostgreSQL migration is in progress</Description>
<RootNamespace>StellaOps.Authority.Storage.InMemory</RootNamespace>
<Description>In-memory storage shim for Authority - provides in-memory implementations for storage interfaces while PostgreSQL migration is in progress</Description>
</PropertyGroup>
<ItemGroup>

View File

@@ -109,7 +109,7 @@ public sealed class AuthorityAdvisoryAiConsentEvaluatorTests
Issuer = new Uri("https://authority.test")
};
options.Storage.ConnectionString = "mongodb://localhost:27017/authority";
options.Storage.ConnectionString = "Host=localhost;Port=5432;Database=authority";
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";

View File

@@ -107,9 +107,9 @@ public sealed class AuthorityWebApplicationFactory : WebApplicationFactory<Progr
services.RemoveAll<IAuthorityRevocationExportStateStore>();
services.RemoveAll<IAuthoritySessionAccessor>();
services.AddAuthorityMongoStorage(options =>
services.AddAuthorityInMemoryStorage(options =>
{
options.ConnectionString = "mongodb://localhost/authority-tests";
options.ConnectionString = "Host=localhost;Database=authority-tests";
options.DatabaseName = "authority-tests";
});
});

View File

@@ -120,7 +120,7 @@ public sealed class AuthorityAckTokenIssuerTests
return new StellaOpsAuthorityOptions
{
Issuer = new Uri("https://authority.test"),
Storage = { ConnectionString = "mongodb://localhost/test" },
Storage = { ConnectionString = "Host=localhost;Database=test" },
Notifications =
{
AckTokens =

View File

@@ -81,7 +81,7 @@ public sealed class AuthorityAckTokenKeyManagerTests
return new StellaOpsAuthorityOptions
{
Issuer = new Uri("https://authority.test"),
Storage = { ConnectionString = "mongodb://localhost/test" },
Storage = { ConnectionString = "Host=localhost;Database=test" },
Notifications =
{
AckTokens =

View File

@@ -44,7 +44,7 @@ public sealed class AuthorityWebhookAllowlistEvaluatorTests
return new StellaOpsAuthorityOptions
{
Issuer = new Uri("https://authority.test"),
Storage = { ConnectionString = "mongodb://localhost/test" },
Storage = { ConnectionString = "Host=localhost;Database=test" },
Notifications =
{
Webhooks =

View File

@@ -550,7 +550,7 @@ public class ClientCredentialsHandlersTests
await validateHandler.HandleAsync(validateContext);
Assert.False(validateContext.IsRejected);
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handleHandler = new HandleClientCredentialsHandler(
registry,
tokenStore,
@@ -2485,7 +2485,7 @@ public class ClientCredentialsHandlersTests
await validateHandler.HandleAsync(validateContext);
Assert.False(validateContext.IsRejected);
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handleHandler = new HandleClientCredentialsHandler(
registry,
tokenStore,
@@ -2691,14 +2691,14 @@ public class ClientCredentialsHandlersTests
var handleHandler = new HandleClientCredentialsHandler(
registry,
tokenStore,
new NullMongoSessionAccessor(),
new NullSessionAccessor(),
rateMetadata,
TimeProvider.System,
TestInstruments.ActivitySource,
NullLogger<HandleClientCredentialsHandler>.Instance);
var persistHandler = new PersistTokensHandler(
tokenStore,
new NullMongoSessionAccessor(),
new NullSessionAccessor(),
TimeProvider.System,
TestInstruments.ActivitySource,
NullLogger<PersistTokensHandler>.Instance);
@@ -2742,7 +2742,7 @@ public class ClientCredentialsHandlersTests
var tokenStore = new TestTokenStore();
var persistHandler = new PersistTokensHandler(
tokenStore,
new NullMongoSessionAccessor(),
new NullSessionAccessor(),
TimeProvider.System,
TestInstruments.ActivitySource,
NullLogger<PersistTokensHandler>.Instance);
@@ -2799,7 +2799,7 @@ public class ClientCredentialsHandlersTests
options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer");
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences);
var clientDocument = CreateClient(
@@ -2944,7 +2944,7 @@ public class ClientCredentialsHandlersTests
options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear();
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
var clientDocument = CreateClient(
secret: "s3cr3t!",
@@ -3009,7 +3009,7 @@ public class ClientCredentialsHandlersTests
options.Security.SenderConstraints.Mtls.Enabled = true;
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
var clientDocument = CreateClient(
secret: "s3cr3t!",
@@ -3151,7 +3151,7 @@ public class ClientCredentialsHandlersTests
var descriptor = CreateDescriptor(clientDocument);
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor);
var tokenStore = new TestTokenStore();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var authSink = new TestAuthEventSink();
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var serviceAccountStore = new TestServiceAccountStore();
@@ -3240,7 +3240,7 @@ public class ClientCredentialsHandlersTests
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
var tokenStore = new TestTokenStore();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var authSink = new TestAuthEventSink();
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
@@ -3323,7 +3323,7 @@ public class ClientCredentialsHandlersTests
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
var tokenStore = new TestTokenStore();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var authSink = new TestAuthEventSink();
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
@@ -3424,7 +3424,7 @@ public class ClientCredentialsHandlersTests
var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
var tokenStore = new TestTokenStore();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var authSink = new TestAuthEventSink();
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var serviceAccountStore = new TestServiceAccountStore(serviceAccount);
@@ -3498,7 +3498,7 @@ public class TokenValidationHandlersTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3548,7 +3548,7 @@ public class TokenValidationHandlersTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3603,7 +3603,7 @@ public class TokenValidationHandlersTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3654,7 +3654,7 @@ public class TokenValidationHandlersTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3704,7 +3704,7 @@ public class TokenValidationHandlersTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3755,7 +3755,7 @@ public class TokenValidationHandlersTests
var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor();
var auditSinkSuccess = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
new TestTokenStore(),
sessionAccessor,
@@ -3812,7 +3812,7 @@ public class TokenValidationHandlersTests
var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null);
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -3886,7 +3886,7 @@ public class TokenValidationHandlersTests
clientDocument.ClientId = "agent";
var auditSink = new TestAuthEventSink();
var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null);
var sessionAccessorReplay = new NullMongoSessionAccessor();
var sessionAccessorReplay = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessorReplay,
@@ -3939,7 +3939,7 @@ public class AuthorityClientCertificateValidatorTests
options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri");
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
using var rsa = RSA.Create(2048);
var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
@@ -3977,7 +3977,7 @@ public class AuthorityClientCertificateValidatorTests
options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5);
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
using var rsa = RSA.Create(2048);
var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
@@ -4017,7 +4017,7 @@ public class AuthorityClientCertificateValidatorTests
options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
using var rsa = RSA.Create(2048);
var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
@@ -4055,7 +4055,7 @@ public class AuthorityClientCertificateValidatorTests
options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
using var rsa = RSA.Create(2048);
var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
@@ -4475,7 +4475,7 @@ internal sealed class StubCertificateValidator : IAuthorityClientCertificateVali
}
}
internal sealed class NullMongoSessionAccessor : IAuthoritySessionAccessor
internal sealed class NullSessionAccessor : IAuthoritySessionAccessor
{
public IClientSessionHandle? CurrentSession => null;
@@ -4506,7 +4506,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -4562,7 +4562,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -4620,7 +4620,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -4682,7 +4682,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -4818,7 +4818,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -4879,7 +4879,7 @@ public class ObservabilityIncidentTokenHandlerTests
var metadataAccessor = new TestRateLimiterMetadataAccessor();
var auditSink = new TestAuthEventSink();
var sessionAccessor = new NullMongoSessionAccessor();
var sessionAccessor = new NullSessionAccessor();
var handler = new ValidateAccessTokenHandler(
tokenStore,
sessionAccessor,
@@ -5166,7 +5166,7 @@ internal static class TestHelpers
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost/test";
options.Storage.ConnectionString = "Host=localhost;Database=test";
configure?.Invoke(options);
return options;

View File

@@ -780,7 +780,7 @@ public class PasswordGrantHandlersTests
};
options.Signing.ActiveKeyId = "test-key";
options.Signing.KeyPath = "/tmp/test-key.pem";
options.Storage.ConnectionString = "mongodb://localhost:27017/authority";
options.Storage.ConnectionString = "Host=localhost;Port=5432;Database=authority";
configure?.Invoke(options);
return options;

View File

@@ -40,7 +40,7 @@ public sealed class VulnPermalinkServiceTests
var options = new StellaOpsAuthorityOptions
{
Issuer = new Uri("https://authority.test"),
Storage = { ConnectionString = "mongodb://localhost/test" },
Storage = { ConnectionString = "Host=localhost;Database=test" },
Signing =
{
Enabled = true,

View File

@@ -88,7 +88,7 @@ public class AuthorityRateLimiterIntegrationTests
Issuer = new Uri("https://authority.integration.test"),
SchemaVersion = 1
};
options.Storage.ConnectionString = "mongodb://localhost/authority";
options.Storage.ConnectionString = "Host=localhost;Database=authority";
configure?.Invoke(options);

View File

@@ -74,7 +74,7 @@ public class AuthorityRateLimiterTests
SchemaVersion = 1
};
options.Storage.ConnectionString = "mongodb://localhost/authority";
options.Storage.ConnectionString = "Host=localhost;Database=authority";
return options;
}
}

View File

@@ -119,7 +119,7 @@ public sealed class AuthorityJwksServiceTests
Issuer = new Uri("https://authority.test"),
Storage =
{
ConnectionString = "mongodb://localhost/test"
ConnectionString = "Host=localhost;Database=test"
},
Signing =
{

View File

@@ -34,7 +34,7 @@ public sealed class AuthoritySigningKeyManagerTests
var options = new StellaOpsAuthorityOptions
{
Issuer = new Uri("https://authority.test"),
Storage = { ConnectionString = "mongodb://localhost/test" },
Storage = { ConnectionString = "Host=localhost;Database=test" },
Signing =
{
Enabled = true,

View File

@@ -10,7 +10,7 @@ internal static class TestEnvironment
OpenSslLegacyShim.EnsureOpenSsl11();
Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_ISSUER", "https://authority.test");
Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_STORAGE__CONNECTIONSTRING", "mongodb://localhost/authority");
Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_STORAGE__CONNECTIONSTRING", "Host=localhost;Database=authority");
Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_SIGNING__ENABLED", "false");
}
}

View File

@@ -19,7 +19,7 @@ using Microsoft.Net.Http.Headers;
using OpenIddict.Abstractions;
using OpenIddict.Server;
using OpenIddict.Server.AspNetCore;
// MongoDB.Driver removed - using PostgreSQL storage with Mongo compatibility shim
// Using PostgreSQL storage with in-memory compatibility shim
using Serilog;
using Serilog.Events;
using StellaOps.Authority;

View File

@@ -18,7 +18,7 @@
<PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
<PackageReference Include="YamlDotNet" Version="13.7.1" />
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj" />

View File

@@ -67,7 +67,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<ITokenRepository>(sp => sp.GetRequiredService<TokenRepository>());
services.AddScoped<IRefreshTokenRepository>(sp => sp.GetRequiredService<RefreshTokenRepository>());
// Mongo-store equivalents (PostgreSQL-backed)
// Additional stores (PostgreSQL-backed)
services.AddScoped<BootstrapInviteRepository>();
services.AddScoped<ServiceAccountRepository>();
services.AddScoped<ClientRepository>();

View File

@@ -16,7 +16,7 @@ public sealed class BaselineLoaderTests
{
await File.WriteAllTextAsync(
path,
"scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" +
"scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_insert_throughput_per_sec,min_insert_throughput_per_sec,max_allocated_mb\n" +
"lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n");
var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
@@ -27,7 +27,7 @@ public sealed class BaselineLoaderTests
Assert.Equal(5000, entry.Value.Observations);
Assert.Equal(500, entry.Value.Aliases);
Assert.Equal(360.9, entry.Value.MaxTotalMs);
Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
Assert.Equal(16500.0, entry.Value.MinInsertThroughputPerSecond);
Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
}
finally

View File

@@ -24,7 +24,7 @@ public sealed class BenchmarkScenarioReportTests
AllocationStatistics: new AllocationStatistics(120),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MinInsertThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var baseline = new BaselineEntry(
@@ -40,15 +40,15 @@ public sealed class BenchmarkScenarioReportTests
MeanCorrelationMs: 90,
MeanThroughputPerSecond: 9000,
MinThroughputPerSecond: 8500,
MeanMongoThroughputPerSecond: 10000,
MinMongoThroughputPerSecond: 9500,
MeanInsertThroughputPerSecond: 10000,
MinInsertThroughputPerSecond: 9500,
MaxAllocatedMb: 100);
var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
Assert.True(report.DurationRegressionBreached);
Assert.True(report.ThroughputRegressionBreached);
Assert.True(report.MongoThroughputRegressionBreached);
Assert.True(report.InsertThroughputRegressionBreached);
Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
}
@@ -70,7 +70,7 @@ public sealed class BenchmarkScenarioReportTests
AllocationStatistics: new AllocationStatistics(64),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MinInsertThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

View File

@@ -13,6 +13,6 @@ internal sealed record BaselineEntry(
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MeanInsertThroughputPerSecond,
double MinInsertThroughputPerSecond,
double MaxAllocatedMb);

View File

@@ -55,8 +55,8 @@ internal static class BaselineLoader
MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MeanInsertThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinInsertThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
result[entry.ScenarioId] = entry;

View File

@@ -6,7 +6,7 @@ namespace StellaOps.Bench.LinkNotMerge;
internal sealed record BenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MinInsertThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
@@ -49,7 +49,7 @@ internal sealed record BenchmarkConfig(
return new BenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinMongoThroughputPerSecond,
model.MinInsertThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
@@ -63,8 +63,8 @@ internal sealed record BenchmarkConfig(
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("minInsertThroughputPerSecond")]
public double? MinInsertThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
@@ -127,8 +127,8 @@ internal sealed class LinkNotMergeScenarioConfig
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("minInsertThroughputPerSecond")]
public double? MinInsertThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }

View File

@@ -29,11 +29,11 @@ internal static class Program
var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);
var insertThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);
var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
var insertThroughputFloor = scenario.MinInsertThroughputPerSecond ?? options.MinInsertThroughputPerSecond ?? config.MinInsertThroughputPerSecond;
var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
var result = new ScenarioResult(
@@ -47,11 +47,11 @@ internal static class Program
insertStats,
correlationStats,
throughputStats,
mongoThroughputStats,
insertThroughputStats,
allocationStats,
thresholdMs,
throughputFloor,
mongoThroughputFloor,
insertThroughputFloor,
allocationLimit);
results.Add(result);
@@ -66,9 +66,9 @@ internal static class Program
failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
}
if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
if (insertThroughputFloor is { } insertFloor && result.InsertThroughputStatistics.MinPerSecond < insertFloor)
{
failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
failures.Add($"{result.Id} fell below insert throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {insertFloor:N0} ops/s");
}
if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
@@ -131,7 +131,7 @@ internal static class Program
int? Iterations,
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MinInsertThroughputPerSecond,
double? MaxAllocatedMb,
string? CsvOutPath,
string? JsonOutPath,
@@ -150,7 +150,7 @@ internal static class Program
int? iterations = null;
double? thresholdMs = null;
double? minThroughput = null;
double? minMongoThroughput = null;
double? minInsertThroughput = null;
double? maxAllocated = null;
string? csvOut = null;
string? jsonOut = null;
@@ -181,9 +181,9 @@ internal static class Program
EnsureNext(args, index);
minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-mongo-throughput":
case "--min-insert-throughput":
EnsureNext(args, index);
minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
minInsertThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--max-allocated-mb":
EnsureNext(args, index);
@@ -236,7 +236,7 @@ internal static class Program
iterations,
thresholdMs,
minThroughput,
minMongoThroughput,
minInsertThroughput,
maxAllocated,
csvOut,
jsonOut,
@@ -281,7 +281,7 @@ internal static class Program
Console.WriteLine(" --iterations <count> Override iteration count.");
Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
Console.WriteLine(" --min-throughput <value> Global throughput floor (observations/second).");
Console.WriteLine(" --min-mongo-throughput <value> Mongo insert throughput floor (ops/second).");
Console.WriteLine(" --min-insert-throughput <value> Insert throughput floor (ops/second).");
Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
Console.WriteLine(" --csv <path> Write CSV results to path.");
Console.WriteLine(" --json <path> Write JSON results to path.");
@@ -299,7 +299,7 @@ internal static class TablePrinter
{
public static void Print(IEnumerable<ScenarioResult> results)
{
Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Ins k/s | Alloc(MB)");
Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
foreach (var row in results)
{
@@ -313,7 +313,7 @@ internal static class TablePrinter
row.CorrelationMeanColumn,
row.InsertMeanColumn,
row.ThroughputColumn,
row.MongoThroughputColumn,
row.InsertThroughputColumn,
row.AllocatedColumn,
}));
}
@@ -336,7 +336,7 @@ internal static class CsvWriter
using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
using var writer = new StreamWriter(stream);
writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");
writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_insert_throughput_per_sec,min_insert_throughput_per_sec,max_allocated_mb");
foreach (var result in results)
{

View File

@@ -62,7 +62,7 @@ internal static class BenchmarkJsonWriter
report.Result.AllocationStatistics.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinThroughputThresholdPerSecond,
report.Result.MinMongoThroughputThresholdPerSecond,
report.Result.MinInsertThroughputThresholdPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
@@ -78,13 +78,13 @@ internal static class BenchmarkJsonWriter
baseline.MeanCorrelationMs,
baseline.MeanThroughputPerSecond,
baseline.MinThroughputPerSecond,
baseline.MeanMongoThroughputPerSecond,
baseline.MinMongoThroughputPerSecond,
baseline.MeanInsertThroughputPerSecond,
baseline.MinInsertThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ThroughputRegressionRatio,
report.MongoThroughputRegressionRatio,
report.InsertThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
@@ -110,12 +110,12 @@ internal static class BenchmarkJsonWriter
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MeanInsertThroughputPerSecond,
double MinInsertThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MinMongoThroughputThresholdPerSecond,
double? MinInsertThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
@@ -132,14 +132,14 @@ internal static class BenchmarkJsonWriter
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MeanInsertThroughputPerSecond,
double MinInsertThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ThroughputRatio,
double? MongoThroughputRatio,
double? InsertThroughputRatio,
double Limit,
bool Breached);
}

View File

@@ -13,7 +13,7 @@ internal sealed class BenchmarkScenarioReport
RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
InsertThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinInsertThroughputPerSecond);
}
public ScenarioResult Result { get; }
@@ -26,15 +26,15 @@ internal sealed class BenchmarkScenarioReport
public double? ThroughputRegressionRatio { get; }
public double? MongoThroughputRegressionRatio { get; }
public double? InsertThroughputRegressionRatio { get; }
public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool InsertThroughputRegressionBreached => InsertThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;
public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || InsertThroughputRegressionBreached;
public IEnumerable<string> BuildRegressionFailureMessages()
{
@@ -55,10 +55,10 @@ internal sealed class BenchmarkScenarioReport
yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
}
if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
if (InsertThroughputRegressionBreached && InsertThroughputRegressionRatio is { } insertRatio)
{
var delta = (mongoRatio - 1d) * 100d;
yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
var delta = (insertRatio - 1d) * 100d;
yield return $"{Result.Id} insert throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinInsertThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
}
}

View File

@@ -22,12 +22,12 @@ internal static class PrometheusWriter
builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).");
builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark insert duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_insert_throughput_per_sec Link-Not-Merge benchmark insert throughput metrics (operations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_insert_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).");
builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge");
@@ -46,9 +46,9 @@ internal static class PrometheusWriter
AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_mean_insert_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_insert_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_insert_throughput_floor_per_sec", scenario, report.Result.MinInsertThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
@@ -57,7 +57,7 @@ internal static class PrometheusWriter
{
AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
AppendMetric(builder, "linknotmerge_bench_baseline_min_insert_throughput_per_sec", scenario, baseline.MinInsertThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
@@ -70,9 +70,9 @@ internal static class PrometheusWriter
AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
}
if (report.MongoThroughputRegressionRatio is { } mongoRatio)
if (report.InsertThroughputRegressionRatio is { } insertRatio)
{
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
AppendMetric(builder, "linknotmerge_bench_insert_throughput_regression_ratio", scenario, insertRatio);
}
AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);

View File

@@ -17,7 +17,7 @@ internal sealed record ScenarioResult(
AllocationStatistics AllocationStatistics,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MinMongoThroughputThresholdPerSecond,
double? MinInsertThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb)
{
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
@@ -36,7 +36,7 @@ internal sealed record ScenarioResult(
public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string InsertThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -1,4 +1,4 @@
scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb
scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_insert_throughput_per_sec,min_insert_throughput_per_sec,max_allocated_mb
lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477
lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328
lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450
1 scenario iterations observations aliases linksets mean_total_ms p95_total_ms max_total_ms mean_insert_ms mean_correlation_ms mean_throughput_per_sec min_throughput_per_sec mean_mongo_throughput_per_sec mean_insert_throughput_per_sec min_mongo_throughput_per_sec min_insert_throughput_per_sec max_allocated_mb
2 lnm_ingest_baseline 5 5000 500 6000 555.1984 823.4957 866.6236 366.2635 188.9349 9877.7916 5769.5175 15338.0851 15338.0851 8405.1257 8405.1257 62.4477
3 lnm_ingest_fanout_medium 5 10000 800 14800 785.8909 841.6247 842.8815 453.5087 332.3822 12794.9550 11864.0639 22086.0320 22086.0320 20891.0579 20891.0579 145.8328
4 lnm_ingest_fanout_high 5 15000 1200 17400 1299.3458 1367.0934 1369.9430 741.6265 557.7193 11571.0991 10949.3607 20232.5180 20232.5180 19781.6762 19781.6762 238.3450

View File

@@ -1,7 +1,7 @@
{
"thresholdMs": 2000,
"minThroughputPerSecond": 7000,
"minMongoThroughputPerSecond": 12000,
"minInsertThroughputPerSecond": 12000,
"maxAllocatedMb": 600,
"iterations": 5,
"scenarios": [
@@ -18,7 +18,7 @@
"seed": 42022,
"thresholdMs": 900,
"minThroughputPerSecond": 5500,
"minMongoThroughputPerSecond": 8000,
"minInsertThroughputPerSecond": 8000,
"maxAllocatedMb": 160
},
{
@@ -34,7 +34,7 @@
"seed": 52022,
"thresholdMs": 1300,
"minThroughputPerSecond": 8000,
"minMongoThroughputPerSecond": 13000,
"minInsertThroughputPerSecond": 13000,
"maxAllocatedMb": 220
},
{
@@ -50,7 +50,7 @@
"seed": 62022,
"thresholdMs": 2200,
"minThroughputPerSecond": 7000,
"minMongoThroughputPerSecond": 13000,
"minInsertThroughputPerSecond": 13000,
"maxAllocatedMb": 300
}
]

View File

@@ -1,6 +1,6 @@
<Project>
<PropertyGroup>
<!-- Keep Concelier test harness active while trimming Mongo dependencies. Allow opt-out per project. -->
<!-- Keep Concelier test harness active while trimming legacy dependencies. Allow opt-out per project. -->
<UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)'==''">true</UseConcelierTestInfra>
<!-- Suppress noisy warnings from duplicate usings and analyzer fixture hints during Concelier test harness runs. -->
<NoWarn>$(NoWarn);CS0105;CS1591;CS8601;CS8602;CS8604;CS0618;RS1032;RS2007;xUnit1041;xUnit1031;xUnit2013;NU1510;NETSDK1023;SYSLIB0057</NoWarn>

View File

@@ -6,7 +6,7 @@ using StellaOps.Concelier.Storage.Postgres.Advisories;
namespace StellaOps.Concelier.WebService.DualWrite;
/// <summary>
/// Postgres-backed advisory store that implements the legacy Mongo contracts.
/// Postgres-backed advisory store that implements the legacy storage contracts.
/// </summary>
public sealed class DualWriteAdvisoryStore : IAdvisoryStore
{

View File

@@ -7,8 +7,8 @@ namespace StellaOps.Concelier.WebService.Options;
public sealed class ConcelierOptions
{
[Obsolete("Mongo storage has been removed; use PostgresStorage.")]
public StorageOptions Storage { get; set; } = new();
[Obsolete("Legacy storage has been removed; use PostgresStorage.")]
public LegacyStorageOptions LegacyStorage { get; set; } = new();
public PostgresStorageOptions? PostgresStorage { get; set; } = new PostgresStorageOptions
{
@@ -37,10 +37,10 @@ public sealed class ConcelierOptions
/// </summary>
public AirGapOptions AirGap { get; set; } = new();
[Obsolete("Mongo storage has been removed; use PostgresStorage.")]
public sealed class StorageOptions
[Obsolete("Legacy storage has been removed; use PostgresStorage.")]
public sealed class LegacyStorageOptions
{
public string Driver { get; set; } = "mongo";
public string Driver { get; set; } = "postgres";
public string Dsn { get; set; } = string.Empty;
@@ -56,7 +56,6 @@ public sealed class ConcelierOptions
{
/// <summary>
/// Enable PostgreSQL storage for LNM linkset cache.
/// When true, the linkset cache is stored in PostgreSQL instead of MongoDB.
/// </summary>
public bool Enabled { get; set; }

View File

@@ -226,7 +226,7 @@ builder.Services.AddOptions<AdvisoryObservationEventPublisherOptions>()
{
options.Subject ??= "concelier.advisory.observation.updated.v1";
options.Stream ??= "CONCELIER_OBS";
options.Transport = string.IsNullOrWhiteSpace(options.Transport) ? "mongo" : options.Transport;
options.Transport = string.IsNullOrWhiteSpace(options.Transport) ? "inmemory" : options.Transport;
})
.ValidateOnStart();
builder.Services.AddConcelierAocGuards();

View File

@@ -673,7 +673,7 @@ public sealed class AcscConnector : IFeedConnector
private async Task<AcscCursor> GetCursorCoreAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? AcscCursor.Empty : AcscCursor.FromBson(state.Cursor);
return state is null ? AcscCursor.Empty : AcscCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(AcscCursor cursor, CancellationToken cancellationToken)

View File

@@ -70,7 +70,7 @@ internal sealed record AcscCursor(
return document;
}
public static AcscCursor FromBson(DocumentObject? document)
public static AcscCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -332,8 +332,8 @@ public sealed class CccsConnector : IFeedConnector
}
var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions);
var dtoBson = DocumentObject.Parse(dtoJson);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoBson, now);
var dtoDoc = DocumentObject.Parse(dtoJson);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoDoc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -459,7 +459,7 @@ public sealed class CccsConnector : IFeedConnector
private async Task<CccsCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? CccsCursor.Empty : CccsCursor.FromBson(state.Cursor);
return state is null ? CccsCursor.Empty : CccsCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(CccsCursor cursor, CancellationToken cancellationToken)

View File

@@ -70,7 +70,7 @@ internal sealed record CccsCursor(
return doc;
}
public static CccsCursor FromBson(DocumentObject? document)
public static CccsCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -286,8 +286,8 @@ public sealed class CertBundConnector : IFeedConnector
_diagnostics.ParseSuccess(dto.Products.Count, dto.CveIds.Count);
parsedCount++;
var bson = DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "cert-bund.detail.v1", bson, now);
var doc = DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "cert-bund.detail.v1", doc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -423,7 +423,7 @@ public sealed class CertBundConnector : IFeedConnector
private async Task<CertBundCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? CertBundCursor.Empty : CertBundCursor.FromBson(state.Cursor);
return state is null ? CertBundCursor.Empty : CertBundCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(CertBundCursor cursor, CancellationToken cancellationToken)

View File

@@ -53,7 +53,7 @@ internal sealed record CertBundCursor(
return document;
}
public static CertBundCursor FromBson(DocumentObject? document)
public static CertBundCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -672,7 +672,7 @@ public sealed class CertCcConnector : IFeedConnector
private async Task<CertCcCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return CertCcCursor.FromBson(record?.Cursor);
return CertCcCursor.FromDocument(record?.Cursor);
}
private async Task UpdateCursorAsync(CertCcCursor cursor, CancellationToken cancellationToken)

View File

@@ -43,7 +43,7 @@ internal sealed record CertCcCursor(
return document;
}
public static CertCcCursor FromBson(DocumentObject? document)
public static CertCcCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
@@ -124,10 +124,10 @@ internal sealed record CertCcCursor(
{
switch (element)
{
case DocumentString bsonString when !string.IsNullOrWhiteSpace(bsonString.AsString):
results.Add(bsonString.AsString.Trim());
case DocumentString docString when !string.IsNullOrWhiteSpace(docString.AsString):
results.Add(docString.AsString.Trim());
break;
case DocumentObject bsonDocument when bsonDocument.TryGetValue("value", out var inner) && inner.IsString:
case DocumentObject docObject when docObject.TryGetValue("value", out var inner) && inner.IsString:
results.Add(inner.AsString.Trim());
break;
}
@@ -144,7 +144,7 @@ internal sealed record CertCcCursor(
private static bool TryReadGuid(DocumentValue value, out Guid guid)
{
if (value is DocumentString bsonString && Guid.TryParse(bsonString.AsString, out guid))
if (value is DocumentString docString && Guid.TryParse(docString.AsString, out guid))
{
return true;
}

View File

@@ -326,7 +326,7 @@ public sealed class CertFrConnector : IFeedConnector
private async Task<CertFrCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return CertFrCursor.FromBson(record?.Cursor);
return CertFrCursor.FromDocument(record?.Cursor);
}
private async Task UpdateCursorAsync(CertFrCursor cursor, CancellationToken cancellationToken)

View File

@@ -28,7 +28,7 @@ internal sealed record CertFrCursor(
return document;
}
public static CertFrCursor FromBson(DocumentObject? document)
public static CertFrCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -418,7 +418,7 @@ public sealed class CertInConnector : IFeedConnector
private async Task<CertInCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? CertInCursor.Empty : CertInCursor.FromBson(state.Cursor);
return state is null ? CertInCursor.Empty : CertInCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(CertInCursor cursor, CancellationToken cancellationToken)

View File

@@ -28,7 +28,7 @@ internal sealed record CertInCursor(
return document;
}
public static CertInCursor FromBson(DocumentObject? document)
public static CertInCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -10,7 +10,7 @@ using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Documents;
using MongoContracts = StellaOps.Concelier.Storage;
using LegacyContracts = StellaOps.Concelier.Storage;
using StorageContracts = StellaOps.Concelier.Storage.Contracts;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.Common.Telemetry;
@@ -32,12 +32,12 @@ public sealed class SourceFetchService
private readonly IHttpClientFactory _httpClientFactory;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly MongoContracts.IDocumentStore _documentStore;
private readonly LegacyContracts.IDocumentStore _documentStore;
private readonly StorageContracts.IStorageDocumentStore _storageDocumentStore;
private readonly ILogger<SourceFetchService> _logger;
private readonly TimeProvider _timeProvider;
private readonly IOptionsMonitor<SourceHttpClientOptions> _httpClientOptions;
private readonly IOptions<MongoContracts.StorageOptions> _storageOptions;
private readonly IOptions<LegacyContracts.StorageOptions> _storageOptions;
private readonly IJitterSource _jitterSource;
private readonly IAdvisoryRawWriteGuard _guard;
private readonly IAdvisoryLinksetMapper _linksetMapper;
@@ -47,7 +47,7 @@ public sealed class SourceFetchService
public SourceFetchService(
IHttpClientFactory httpClientFactory,
RawDocumentStorage rawDocumentStorage,
MongoContracts.IDocumentStore documentStore,
LegacyContracts.IDocumentStore documentStore,
StorageContracts.IStorageDocumentStore storageDocumentStore,
ILogger<SourceFetchService> logger,
IJitterSource jitterSource,
@@ -56,7 +56,7 @@ public sealed class SourceFetchService
ICryptoHash hash,
TimeProvider? timeProvider = null,
IOptionsMonitor<SourceHttpClientOptions>? httpClientOptions = null,
IOptions<MongoContracts.StorageOptions>? storageOptions = null)
IOptions<LegacyContracts.StorageOptions>? storageOptions = null)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -77,7 +77,7 @@ public sealed class SourceFetchService
public SourceFetchService(
IHttpClientFactory httpClientFactory,
RawDocumentStorage rawDocumentStorage,
MongoContracts.IDocumentStore documentStore,
LegacyContracts.IDocumentStore documentStore,
ILogger<SourceFetchService> logger,
IJitterSource jitterSource,
IAdvisoryRawWriteGuard guard,
@@ -85,7 +85,7 @@ public sealed class SourceFetchService
ICryptoHash hash,
TimeProvider? timeProvider = null,
IOptionsMonitor<SourceHttpClientOptions>? httpClientOptions = null,
IOptions<MongoContracts.StorageOptions>? storageOptions = null)
IOptions<LegacyContracts.StorageOptions>? storageOptions = null)
: this(
httpClientFactory,
rawDocumentStorage,

View File

@@ -2,7 +2,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Connector.Common.Fetch;
using MongoContracts = StellaOps.Concelier.Storage;
using LegacyContracts = StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Connector.Common.State;
@@ -12,17 +12,17 @@ namespace StellaOps.Concelier.Connector.Common.State;
/// </summary>
public sealed class SourceStateSeedProcessor
{
private readonly MongoContracts.IDocumentStore _documentStore;
private readonly LegacyContracts.IDocumentStore _documentStore;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly MongoContracts.ISourceStateRepository _stateRepository;
private readonly LegacyContracts.ISourceStateRepository _stateRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SourceStateSeedProcessor> _logger;
private readonly ICryptoHash _hash;
public SourceStateSeedProcessor(
MongoContracts.IDocumentStore documentStore,
LegacyContracts.IDocumentStore documentStore,
RawDocumentStorage rawDocumentStorage,
MongoContracts.ISourceStateRepository stateRepository,
LegacyContracts.ISourceStateRepository stateRepository,
ICryptoHash hash,
TimeProvider? timeProvider = null,
ILogger<SourceStateSeedProcessor>? logger = null)
@@ -173,7 +173,7 @@ public sealed class SourceStateSeedProcessor
var metadata = CloneDictionary(document.Metadata);
var record = new MongoContracts.DocumentRecord(
var record = new LegacyContracts.DocumentRecord(
recordId,
source,
document.Uri,

View File

@@ -571,7 +571,7 @@ public sealed class CveConnector : IFeedConnector
private async Task<CveCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? CveCursor.Empty : CveCursor.FromBson(state.Cursor);
return state is null ? CveCursor.Empty : CveCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(CveCursor cursor, CancellationToken cancellationToken)

View File

@@ -49,7 +49,7 @@ internal sealed record CveCursor(
return document;
}
public static CveCursor FromBson(DocumentObject? document)
public static CveCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -368,7 +368,7 @@ public sealed class DebianConnector : IFeedConnector
continue;
}
var payload = ToBson(dto);
var payload = ToDocument(dto);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, SchemaVersion, payload, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -414,7 +414,7 @@ public sealed class DebianConnector : IFeedConnector
DebianAdvisoryDto dto;
try
{
dto = FromBson(dtoRecord.Payload);
dto = FromDocument(dtoRecord.Payload);
}
catch (Exception ex)
{
@@ -438,7 +438,7 @@ public sealed class DebianConnector : IFeedConnector
private async Task<DebianCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? DebianCursor.Empty : DebianCursor.FromBson(state.Cursor);
return state is null ? DebianCursor.Empty : DebianCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(DebianCursor cursor, CancellationToken cancellationToken)
@@ -508,7 +508,7 @@ public sealed class DebianConnector : IFeedConnector
cveList);
}
private static DocumentObject ToBson(DebianAdvisoryDto dto)
private static DocumentObject ToDocument(DebianAdvisoryDto dto)
{
var packages = new DocumentArray();
foreach (var package in dto.Packages)
@@ -575,15 +575,15 @@ public sealed class DebianConnector : IFeedConnector
};
}
private static DebianAdvisoryDto FromBson(DocumentObject document)
private static DebianAdvisoryDto FromDocument(DocumentObject document)
{
var advisoryId = document.GetValue("advisoryId", "").AsString;
var sourcePackage = document.GetValue("sourcePackage", advisoryId).AsString;
var title = document.GetValue("title", advisoryId).AsString;
var description = document.TryGetValue("description", out var desc) ? desc.AsString : null;
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray cvesBson
? cvesBson.OfType<DocumentValue>()
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray cvesArr
? cvesArr.OfType<DocumentValue>()
.Select(static value => value.ToString())
.Where(static s => !string.IsNullOrWhiteSpace(s))
.Select(static s => s!)
@@ -591,9 +591,9 @@ public sealed class DebianConnector : IFeedConnector
: Array.Empty<string>();
var packages = new List<DebianPackageStateDto>();
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray packagesBson)
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray packagesArr)
{
foreach (var element in packagesBson.OfType<DocumentObject>())
foreach (var element in packagesArr.OfType<DocumentObject>())
{
packages.Add(new DebianPackageStateDto(
element.GetValue("package", sourcePackage).AsString,
@@ -614,9 +614,9 @@ public sealed class DebianConnector : IFeedConnector
}
var references = new List<DebianReferenceDto>();
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray refBson)
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray refArr)
{
foreach (var element in refBson.OfType<DocumentObject>())
foreach (var element in refArr.OfType<DocumentObject>())
{
references.Add(new DebianReferenceDto(
element.GetValue("url", "").AsString,

View File

@@ -19,7 +19,7 @@ internal sealed record DebianCursor(
public static DebianCursor Empty { get; } = new(null, EmptyIds, EmptyGuidList, EmptyGuidList, EmptyCache);
public static DebianCursor FromBson(DocumentObject? document)
public static DebianCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
@@ -168,7 +168,7 @@ internal sealed record DebianCursor(
{
if (element.Value is DocumentObject entry)
{
cache[element.Name] = DebianFetchCacheEntry.FromBson(entry);
cache[element.Name] = DebianFetchCacheEntry.FromDocument(entry);
}
}

View File

@@ -10,7 +10,7 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.DocumentRecord document)
=> new(document.Etag, document.LastModified);
public static DebianFetchCacheEntry FromBson(DocumentObject document)
public static DebianFetchCacheEntry FromDocument(DocumentObject document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -19,7 +19,7 @@ internal sealed record SuseCursor(
public static SuseCursor Empty { get; } = new(null, EmptyStringList, EmptyGuidList, EmptyGuidList, EmptyCache);
public static SuseCursor FromBson(DocumentObject? document)
public static SuseCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
@@ -168,7 +168,7 @@ internal sealed record SuseCursor(
{
if (element.Value is DocumentObject entry)
{
cache[element.Name] = SuseFetchCacheEntry.FromBson(entry);
cache[element.Name] = SuseFetchCacheEntry.FromDocument(entry);
}
}

View File

@@ -1,6 +1,6 @@
using System;
using StellaOps.Concelier.Documents;
using MongoContracts = StellaOps.Concelier.Storage;
using LegacyContracts = StellaOps.Concelier.Storage;
using StorageContracts = StellaOps.Concelier.Storage.Contracts;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
@@ -12,10 +12,10 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
public static SuseFetchCacheEntry FromDocument(StorageContracts.StorageDocument document)
=> new(document.Etag, document.LastModified);
public static SuseFetchCacheEntry FromDocument(MongoContracts.DocumentRecord document)
public static SuseFetchCacheEntry FromDocument(LegacyContracts.DocumentRecord document)
=> new(document.Etag, document.LastModified);
public static SuseFetchCacheEntry FromBson(DocumentObject document)
public static SuseFetchCacheEntry FromDocument(DocumentObject document)
{
if (document is null || document.ElementCount == 0)
{
@@ -79,7 +79,7 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
return !LastModified.HasValue && !document.LastModified.HasValue;
}
public bool Matches(MongoContracts.DocumentRecord document)
public bool Matches(LegacyContracts.DocumentRecord document)
{
if (document is null)
{

View File

@@ -343,7 +343,7 @@ public sealed class SuseConnector : IFeedConnector
var updatedDocument = document with { Metadata = metadata };
await _documentStore.UpsertAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
var payload = ToBson(dto);
var payload = ToDocument(dto);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "suse.csaf.v1", payload, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
@@ -390,7 +390,7 @@ public sealed class SuseConnector : IFeedConnector
SuseAdvisoryDto dto;
try
{
dto = FromBson(dtoRecord.Payload);
dto = FromDocument(dtoRecord.Payload);
}
catch (Exception ex)
{
@@ -415,7 +415,7 @@ public sealed class SuseConnector : IFeedConnector
private async Task<SuseCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? SuseCursor.Empty : SuseCursor.FromBson(state.Cursor);
return state is null ? SuseCursor.Empty : SuseCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(SuseCursor cursor, CancellationToken cancellationToken)
@@ -424,7 +424,7 @@ public sealed class SuseConnector : IFeedConnector
await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private static DocumentObject ToBson(SuseAdvisoryDto dto)
private static DocumentObject ToDocument(SuseAdvisoryDto dto)
{
var packages = new DocumentArray();
foreach (var package in dto.Packages)
@@ -493,7 +493,7 @@ public sealed class SuseConnector : IFeedConnector
};
}
private static SuseAdvisoryDto FromBson(DocumentObject document)
private static SuseAdvisoryDto FromDocument(DocumentObject document)
{
var advisoryId = document.GetValue("advisoryId", string.Empty).AsString;
var title = document.GetValue("title", advisoryId).AsString;
@@ -507,8 +507,8 @@ public sealed class SuseConnector : IFeedConnector
}
: DateTimeOffset.UtcNow;
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray bsonCves
? bsonCves.OfType<DocumentValue>()
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray cveArr
? cveArr.OfType<DocumentValue>()
.Select(static value => value?.ToString())
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value!)
@@ -517,9 +517,9 @@ public sealed class SuseConnector : IFeedConnector
: Array.Empty<string>();
var packageList = new List<SusePackageStateDto>();
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray bsonPackages)
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray packageArr)
{
foreach (var element in bsonPackages.OfType<DocumentObject>())
foreach (var element in packageArr.OfType<DocumentObject>())
{
var package = element.GetValue("package", string.Empty).AsString;
var platform = element.GetValue("platform", string.Empty).AsString;
@@ -544,9 +544,9 @@ public sealed class SuseConnector : IFeedConnector
}
var referenceList = new List<SuseReferenceDto>();
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray bsonReferences)
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray referenceArr)
{
foreach (var element in bsonReferences.OfType<DocumentObject>())
foreach (var element in referenceArr.OfType<DocumentObject>())
{
var url = element.GetValue("url", string.Empty).AsString;
if (string.IsNullOrWhiteSpace(url))

View File

@@ -19,7 +19,7 @@ internal sealed record UbuntuCursor(
public static UbuntuCursor Empty { get; } = new(null, EmptyIds, EmptyGuidList, EmptyGuidList, EmptyCache);
public static UbuntuCursor FromBson(DocumentObject? document)
public static UbuntuCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
@@ -168,7 +168,7 @@ internal sealed record UbuntuCursor(
{
if (element.Value is DocumentObject entryDoc)
{
cache[element.Name] = UbuntuFetchCacheEntry.FromBson(entryDoc);
cache[element.Name] = UbuntuFetchCacheEntry.FromDocument(entryDoc);
}
}

View File

@@ -11,7 +11,7 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
public static UbuntuFetchCacheEntry FromDocument(StorageContracts.StorageDocument document)
=> new(document.Etag, document.LastModified);
public static UbuntuFetchCacheEntry FromBson(DocumentObject document)
public static UbuntuFetchCacheEntry FromDocument(DocumentObject document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -144,7 +144,7 @@ public sealed class UbuntuConnector : IFeedConnector
["ubuntu.published"] = notice.Published.ToString("O")
};
var dtoDocument = ToBson(notice);
var dtoDocument = ToDocument(notice);
var sha256 = ComputeNoticeHash(dtoDocument);
var documentId = existing?.Id ?? Guid.NewGuid();
@@ -217,7 +217,7 @@ public sealed class UbuntuConnector : IFeedConnector
UbuntuNoticeDto notice;
try
{
notice = FromBson(dto.Payload);
notice = FromDocument(dto.Payload);
}
catch (Exception ex)
{
@@ -409,7 +409,7 @@ public sealed class UbuntuConnector : IFeedConnector
private async Task<UbuntuCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? UbuntuCursor.Empty : UbuntuCursor.FromBson(state.Cursor);
return state is null ? UbuntuCursor.Empty : UbuntuCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(UbuntuCursor cursor, CancellationToken cancellationToken)
@@ -420,12 +420,12 @@ public sealed class UbuntuConnector : IFeedConnector
private string ComputeNoticeHash(DocumentObject document)
{
var bytes = document.ToBson();
var bytes = document.ToDocument();
var hash = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static DocumentObject ToBson(UbuntuNoticeDto notice)
private static DocumentObject ToDocument(UbuntuNoticeDto notice)
{
var packages = new DocumentArray();
foreach (var package in notice.Packages)
@@ -473,7 +473,7 @@ public sealed class UbuntuConnector : IFeedConnector
};
}
private static UbuntuNoticeDto FromBson(DocumentObject document)
private static UbuntuNoticeDto FromDocument(DocumentObject document)
{
var noticeId = document.GetValue("noticeId", string.Empty).AsString;
var published = document.TryGetValue("published", out var publishedValue)
@@ -488,8 +488,8 @@ public sealed class UbuntuConnector : IFeedConnector
var title = document.GetValue("title", noticeId).AsString;
var summary = document.GetValue("summary", string.Empty).AsString;
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray cveBson
? cveBson.OfType<DocumentValue>()
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is DocumentArray cveArr
? cveArr.OfType<DocumentValue>()
.Select(static value => value?.ToString())
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value!)
@@ -497,9 +497,9 @@ public sealed class UbuntuConnector : IFeedConnector
: Array.Empty<string>();
var packages = new List<UbuntuReleasePackageDto>();
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray packageBson)
if (document.TryGetValue("packages", out var packageArray) && packageArray is DocumentArray packageArr)
{
foreach (var element in packageBson.OfType<DocumentObject>())
foreach (var element in packageArr.OfType<DocumentObject>())
{
packages.Add(new UbuntuReleasePackageDto(
Release: element.GetValue("release", string.Empty).AsString,
@@ -511,9 +511,9 @@ public sealed class UbuntuConnector : IFeedConnector
}
var references = new List<UbuntuReferenceDto>();
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray referenceBson)
if (document.TryGetValue("references", out var referenceArray) && referenceArray is DocumentArray referenceArr)
{
foreach (var element in referenceBson.OfType<DocumentObject>())
foreach (var element in referenceArr.OfType<DocumentObject>())
{
var url = element.GetValue("url", string.Empty).AsString;
if (string.IsNullOrWhiteSpace(url))

View File

@@ -422,7 +422,7 @@ public sealed class GhsaConnector : IFeedConnector
private async Task<GhsaCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? GhsaCursor.Empty : GhsaCursor.FromBson(state.Cursor);
return state is null ? GhsaCursor.Empty : GhsaCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(GhsaCursor cursor, CancellationToken cancellationToken)

View File

@@ -49,7 +49,7 @@ internal sealed record GhsaCursor(
return document;
}
public static GhsaCursor FromBson(DocumentObject? document)
public static GhsaCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -322,13 +322,13 @@ public sealed class IcsCisaConnector : IFeedConnector
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false,
});
var bson = DocumentObject.Parse(json);
var doc = DocumentObject.Parse(json);
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
document.Id,
SourceName,
SchemaVersion,
bson,
doc,
_timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
@@ -1411,7 +1411,7 @@ public sealed class IcsCisaConnector : IFeedConnector
private async Task<IcsCisaCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? IcsCisaCursor.Empty : IcsCisaCursor.FromBson(state.Cursor);
return state is null ? IcsCisaCursor.Empty : IcsCisaCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(IcsCisaCursor cursor, CancellationToken cancellationToken)

View File

@@ -28,7 +28,7 @@ internal sealed record IcsCisaCursor(
return document;
}
public static IcsCisaCursor FromBson(DocumentObject? document)
public static IcsCisaCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -59,7 +59,7 @@ internal sealed record KasperskyCursor(
return document;
}
public static KasperskyCursor FromBson(DocumentObject? document)
public static KasperskyCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -442,7 +442,7 @@ public sealed class KasperskyConnector : IFeedConnector
private async Task<KasperskyCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? KasperskyCursor.Empty : KasperskyCursor.FromBson(state.Cursor);
return state is null ? KasperskyCursor.Empty : KasperskyCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(KasperskyCursor cursor, CancellationToken cancellationToken)

View File

@@ -36,7 +36,7 @@ internal sealed record JvnCursor(
return document;
}
public static JvnCursor FromBson(DocumentObject? document)
public static JvnCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -314,7 +314,7 @@ public sealed class JvnConnector : IFeedConnector
private async Task<JvnCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? JvnCursor.Empty : JvnCursor.FromBson(state.Cursor);
return state is null ? JvnCursor.Empty : JvnCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(JvnCursor cursor, CancellationToken cancellationToken)

View File

@@ -34,7 +34,7 @@ internal sealed record KevCursor(
return document;
}
public static KevCursor FromBson(DocumentObject? document)
public static KevCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -386,7 +386,7 @@ public sealed class KevConnector : IFeedConnector
private async Task<KevCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? KevCursor.Empty : KevCursor.FromBson(state.Cursor);
return state is null ? KevCursor.Empty : KevCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(KevCursor cursor, CancellationToken cancellationToken)

View File

@@ -54,7 +54,7 @@ internal sealed record KisaCursor(
return document;
}
public static KisaCursor FromBson(DocumentObject? document)
public static KisaCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -287,8 +287,8 @@ public sealed class KisaConnector : IFeedConnector
_diagnostics.ParseSuccess(category);
_logger.LogDebug("KISA parsed detail for {DocumentId} ({Category})", document.Id, category ?? "unknown");
var dtoBson = DocumentObject.Parse(JsonSerializer.Serialize(parsed, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "kisa.detail.v1", dtoBson, now);
var dtoDoc = DocumentObject.Parse(JsonSerializer.Serialize(parsed, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "kisa.detail.v1", dtoDoc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -412,7 +412,7 @@ public sealed class KisaConnector : IFeedConnector
private async Task<KisaCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? KisaCursor.Empty : KisaCursor.FromBson(state.Cursor);
return state is null ? KisaCursor.Empty : KisaCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(KisaCursor cursor, CancellationToken cancellationToken)

View File

@@ -78,7 +78,7 @@ internal sealed record OsvCursor(
return document;
}
public static OsvCursor FromBson(DocumentObject? document)
public static OsvCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -297,7 +297,7 @@ public sealed class OsvConnector : IFeedConnector
private async Task<OsvCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? OsvCursor.Empty : OsvCursor.FromBson(state.Cursor);
return state is null ? OsvCursor.Empty : OsvCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(OsvCursor cursor, CancellationToken cancellationToken)

View File

@@ -36,7 +36,7 @@ internal sealed record RuBduCursor(
return document;
}
public static RuBduCursor FromBson(DocumentObject? document)
public static RuBduCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -268,8 +268,8 @@ public sealed class RuBduConnector : IFeedConnector
continue;
}
var bson = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-bdu.v1", bson, _timeProvider.GetUtcNow());
var doc = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-bdu.v1", doc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
_diagnostics.ParseSuccess(
@@ -520,7 +520,7 @@ public sealed class RuBduConnector : IFeedConnector
private async Task<RuBduCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? RuBduCursor.Empty : RuBduCursor.FromBson(state.Cursor);
return state is null ? RuBduCursor.Empty : RuBduCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(RuBduCursor cursor, CancellationToken cancellationToken)

View File

@@ -42,7 +42,7 @@ internal sealed record RuNkckiCursor(
return document;
}
public static RuNkckiCursor FromBson(DocumentObject? document)
public static RuNkckiCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -338,8 +338,8 @@ public sealed class RuNkckiConnector : IFeedConnector
continue;
}
var bson = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-nkcki.v1", bson, _timeProvider.GetUtcNow());
var doc = StellaOps.Concelier.Documents.DocumentObject.Parse(JsonSerializer.Serialize(dto, SerializerOptions));
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-nkcki.v1", doc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
@@ -871,7 +871,7 @@ public sealed class RuNkckiConnector : IFeedConnector
private async Task<RuNkckiCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? RuNkckiCursor.Empty : RuNkckiCursor.FromBson(state.Cursor);
return state is null ? RuNkckiCursor.Empty : RuNkckiCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(RuNkckiCursor cursor, CancellationToken cancellationToken)

View File

@@ -52,7 +52,7 @@ internal sealed record StellaOpsMirrorCursor(
return document;
}
public static StellaOpsMirrorCursor FromBson(DocumentObject? document)
public static StellaOpsMirrorCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -275,7 +275,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
private async Task<StellaOpsMirrorCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(Source, cancellationToken).ConfigureAwait(false);
return state is null ? StellaOpsMirrorCursor.Empty : StellaOpsMirrorCursor.FromBson(state.Cursor);
return state is null ? StellaOpsMirrorCursor.Empty : StellaOpsMirrorCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(StellaOpsMirrorCursor cursor, CancellationToken cancellationToken)
@@ -422,8 +422,8 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
continue;
}
var dtoBson = DocumentObject.Parse(json);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoBson, now);
var dtoDoc = DocumentObject.Parse(json);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoDoc, now);
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);

View File

@@ -29,7 +29,7 @@ internal sealed record AdobeCursor(
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
{
cacheDocument[key] = entry.ToBson();
cacheDocument[key] = entry.ToDocument();
}
document["fetchCache"] = cacheDocument;
@@ -138,7 +138,7 @@ internal sealed record AdobeCursor(
{
if (element.Value is DocumentObject entryDocument)
{
dictionary[element.Name] = AdobeFetchCacheEntry.FromBson(entryDocument);
dictionary[element.Name] = AdobeFetchCacheEntry.FromDocument(entryDocument);
}
}
@@ -150,7 +150,7 @@ internal sealed record AdobeFetchCacheEntry(string Sha256)
{
public static AdobeFetchCacheEntry Empty { get; } = new(string.Empty);
public DocumentObject ToBson()
public DocumentObject ToDocument()
{
var document = new DocumentObject
{
@@ -160,7 +160,7 @@ internal sealed record AdobeFetchCacheEntry(string Sha256)
return document;
}
public static AdobeFetchCacheEntry FromBson(DocumentObject document)
public static AdobeFetchCacheEntry FromDocument(DocumentObject document)
{
var sha = document.TryGetValue("sha256", out var shaValue) ? shaValue.AsString : string.Empty;
return new AdobeFetchCacheEntry(sha);

View File

@@ -428,12 +428,12 @@ public sealed class AppleConnector : IFeedConnector
private async Task<AppleCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? AppleCursor.Empty : AppleCursor.FromBson(state.Cursor);
return state is null ? AppleCursor.Empty : AppleCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(AppleCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBson();
var document = cursor.ToDocument();
await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -16,7 +16,7 @@ internal sealed record AppleCursor(
public static AppleCursor Empty { get; } = new(null, EmptyStringCollection, EmptyGuidCollection, EmptyGuidCollection);
public DocumentObject ToBson()
public DocumentObject ToDocument()
{
var document = new DocumentObject
{
@@ -37,7 +37,7 @@ internal sealed record AppleCursor(
return document;
}
public static AppleCursor FromBson(DocumentObject? document)
public static AppleCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

View File

@@ -28,7 +28,7 @@ internal sealed record ChromiumCursor(
var cacheDocument = new DocumentObject();
foreach (var (key, entry) in FetchCache)
{
cacheDocument[key] = entry.ToBson();
cacheDocument[key] = entry.ToDocument();
}
document["fetchCache"] = cacheDocument;
@@ -113,7 +113,7 @@ internal sealed record ChromiumCursor(
{
if (element.Value is DocumentObject entryDocument)
{
dictionary[element.Name] = ChromiumFetchCacheEntry.FromBson(entryDocument);
dictionary[element.Name] = ChromiumFetchCacheEntry.FromDocument(entryDocument);
}
}
@@ -125,7 +125,7 @@ internal sealed record ChromiumFetchCacheEntry(string Sha256)
{
public static ChromiumFetchCacheEntry Empty { get; } = new(string.Empty);
public DocumentObject ToBson()
public DocumentObject ToDocument()
{
var document = new DocumentObject
{
@@ -135,7 +135,7 @@ internal sealed record ChromiumFetchCacheEntry(string Sha256)
return document;
}
public static ChromiumFetchCacheEntry FromBson(DocumentObject document)
public static ChromiumFetchCacheEntry FromDocument(DocumentObject document)
{
var sha = document.TryGetValue("sha256", out var shaValue) ? shaValue.AsString : string.Empty;
return new ChromiumFetchCacheEntry(sha);

View File

@@ -325,8 +325,8 @@ public sealed class CiscoConnector : IFeedConnector
try
{
var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions);
var dtoBson = DocumentObject.Parse(dtoJson);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoBson, _timeProvider.GetUtcNow());
var dtoDoc = DocumentObject.Parse(dtoJson);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoDoc, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
pendingDocuments.Remove(documentId);
@@ -577,12 +577,12 @@ public sealed class CiscoConnector : IFeedConnector
private async Task<CiscoCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? CiscoCursor.Empty : CiscoCursor.FromBson(state.Cursor);
return state is null ? CiscoCursor.Empty : CiscoCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(CiscoCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBson();
var document = cursor.ToDocument();
await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}

View File

@@ -12,7 +12,7 @@ internal sealed record CiscoCursor(
public static CiscoCursor Empty { get; } = new(null, null, EmptyGuidCollection, EmptyGuidCollection);
public DocumentObject ToBson()
public DocumentObject ToDocument()
{
var document = new DocumentObject
{
@@ -33,7 +33,7 @@ internal sealed record CiscoCursor(
return document;
}
public static CiscoCursor FromBson(DocumentObject? document)
public static CiscoCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{

Some files were not shown because too many files have changed in this diff Show More