Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools
- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
@@ -0,0 +1,135 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Bench.LinkNotMerge;

internal sealed class LinkNotMergeScenarioRunner
{
    private readonly LinkNotMergeScenarioConfig _config;
    private readonly IReadOnlyList<ObservationSeed> _seeds;

    public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = ObservationGenerator.Generate(config);
    }

    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var totalDurations = new double[iterations];
        var insertDurations = new double[iterations];
        var correlationDurations = new double[iterations];
        var allocated = new double[iterations];
        var totalThroughputs = new double[iterations];
        var insertThroughputs = new double[iterations];
        LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0);

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            using var runner = MongoRunner.Run(new MongoRunnerOptions
            {
                UseSingleNodeReplicaSet = false,
            });

            var client = new MongoClient(runner.ConnectionString);
            var database = client.GetDatabase("linknotmerge_bench");
            var collection = database.GetCollection<BsonDocument>("advisory_observations");

            CreateIndexes(collection, cancellationToken);

            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var insertStopwatch = Stopwatch.StartNew();
            InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
            insertStopwatch.Stop();

            var correlationStopwatch = Stopwatch.StartNew();
            var documents = collection
                .Find(FilterDefinition<BsonDocument>.Empty)
                .Project(Builders<BsonDocument>.Projection
                    .Include("tenant")
                    .Include("linkset"))
                .ToList(cancellationToken);

            var correlator = new LinksetAggregator();
            lastAggregation = correlator.Correlate(documents);
            correlationStopwatch.Stop();

            var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
            var afterAllocated = GC.GetTotalAllocatedBytes();

            totalDurations[iteration] = totalElapsed.TotalMilliseconds;
            insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
            allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);

            var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
            totalThroughputs[iteration] = _seeds.Count / totalSeconds;

            var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d);
            insertThroughputs[iteration] = _seeds.Count / insertSeconds;
        }

        return new ScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocated,
            totalThroughputs,
            insertThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            LinksetCount: lastAggregation.LinksetCount,
            TenantCount: _config.ResolveTenantCount(),
            AggregationResult: lastAggregation);
    }

    private static void InsertObservations(
        IMongoCollection<BsonDocument> collection,
        IReadOnlyList<ObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        for (var offset = 0; offset < seeds.Count; offset += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var remaining = Math.Min(batchSize, seeds.Count - offset);
            var batch = new List<BsonDocument>(remaining);
            for (var index = 0; index < remaining; index++)
            {
                batch.Add(seeds[offset + index].ToBsonDocument());
            }

            collection.InsertMany(batch, new InsertManyOptions
            {
                IsOrdered = false,
                BypassDocumentValidation = true,
            }, cancellationToken);
        }
    }

    private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
    {
        var indexKeys = Builders<BsonDocument>.IndexKeys
            .Ascending("tenant")
            .Ascending("identifiers.aliases");

        try
        {
            collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
        }
        catch
        {
            // Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently.
        }
    }
}
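For context, a minimal harness for this runner might look like the sketch below. It is not part of this commit: the way a LinkNotMergeScenarioConfig is built is not shown in this diff (LoadScenarioConfig is a hypothetical helper), and it assumes ScenarioExecutionResult exposes the values passed to its constructor (such as ObservationCount and LinksetCount) as properties.

// Hypothetical harness, assuming a config loaded elsewhere.
var config = LoadScenarioConfig();
var runner = new LinkNotMergeScenarioRunner(config);
var result = runner.Execute(iterations: 3, CancellationToken.None);
Console.WriteLine($"observations: {result.ObservationCount}, linksets: {result.LinksetCount}");

Note that MongoRunner.Run is called inside the iteration loop, so each iteration starts against a fresh ephemeral mongod rather than a warmed-up instance; the reported durations therefore include index creation and cold-start insert costs every time.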