save progress

This commit is contained in:
StellaOps Bot
2026-01-06 09:42:02 +02:00
parent 94d68bee8b
commit 37e11918e0
443 changed files with 85863 additions and 897 deletions

View File

@@ -0,0 +1,446 @@
// <copyright file="HlcMergeServiceTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.HybridLogicalClock;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.AirGap.Sync.Tests;
/// <summary>
/// Unit tests for <see cref="HlcMergeService"/>: HLC-ordered merge correctness (OMP-014),
/// duplicate detection (OMP-015), multi-node merges (OMP-018), and determinism (OMP-019).
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class HlcMergeServiceTests
{
    // Fixed timestamp for EnqueuedAt so nothing in these fixtures varies between runs.
    // The OMP-019 tests assert byte-identical output across invocations; stamping
    // entries with DateTimeOffset.UtcNow would make the fixtures themselves
    // nondeterministic and could mask (or cause) flaky chain-hash comparisons.
    private static readonly DateTimeOffset FixedEnqueuedAt =
        new(2026, 1, 1, 0, 0, 0, TimeSpan.Zero);

    private readonly HlcMergeService _sut;
    private readonly ConflictResolver _conflictResolver;

    public HlcMergeServiceTests()
    {
        _conflictResolver = new ConflictResolver(NullLogger<ConflictResolver>.Instance);
        _sut = new HlcMergeService(_conflictResolver, NullLogger<HlcMergeService>.Instance);
    }

    #region OMP-014: Merge Algorithm Correctness

    [Fact]
    public async Task MergeAsync_EmptyInput_ReturnsEmptyResult()
    {
        // Arrange
        var nodeLogs = new List<NodeJobLog>();

        // Act
        var result = await _sut.MergeAsync(nodeLogs);

        // Assert
        result.MergedEntries.Should().BeEmpty();
        result.Duplicates.Should().BeEmpty();
        result.SourceNodes.Should().BeEmpty();
        result.MergedChainHead.Should().BeNull();
    }

    [Fact]
    public async Task MergeAsync_SingleNode_PreservesOrder()
    {
        // Arrange
        var nodeLog = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")),
            CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222")),
            CreateEntry("node-a", 300, 0, Guid.Parse("33333333-3333-3333-3333-333333333333"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeLog });

        // Assert
        result.MergedEntries.Should().HaveCount(3);
        result.MergedEntries[0].JobId.Should().Be(Guid.Parse("11111111-1111-1111-1111-111111111111"));
        result.MergedEntries[1].JobId.Should().Be(Guid.Parse("22222222-2222-2222-2222-222222222222"));
        result.MergedEntries[2].JobId.Should().Be(Guid.Parse("33333333-3333-3333-3333-333333333333"));
        result.Duplicates.Should().BeEmpty();
        result.SourceNodes.Should().ContainSingle().Which.Should().Be("node-a");
    }

    [Fact]
    public async Task MergeAsync_TwoNodes_MergesByHlcOrder()
    {
        // Arrange - Two nodes with interleaved HLC timestamps
        // Node A: T=100, T=102
        // Node B: T=101, T=103
        // Expected order: 100, 101, 102, 103
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
            CreateEntry("node-a", 102, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000"))
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntry("node-b", 101, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
            CreateEntry("node-b", 103, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB });

        // Assert
        result.MergedEntries.Should().HaveCount(4);
        result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
        result.MergedEntries[1].THlc.PhysicalTime.Should().Be(101);
        result.MergedEntries[2].THlc.PhysicalTime.Should().Be(102);
        result.MergedEntries[3].THlc.PhysicalTime.Should().Be(103);
        result.SourceNodes.Should().HaveCount(2);
    }

    [Fact]
    public async Task MergeAsync_SamePhysicalTime_OrdersByLogicalCounter()
    {
        // Arrange - Same physical time, different logical counters
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001")),
            CreateEntry("node-a", 100, 2, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000003"))
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntry("node-b", 100, 1, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002")),
            CreateEntry("node-b", 100, 3, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000004"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB });

        // Assert - logical counter is the HLC tiebreaker when physical times are equal
        result.MergedEntries.Should().HaveCount(4);
        result.MergedEntries[0].THlc.LogicalCounter.Should().Be(0);
        result.MergedEntries[1].THlc.LogicalCounter.Should().Be(1);
        result.MergedEntries[2].THlc.LogicalCounter.Should().Be(2);
        result.MergedEntries[3].THlc.LogicalCounter.Should().Be(3);
    }

    [Fact]
    public async Task MergeAsync_SameTimeAndCounter_OrdersByNodeId()
    {
        // Arrange - Same physical time and counter, different node IDs
        var nodeA = CreateNodeLog("alpha-node", new[]
        {
            CreateEntry("alpha-node", 100, 0, Guid.Parse("aaaaaaaa-0000-0000-0000-000000000001"))
        });
        var nodeB = CreateNodeLog("beta-node", new[]
        {
            CreateEntry("beta-node", 100, 0, Guid.Parse("bbbbbbbb-0000-0000-0000-000000000002"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB });

        // Assert - "alpha-node" < "beta-node" alphabetically (final HLC tiebreaker)
        result.MergedEntries.Should().HaveCount(2);
        result.MergedEntries[0].SourceNodeId.Should().Be("alpha-node");
        result.MergedEntries[1].SourceNodeId.Should().Be("beta-node");
    }

    [Fact]
    public async Task MergeAsync_RecomputesUnifiedChain()
    {
        // Arrange
        var nodeLog = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("11111111-1111-1111-1111-111111111111")),
            CreateEntry("node-a", 200, 0, Guid.Parse("22222222-2222-2222-2222-222222222222"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeLog });

        // Assert - Chain should be recomputed
        result.MergedEntries.Should().HaveCount(2);
        result.MergedEntries[0].MergedLink.Should().NotBeNull();
        result.MergedEntries[1].MergedLink.Should().NotBeNull();
        result.MergedChainHead.Should().NotBeNull();
        // First entry's link should be computed from null prev_link (32-byte digest)
        result.MergedEntries[0].MergedLink.Should().HaveCount(32);
        // Chain head should equal last entry's merged link
        result.MergedChainHead.Should().BeEquivalentTo(result.MergedEntries[1].MergedLink);
    }

    #endregion

    #region OMP-015: Duplicate Detection

    [Fact]
    public async Task MergeAsync_DuplicateJobId_SamePayload_TakesEarliest()
    {
        // Arrange - Same job ID (same payload hash) from two nodes
        var jobId = Guid.Parse("dddddddd-dddd-dddd-dddd-dddddddddddd");
        var payloadHash = new byte[32];
        payloadHash[0] = 0xAA;
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash)
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHash)
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB });

        // Assert - Should take earliest (T=100 from node-a)
        result.MergedEntries.Should().ContainSingle();
        result.MergedEntries[0].SourceNodeId.Should().Be("node-a");
        result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
        // Should report duplicate
        result.Duplicates.Should().ContainSingle();
        result.Duplicates[0].JobId.Should().Be(jobId);
        result.Duplicates[0].NodeId.Should().Be("node-b");
        result.Duplicates[0].THlc.PhysicalTime.Should().Be(105);
    }

    [Fact]
    public async Task MergeAsync_TriplicateJobId_SamePayload_TakesEarliest()
    {
        // Arrange - Same job ID from three nodes
        var jobId = Guid.Parse("eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee");
        var payloadHash = new byte[32];
        payloadHash[0] = 0xBB;
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash)
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash) // Earliest
        });
        var nodeC = CreateNodeLog("node-c", new[]
        {
            CreateEntryWithPayloadHash("node-c", 150, 0, jobId, payloadHash)
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC });

        // Assert - Should take earliest (T=100 from node-b).
        // Uses SourceNodeId for consistency with the other winner-origin assertions
        // in this suite (see the two-node duplicate test above).
        result.MergedEntries.Should().ContainSingle();
        result.MergedEntries[0].SourceNodeId.Should().Be("node-b");
        result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
        // Should report two duplicates
        result.Duplicates.Should().HaveCount(2);
    }

    [Fact]
    public async Task MergeAsync_DuplicateJobId_DifferentPayload_ThrowsError()
    {
        // Arrange - Same job ID but different payload hashes (indicates bug)
        var jobId = Guid.Parse("ffffffff-ffff-ffff-ffff-ffffffffffff");
        var payloadHashA = new byte[32];
        payloadHashA[0] = 0x01;
        var payloadHashB = new byte[32];
        payloadHashB[0] = 0x02;
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA)
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntryWithPayloadHash("node-b", 105, 0, jobId, payloadHashB)
        });

        // Act & Assert - Should throw because payloads differ
        var act = () => _sut.MergeAsync(new[] { nodeA, nodeB });
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*conflicting payloads*");
    }

    #endregion

    #region OMP-018: Multi-Node Merge

    [Fact]
    public async Task MergeAsync_ThreeNodes_MergesCorrectly()
    {
        // Arrange - Three nodes with various timestamps
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
            CreateEntry("node-a", 400, 0, Guid.Parse("aaaaaaaa-0007-0000-0000-000000000000"))
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
            CreateEntry("node-b", 500, 0, Guid.Parse("bbbbbbbb-0008-0000-0000-000000000000"))
        });
        var nodeC = CreateNodeLog("node-c", new[]
        {
            CreateEntry("node-c", 300, 0, Guid.Parse("cccccccc-0003-0000-0000-000000000000")),
            CreateEntry("node-c", 600, 0, Guid.Parse("cccccccc-0009-0000-0000-000000000000"))
        });

        // Act
        var result = await _sut.MergeAsync(new[] { nodeA, nodeB, nodeC });

        // Assert
        result.MergedEntries.Should().HaveCount(6);
        result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
            .BeInAscendingOrder();
        result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
            .ContainInOrder(100L, 200L, 300L, 400L, 500L, 600L);
        result.SourceNodes.Should().HaveCount(3);
    }

    [Fact]
    public async Task MergeAsync_ManyNodes_PreservesTotalOrder()
    {
        // Arrange - 5 nodes with 2 entries each; timestamps interleave across nodes
        var nodes = new List<NodeJobLog>();
        for (int i = 0; i < 5; i++)
        {
            var nodeId = $"node-{i:D2}";
            nodes.Add(CreateNodeLog(nodeId, new[]
            {
                CreateEntry(nodeId, 100 + i * 10, 0, Guid.NewGuid()),
                CreateEntry(nodeId, 150 + i * 10, 0, Guid.NewGuid())
            }));
        }

        // Act
        var result = await _sut.MergeAsync(nodes);

        // Assert
        result.MergedEntries.Should().HaveCount(10);
        result.MergedEntries.Select(e => e.THlc.PhysicalTime).Should()
            .BeInAscendingOrder();
    }

    #endregion

    #region OMP-019: Determinism Tests

    [Fact]
    public async Task MergeAsync_SameInput_ProducesSameOutput()
    {
        // Arrange
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000")),
            CreateEntry("node-a", 300, 0, Guid.Parse("aaaaaaaa-0003-0000-0000-000000000000"))
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000")),
            CreateEntry("node-b", 400, 0, Guid.Parse("bbbbbbbb-0004-0000-0000-000000000000"))
        });

        // Act - Run merge twice over the same fixtures
        var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB });
        var result2 = await _sut.MergeAsync(new[] { nodeA, nodeB });

        // Assert - Results should be identical, including recomputed chain links
        result1.MergedEntries.Should().HaveCount(result2.MergedEntries.Count);
        for (int i = 0; i < result1.MergedEntries.Count; i++)
        {
            result1.MergedEntries[i].JobId.Should().Be(result2.MergedEntries[i].JobId);
            result1.MergedEntries[i].THlc.Should().Be(result2.MergedEntries[i].THlc);
            result1.MergedEntries[i].MergedLink.Should().BeEquivalentTo(result2.MergedEntries[i].MergedLink);
        }
        result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead);
    }

    [Fact]
    public async Task MergeAsync_InputOrderIndependent_ProducesSameOutput()
    {
        // Arrange
        var nodeA = CreateNodeLog("node-a", new[]
        {
            CreateEntry("node-a", 100, 0, Guid.Parse("aaaaaaaa-0001-0000-0000-000000000000"))
        });
        var nodeB = CreateNodeLog("node-b", new[]
        {
            CreateEntry("node-b", 200, 0, Guid.Parse("bbbbbbbb-0002-0000-0000-000000000000"))
        });

        // Act - Merge the same logs supplied in different orders
        var result1 = await _sut.MergeAsync(new[] { nodeA, nodeB });
        var result2 = await _sut.MergeAsync(new[] { nodeB, nodeA });

        // Assert - Results should be identical regardless of input order
        result1.MergedEntries.Select(e => e.JobId).Should()
            .BeEquivalentTo(result2.MergedEntries.Select(e => e.JobId));
        result1.MergedChainHead.Should().BeEquivalentTo(result2.MergedChainHead);
    }

    #endregion

    #region Helper Methods

    /// <summary>Wraps <paramref name="entries"/> in a <see cref="NodeJobLog"/> for <paramref name="nodeId"/>.</summary>
    private static NodeJobLog CreateNodeLog(string nodeId, IEnumerable<OfflineJobLogEntry> entries)
    {
        return new NodeJobLog
        {
            NodeId = nodeId,
            Entries = entries.ToList()
        };
    }

    /// <summary>
    /// Creates a log entry whose payload hash is derived from the job ID, so two entries
    /// sharing a job ID also share a payload hash (the "same payload" duplicate case).
    /// </summary>
    private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId)
    {
        // Derive a stable 32-byte hash from the 16-byte GUID (remaining bytes stay zero),
        // then delegate to the explicit-hash factory to avoid duplicating construction.
        var payloadHash = new byte[32];
        jobId.ToByteArray().CopyTo(payloadHash, 0);
        return CreateEntryWithPayloadHash(nodeId, physicalTime, logicalCounter, jobId, payloadHash);
    }

    /// <summary>
    /// Creates a log entry with an explicit payload hash, used to simulate
    /// same-job-ID entries carrying identical or conflicting payloads.
    /// </summary>
    private static OfflineJobLogEntry CreateEntryWithPayloadHash(
        string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash)
    {
        var hlc = new HlcTimestamp
        {
            PhysicalTime = physicalTime,
            NodeId = nodeId,
            LogicalCounter = logicalCounter
        };
        return new OfflineJobLogEntry
        {
            NodeId = nodeId,
            THlc = hlc,
            JobId = jobId,
            Payload = $"{{\"id\":\"{jobId}\"}}",
            PayloadHash = payloadHash,
            Link = new byte[32],
            // Fixed (not UtcNow): keeps fixtures deterministic for the OMP-019 tests.
            EnqueuedAt = FixedEnqueuedAt
        };
    }

    #endregion
}

View File

@@ -0,0 +1,29 @@
<!-- xUnit test project for the StellaOps.AirGap.Sync library (HLC merge / air-gap sync tests). -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<!-- Test assembly: never published as a NuGet package; IsTestProject enables `dotnet test` discovery. -->
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<!-- Package versions are not pinned here; presumably supplied by central package
     management (Directory.Packages.props) - TODO confirm.
     NOTE(review): no direct references to xunit, FluentAssertions, or
     Microsoft.NET.Test.Sdk appear in this file although the test code uses them;
     verify they flow in from a shared Directory.Build.props/targets. -->
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<!-- Coverlet provides code-coverage collection during `dotnet test --collect`. -->
<PackageReference Include="coverlet.collector">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<!-- System under test and the shared test-category constants (TestCategories). -->
<ProjectReference Include="..\..\__Libraries\StellaOps.AirGap.Sync\StellaOps.AirGap.Sync.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
</ItemGroup>
</Project>