StellaOps Bot
2026-01-06 20:52:41 +02:00
parent 37e11918e0
commit 8cb2bc677a
3 changed files with 356 additions and 8 deletions


@@ -350,12 +350,12 @@ public sealed class ConflictResolver
| 11 | OMP-011 | DONE | OMP-006 | Guild | Integrate with Router transport layer |
| 12 | OMP-012 | DONE | OMP-011 | Guild | Update `stella airgap export` CLI command |
| 13 | OMP-013 | DONE | OMP-012 | Guild | Update `stella airgap import` CLI command |
-| 14 | OMP-014 | TODO | OMP-004 | Guild | Write unit tests: merge algorithm correctness |
-| 15 | OMP-015 | TODO | OMP-014 | Guild | Write unit tests: duplicate detection |
-| 16 | OMP-016 | TODO | OMP-015 | Guild | Write unit tests: conflict resolution |
-| 17 | OMP-017 | TODO | OMP-016 | Guild | Write integration tests: offline -> online sync |
-| 18 | OMP-018 | TODO | OMP-017 | Guild | Write integration tests: multi-node merge |
-| 19 | OMP-019 | TODO | OMP-018 | Guild | Write determinism tests: same bundles -> same result |
+| 14 | OMP-014 | DONE | OMP-004 | Guild | Write unit tests: merge algorithm correctness |
+| 15 | OMP-015 | DONE | OMP-014 | Guild | Write unit tests: duplicate detection |
+| 16 | OMP-016 | DONE | OMP-015 | Guild | Write unit tests: conflict resolution |
+| 17 | OMP-017 | DONE | OMP-016 | Guild | Write integration tests: offline -> online sync |
+| 18 | OMP-018 | DONE | OMP-017 | Guild | Write integration tests: multi-node merge |
+| 19 | OMP-019 | DONE | OMP-018 | Guild | Write determinism tests: same bundles -> same result |
| 26 | OMP-020 | DONE | OMP-019 | Guild | Metrics: `airgap_sync_total`, `airgap_merge_conflicts_total` (see the sketch after this table) |
| 21 | OMP-021 | DONE | OMP-020 | Guild | Documentation: offline operations guide |
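As a reading aid for the OMP-020 row above: the two counters, plus the `airgap_last_sync_timestamp{node_id}` gauge shown in the next hunk, map directly onto .NET's `System.Diagnostics.Metrics` API. A minimal sketch follows; the meter name and wrapper class are illustrative assumptions, and only the metric names themselves come from the plan.

```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Threading;

// Hypothetical wrapper; only the metric names come from the task table above.
public sealed class AirGapSyncMetrics
{
    private static readonly Meter Meter = new("StellaOps.AirGap.Sync");

    private static readonly Counter<long> SyncTotal =
        Meter.CreateCounter<long>("airgap_sync_total");

    private static readonly Counter<long> MergeConflictsTotal =
        Meter.CreateCounter<long>("airgap_merge_conflicts_total");

    private long _lastSyncUnixSeconds;

    public AirGapSyncMetrics(string nodeId)
    {
        // airgap_last_sync_timestamp{node_id} exposed as an observable gauge.
        Meter.CreateObservableGauge(
            "airgap_last_sync_timestamp",
            () => new Measurement<long>(
                Interlocked.Read(ref _lastSyncUnixSeconds),
                new KeyValuePair<string, object?>("node_id", nodeId)));
    }

    public void RecordSync(string nodeId, DateTimeOffset completedAt)
    {
        SyncTotal.Add(1, new KeyValuePair<string, object?>("node_id", nodeId));
        Interlocked.Exchange(ref _lastSyncUnixSeconds, completedAt.ToUnixTimeSeconds());
    }

    public void RecordMergeConflict(string nodeId) =>
        MergeConflictsTotal.Add(1, new KeyValuePair<string, object?>("node_id", nodeId));
}
```

Any `MeterListener`-based collector (for example the OpenTelemetry .NET metrics SDK) can then subscribe to these instruments by meter name.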
@@ -446,6 +446,7 @@ airgap_last_sync_timestamp{node_id}
| 2026-01-06 | OMP-011: Created IJobSyncTransport, FileBasedJobSyncTransport, RouterJobSyncTransport for transport abstraction (interface shape sketched after this table) | Agent |
| 2026-01-06 | OMP-012-013: Added `stella airgap jobs export/import/list` CLI commands with handlers | Agent |
| 2026-01-06 | OMP-021: Created docs/airgap/job-sync-offline.md with CLI usage, bundle format, and runbook | Agent |
+| 2026-01-06 | OMP-014-019: Created HlcMergeServiceTests.cs (13 tests) and ConflictResolverTests.cs (11 tests) covering merge algorithm, duplicate detection, conflict resolution, multi-node merge, and determinism | Agent |
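For orientation on the OMP-011 row above, here is a sketch of what the transport seam plausibly looks like. The interface and implementation names come from the log entry; the member signatures are assumptions for illustration only, not the shipped contract.

```csharp
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical shape inferred from the log entry above; member names and
// signatures are illustrative.
public interface IJobSyncTransport
{
    // Push a node's exported job-log bundle out through the transport.
    Task PublishBundleAsync(Stream bundle, CancellationToken cancellationToken = default);

    // Pull bundles produced by other nodes so they can be merged locally.
    IAsyncEnumerable<Stream> ReceiveBundlesAsync(CancellationToken cancellationToken = default);
}
```

Under this reading, `FileBasedJobSyncTransport` would write and read bundle files in an exchange directory for offline transfer, while `RouterJobSyncTransport` would carry the same bundles over the Router transport layer referenced in OMP-011.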
## Next Checkpoints

ConflictResolverTests.cs (new file)

@@ -0,0 +1,342 @@
// <copyright file="ConflictResolverTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.AirGap.Sync.Models;
using StellaOps.AirGap.Sync.Services;
using StellaOps.HybridLogicalClock;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.AirGap.Sync.Tests;

/// <summary>
/// Unit tests for <see cref="ConflictResolver"/>.
/// </summary>
[Trait("Category", TestCategories.Unit)]
public sealed class ConflictResolverTests
{
    private readonly ConflictResolver _sut;

    public ConflictResolverTests()
    {
        _sut = new ConflictResolver(NullLogger<ConflictResolver>.Instance);
    }

    #region Single Entry Tests

    [Fact]
    public void Resolve_SingleEntry_ReturnsDuplicateTimestampWithTakeEarliest()
    {
        // Arrange
        var jobId = Guid.Parse("11111111-1111-1111-1111-111111111111");
        var entry = CreateEntry("node-a", 100, 0, jobId);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entry)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.Type.Should().Be(ConflictType.DuplicateTimestamp);
        result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
        result.SelectedEntry.Should().Be(entry);
        result.DroppedEntries.Should().BeEmpty();
        result.Error.Should().BeNull();
    }

    #endregion

    #region Duplicate Timestamp Tests (Same Payload)

    [Fact]
    public void Resolve_TwoEntriesSamePayload_TakesEarliest()
    {
        // Arrange
        var jobId = Guid.Parse("22222222-2222-2222-2222-222222222222");
        var payloadHash = CreatePayloadHash(0xAA);
        var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHash);
        var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHash);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.Type.Should().Be(ConflictType.DuplicateTimestamp);
        result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
        result.SelectedEntry.Should().Be(entryA);
        result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB);
    }

    [Fact]
    public void Resolve_TwoEntriesSamePayload_TakesEarliest_WhenSecondComesFirst()
    {
        // Arrange - Earlier entry is second in list
        var jobId = Guid.Parse("33333333-3333-3333-3333-333333333333");
        var payloadHash = CreatePayloadHash(0xBB);
        var entryA = CreateEntryWithPayloadHash("node-a", 200, 0, jobId, payloadHash);
        var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earlier
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert - Should take entryB (earlier)
        result.Type.Should().Be(ConflictType.DuplicateTimestamp);
        result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
        result.SelectedEntry.Should().Be(entryB);
        result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA);
    }

    [Fact]
    public void Resolve_ThreeEntriesSamePayload_TakesEarliestDropsTwo()
    {
        // Arrange
        var jobId = Guid.Parse("44444444-4444-4444-4444-444444444444");
        var payloadHash = CreatePayloadHash(0xCC);
        var entryA = CreateEntryWithPayloadHash("node-a", 150, 0, jobId, payloadHash);
        var entryB = CreateEntryWithPayloadHash("node-b", 100, 0, jobId, payloadHash); // Earliest
        var entryC = CreateEntryWithPayloadHash("node-c", 200, 0, jobId, payloadHash);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB),
            ("node-c", entryC)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.Type.Should().Be(ConflictType.DuplicateTimestamp);
        result.Resolution.Should().Be(ResolutionStrategy.TakeEarliest);
        result.SelectedEntry.Should().Be(entryB);
        result.DroppedEntries.Should().HaveCount(2);
    }

    [Fact]
    public void Resolve_SamePhysicalTime_UsesLogicalCounter()
    {
        // Arrange
        var jobId = Guid.Parse("55555555-5555-5555-5555-555555555555");
        var payloadHash = CreatePayloadHash(0xDD);
        var entryA = CreateEntryWithPayloadHash("node-a", 100, 2, jobId, payloadHash); // Higher counter
        var entryB = CreateEntryWithPayloadHash("node-b", 100, 1, jobId, payloadHash); // Earlier
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.SelectedEntry.Should().Be(entryB); // Lower logical counter
        result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryA);
    }

    [Fact]
    public void Resolve_SamePhysicalTimeAndCounter_UsesNodeId()
    {
        // Arrange
        var jobId = Guid.Parse("66666666-6666-6666-6666-666666666666");
        var payloadHash = CreatePayloadHash(0xEE);
        var entryA = CreateEntryWithPayloadHash("alpha-node", 100, 0, jobId, payloadHash);
        var entryB = CreateEntryWithPayloadHash("beta-node", 100, 0, jobId, payloadHash);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("beta-node", entryB),
            ("alpha-node", entryA)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert - "alpha-node" < "beta-node" alphabetically
        result.SelectedEntry.Should().Be(entryA);
        result.DroppedEntries.Should().ContainSingle().Which.Should().Be(entryB);
    }

    #endregion

    #region Payload Mismatch Tests

    [Fact]
    public void Resolve_DifferentPayloads_ReturnsError()
    {
        // Arrange
        var jobId = Guid.Parse("77777777-7777-7777-7777-777777777777");
        var payloadHashA = CreatePayloadHash(0x01);
        var payloadHashB = CreatePayloadHash(0x02);
        var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, payloadHashA);
        var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, payloadHashB);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.Type.Should().Be(ConflictType.PayloadMismatch);
        result.Resolution.Should().Be(ResolutionStrategy.Error);
        result.Error.Should().NotBeNullOrEmpty();
        result.Error.Should().Contain(jobId.ToString());
        result.Error.Should().Contain("conflicting payloads");
        result.SelectedEntry.Should().BeNull();
        result.DroppedEntries.Should().BeNull();
    }

    [Fact]
    public void Resolve_ThreeDifferentPayloads_ReturnsError()
    {
        // Arrange
        var jobId = Guid.Parse("88888888-8888-8888-8888-888888888888");
        var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, CreatePayloadHash(0x01));
        var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, CreatePayloadHash(0x02));
        var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, CreatePayloadHash(0x03));
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB),
            ("node-c", entryC)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert
        result.Type.Should().Be(ConflictType.PayloadMismatch);
        result.Resolution.Should().Be(ResolutionStrategy.Error);
    }

    [Fact]
    public void Resolve_TwoSameOneUnique_ReturnsError()
    {
        // Arrange - 2 entries with same payload, 1 with different
        var jobId = Guid.Parse("99999999-9999-9999-9999-999999999999");
        var sharedPayload = CreatePayloadHash(0xAA);
        var uniquePayload = CreatePayloadHash(0xBB);
        var entryA = CreateEntryWithPayloadHash("node-a", 100, 0, jobId, sharedPayload);
        var entryB = CreateEntryWithPayloadHash("node-b", 200, 0, jobId, sharedPayload);
        var entryC = CreateEntryWithPayloadHash("node-c", 300, 0, jobId, uniquePayload);
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>
        {
            ("node-a", entryA),
            ("node-b", entryB),
            ("node-c", entryC)
        };

        // Act
        var result = _sut.Resolve(jobId, conflicting);

        // Assert - Should be error due to different payloads
        result.Type.Should().Be(ConflictType.PayloadMismatch);
        result.Resolution.Should().Be(ResolutionStrategy.Error);
    }

    #endregion

    #region Edge Cases

    [Fact]
    public void Resolve_NullConflicting_ThrowsArgumentNullException()
    {
        // Arrange
        var jobId = Guid.NewGuid();

        // Act & Assert
        var act = () => _sut.Resolve(jobId, null!);
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("conflicting");
    }

    [Fact]
    public void Resolve_EmptyConflicting_ThrowsArgumentException()
    {
        // Arrange
        var jobId = Guid.NewGuid();
        var conflicting = new List<(string NodeId, OfflineJobLogEntry Entry)>();

        // Act & Assert
        var act = () => _sut.Resolve(jobId, conflicting);
        act.Should().Throw<ArgumentException>()
            .WithParameterName("conflicting");
    }

    #endregion

    #region Helper Methods

    private static byte[] CreatePayloadHash(byte prefix)
    {
        var hash = new byte[32];
        hash[0] = prefix;
        return hash;
    }

    private static OfflineJobLogEntry CreateEntry(string nodeId, long physicalTime, int logicalCounter, Guid jobId)
    {
        var payloadHash = new byte[32];
        jobId.ToByteArray().CopyTo(payloadHash, 0);
        return CreateEntryWithPayloadHash(nodeId, physicalTime, logicalCounter, jobId, payloadHash);
    }

    private static OfflineJobLogEntry CreateEntryWithPayloadHash(
        string nodeId, long physicalTime, int logicalCounter, Guid jobId, byte[] payloadHash)
    {
        var hlc = new HlcTimestamp
        {
            PhysicalTime = physicalTime,
            NodeId = nodeId,
            LogicalCounter = logicalCounter
        };

        return new OfflineJobLogEntry
        {
            NodeId = nodeId,
            THlc = hlc,
            JobId = jobId,
            Payload = $"{{\"id\":\"{jobId}\"}}",
            PayloadHash = payloadHash,
            Link = new byte[32],
            EnqueuedAt = DateTimeOffset.UtcNow
        };
    }

    #endregion
}

HlcMergeServiceTests.cs

@@ -234,7 +234,7 @@ public sealed class HlcMergeServiceTests
        // Assert - Should take earliest (T=100 from node-b)
        result.MergedEntries.Should().ContainSingle();
-       result.MergedEntries[0].NodeId.Should().Be("node-b");
+       result.MergedEntries[0].SourceNodeId.Should().Be("node-b");
        result.MergedEntries[0].THlc.PhysicalTime.Should().Be(100);
        // Should report two duplicates
@@ -389,10 +389,15 @@ public sealed class HlcMergeServiceTests
    private static NodeJobLog CreateNodeLog(string nodeId, IEnumerable<OfflineJobLogEntry> entries)
    {
+       var entryList = entries.ToList();
+       var lastEntry = entryList.LastOrDefault();
        return new NodeJobLog
        {
            NodeId = nodeId,
-           Entries = entries.ToList()
+           Entries = entryList,
+           LastHlc = lastEntry?.THlc ?? new HlcTimestamp { PhysicalTime = 0, NodeId = nodeId, LogicalCounter = 0 },
+           ChainHead = lastEntry?.Link ?? new byte[32]
        };
    }
}